Compare commits


2 Commits

Author SHA1 Message Date
Trenton H
6b0f3e93cd Fix: Add directory marker entries to zip exports
Without explicit directory entries, some zip viewers (simpler tools,
web-based viewers) don't show the folder structure when browsing the
archive. Add a _ensure_zip_dirs() helper that writes directory markers
for all parent paths of each file entry, deduplicating via a set.
Uses ZipFile.mkdir() (available since Python 3.11, the project minimum).

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-09 09:19:42 -07:00
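A minimal sketch of the directory-marker approach this commit describes; the helper name, archive paths, and call site below are illustrative assumptions, not the project's actual code:

```python
import zipfile
from pathlib import PurePosixPath


def ensure_zip_dirs(zf: zipfile.ZipFile, arcname: str, seen: set[str]) -> None:
    """Write explicit directory entries for every parent path of arcname."""
    for parent in reversed(PurePosixPath(arcname).parents):
        key = parent.as_posix()
        if key == ".":
            continue  # skip the implicit root
        if key not in seen:  # deduplicate across file entries via the shared set
            zf.mkdir(key)  # ZipFile.mkdir() is available since Python 3.11
            seen.add(key)


with zipfile.ZipFile("export.zip", "w") as zf:
    seen: set[str] = set()
    ensure_zip_dirs(zf, "documents/2024/invoice.pdf", seen)
    zf.writestr("documents/2024/invoice.pdf", b"%PDF-1.7 ...")
```

With the markers in place, viewers that only list explicit entries still show `documents/` and `documents/2024/` as folders.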
Trenton H
882ae7a3d8 Refactor: Write zip exports directly into ZipFile instead of temp dir
Replace the temp-dir + shutil.make_archive() workaround with direct
zipfile.ZipFile writes. Document files are added via zf.write() and
JSON manifests via zf.writestr()/StringIO buffering, eliminating the
double-I/O and 2x disk usage of the previous approach.

Key changes:
- Removed tempfile.TemporaryDirectory and shutil.make_archive() from handle()
- ZipFile opened on a .tmp path; renamed to final .zip atomically on success;
  .tmp cleaned up on failure
- StreamingManifestWriter: zip mode buffers manifest in io.StringIO and
  writes to zip atomically on close() (zipfile allows only one open write
  handle at a time)
- check_and_copy(): zip mode calls zf.write(source, arcname=...) directly
- check_and_write_json(): zip mode calls zf.writestr(arcname, ...) directly
- files_in_export_dir scan skipped in zip mode (always fresh write)
- --compare-checksums and --compare-json emit warnings when used with --zip
- --delete in zip mode removes pre-existing files from target dir, skipping
  the in-progress .tmp and any prior .zip
- Added tests: atomicity on failure, no SCRATCH_DIR usage

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-03-09 09:13:12 -07:00
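The flow this commit describes can be sketched roughly as follows — write document files straight into a `.tmp` archive, buffer the JSON manifest in memory, and rename atomically on success. Paths, the manifest payload, and the error handling are illustrative assumptions, not the actual management command:

```python
import io
import json
import zipfile
from pathlib import Path

target = Path("/exports/export.zip")
tmp = target.with_suffix(".tmp")

# zipfile allows only one open write handle at a time, so the manifest
# is buffered in a StringIO and flushed in a single writestr() at the end
manifest_buf = io.StringIO()

try:
    with zipfile.ZipFile(tmp, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        # document files go from disk into the archive without a temp dir
        zf.write("/data/originals/0000001.pdf", arcname="documents/0000001.pdf")
        json.dump([{"pk": 1, "fields": {}}], manifest_buf, indent=2)
        zf.writestr("manifest.json", manifest_buf.getvalue())
    tmp.rename(target)  # atomic on POSIX; readers never see a partial .zip
except BaseException:
    tmp.unlink(missing_ok=True)  # leave no half-written archive behind
    raise
```

The single buffered `writestr()` mirrors the `StreamingManifestWriter` zip mode described above, and the rename-on-success pattern is what the added atomicity test would exercise.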
90 changed files with 8459 additions and 9133 deletions

View File

@@ -12,8 +12,6 @@ updates:
open-pull-requests-limit: 10
schedule:
interval: "monthly"
cooldown:
default-days: 7
labels:
- "frontend"
- "dependencies"
@@ -38,9 +36,7 @@ updates:
directory: "/"
# Check for updates once a week
schedule:
interval: "monthly"
cooldown:
default-days: 7
interval: "weekly"
labels:
- "backend"
- "dependencies"
@@ -101,8 +97,6 @@ updates:
schedule:
# Check for updates to GitHub Actions every month
interval: "monthly"
cooldown:
default-days: 7
labels:
- "ci-cd"
- "dependencies"
@@ -118,9 +112,7 @@ updates:
- "/"
- "/.devcontainer/"
schedule:
interval: "monthly"
cooldown:
default-days: 7
interval: "weekly"
open-pull-requests-limit: 5
labels:
- "dependencies"
@@ -131,9 +123,7 @@ updates:
- package-ecosystem: "docker-compose"
directory: "/docker/compose/"
schedule:
interval: "monthly"
cooldown:
default-days: 7
interval: "weekly"
open-pull-requests-limit: 5
labels:
- "dependencies"
@@ -157,11 +147,3 @@ updates:
postgres:
patterns:
- "docker.io/library/postgres*"
- package-ecosystem: "pre-commit" # See documentation for possible values
directory: "/" # Location of package manifests
schedule:
interval: "monthly"
groups:
pre-commit-dependencies:
patterns:
- "*"

View File

@@ -3,9 +3,21 @@ on:
push:
branches-ignore:
- 'translations**'
paths:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'docker/compose/docker-compose.ci-test.yml'
- '.github/workflows/ci-backend.yml'
pull_request:
branches-ignore:
- 'translations**'
paths:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'docker/compose/docker-compose.ci-test.yml'
- '.github/workflows/ci-backend.yml'
workflow_dispatch:
concurrency:
group: backend-${{ github.event.pull_request.number || github.ref }}
@@ -14,55 +26,7 @@ env:
DEFAULT_UV_VERSION: "0.10.x"
NLTK_DATA: "/usr/share/nltk_data"
jobs:
changes:
name: Detect Backend Changes
runs-on: ubuntu-slim
outputs:
backend_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.backend == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v6.0.2
with:
fetch-depth: 0
- name: Decide run mode
id: force
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
else
echo "run_all=false" >> "$GITHUB_OUTPUT"
fi
- name: Set diff range
id: range
if: steps.force.outputs.run_all != 'true'
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event.created }}" == "true" ]]; then
echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
else
echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
fi
echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
- name: Detect changes
id: filter
if: steps.force.outputs.run_all != 'true'
uses: dorny/paths-filter@v3.0.2
with:
base: ${{ steps.range.outputs.base }}
ref: ${{ steps.range.outputs.ref }}
filters: |
backend:
- 'src/**'
- 'pyproject.toml'
- 'uv.lock'
- 'docker/compose/docker-compose.ci-test.yml'
- '.github/workflows/ci-backend.yml'
test:
needs: changes
if: needs.changes.outputs.backend_changed == 'true'
name: "Python ${{ matrix.python-version }}"
runs-on: ubuntu-24.04
strategy:
@@ -136,8 +100,6 @@ jobs:
docker compose --file docker/compose/docker-compose.ci-test.yml logs
docker compose --file docker/compose/docker-compose.ci-test.yml down
typing:
needs: changes
if: needs.changes.outputs.backend_changed == 'true'
name: Check project typing
runs-on: ubuntu-24.04
env:
@@ -188,27 +150,3 @@ jobs:
--show-error-codes \
--warn-unused-configs \
src/ | uv run mypy-baseline filter
gate:
name: Backend CI Gate
needs: [changes, test, typing]
if: always()
runs-on: ubuntu-slim
steps:
- name: Check gate
run: |
if [[ "${{ needs.changes.outputs.backend_changed }}" != "true" ]]; then
echo "No backend-relevant changes detected."
exit 0
fi
if [[ "${{ needs.test.result }}" != "success" ]]; then
echo "::error::Backend test job result: ${{ needs.test.result }}"
exit 1
fi
if [[ "${{ needs.typing.result }}" != "success" ]]; then
echo "::error::Backend typing job result: ${{ needs.typing.result }}"
exit 1
fi
echo "Backend checks passed."

View File

@@ -104,9 +104,9 @@ jobs:
echo "repository=${repo_name}"
echo "name=${repo_name}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v4.0.0
uses: docker/setup-buildx-action@v3.12.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v4.0.0
uses: docker/login-action@v3.7.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
@@ -179,22 +179,22 @@ jobs:
echo "Downloaded digests:"
ls -la /tmp/digests/
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v4.0.0
uses: docker/setup-buildx-action@v3.12.0
- name: Login to GitHub Container Registry
uses: docker/login-action@v4.0.0
uses: docker/login-action@v3.7.0
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Login to Docker Hub
if: needs.build-arch.outputs.push-external == 'true'
uses: docker/login-action@v4.0.0
uses: docker/login-action@v3.7.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to Quay.io
if: needs.build-arch.outputs.push-external == 'true'
uses: docker/login-action@v4.0.0
uses: docker/login-action@v3.7.0
with:
registry: quay.io
username: ${{ secrets.QUAY_USERNAME }}

View File

@@ -1,9 +1,22 @@
name: Documentation
on:
push:
branches-ignore:
- 'translations**'
branches:
- main
- dev
paths:
- 'docs/**'
- 'zensical.toml'
- 'pyproject.toml'
- 'uv.lock'
- '.github/workflows/ci-docs.yml'
pull_request:
paths:
- 'docs/**'
- 'zensical.toml'
- 'pyproject.toml'
- 'uv.lock'
- '.github/workflows/ci-docs.yml'
workflow_dispatch:
concurrency:
group: docs-${{ github.event.pull_request.number || github.ref }}
@@ -16,55 +29,7 @@ env:
DEFAULT_UV_VERSION: "0.10.x"
DEFAULT_PYTHON_VERSION: "3.12"
jobs:
changes:
name: Detect Docs Changes
runs-on: ubuntu-slim
outputs:
docs_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.docs == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v6.0.2
with:
fetch-depth: 0
- name: Decide run mode
id: force
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
else
echo "run_all=false" >> "$GITHUB_OUTPUT"
fi
- name: Set diff range
id: range
if: steps.force.outputs.run_all != 'true'
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event.created }}" == "true" ]]; then
echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
else
echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
fi
echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
- name: Detect changes
id: filter
if: steps.force.outputs.run_all != 'true'
uses: dorny/paths-filter@v3.0.2
with:
base: ${{ steps.range.outputs.base }}
ref: ${{ steps.range.outputs.ref }}
filters: |
docs:
- 'docs/**'
- 'zensical.toml'
- 'pyproject.toml'
- 'uv.lock'
- '.github/workflows/ci-docs.yml'
build:
needs: changes
if: needs.changes.outputs.docs_changed == 'true'
name: Build Documentation
runs-on: ubuntu-24.04
steps:
@@ -99,8 +64,8 @@ jobs:
name: github-pages-${{ github.run_id }}-${{ github.run_attempt }}
deploy:
name: Deploy Documentation
needs: [changes, build]
if: github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.changes.outputs.docs_changed == 'true'
needs: build
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
runs-on: ubuntu-24.04
environment:
name: github-pages
@@ -111,22 +76,3 @@ jobs:
id: deployment
with:
artifact_name: github-pages-${{ github.run_id }}-${{ github.run_attempt }}
gate:
name: Docs CI Gate
needs: [changes, build]
if: always()
runs-on: ubuntu-slim
steps:
- name: Check gate
run: |
if [[ "${{ needs.changes.outputs.docs_changed }}" != "true" ]]; then
echo "No docs-relevant changes detected."
exit 0
fi
if [[ "${{ needs.build.result }}" != "success" ]]; then
echo "::error::Docs build job result: ${{ needs.build.result }}"
exit 1
fi
echo "Docs checks passed."

View File

@@ -3,60 +3,21 @@ on:
push:
branches-ignore:
- 'translations**'
paths:
- 'src-ui/**'
- '.github/workflows/ci-frontend.yml'
pull_request:
branches-ignore:
- 'translations**'
paths:
- 'src-ui/**'
- '.github/workflows/ci-frontend.yml'
workflow_dispatch:
concurrency:
group: frontend-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
changes:
name: Detect Frontend Changes
runs-on: ubuntu-slim
outputs:
frontend_changed: ${{ steps.force.outputs.run_all == 'true' || steps.filter.outputs.frontend == 'true' }}
steps:
- name: Checkout
uses: actions/checkout@v6.0.2
with:
fetch-depth: 0
- name: Decide run mode
id: force
run: |
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event_name }}" == "push" && ( "${{ github.ref_name }}" == "main" || "${{ github.ref_name }}" == "dev" ) ]]; then
echo "run_all=true" >> "$GITHUB_OUTPUT"
else
echo "run_all=false" >> "$GITHUB_OUTPUT"
fi
- name: Set diff range
id: range
if: steps.force.outputs.run_all != 'true'
run: |
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
echo "base=${{ github.event.pull_request.base.sha }}" >> "$GITHUB_OUTPUT"
elif [[ "${{ github.event.created }}" == "true" ]]; then
echo "base=${{ github.event.repository.default_branch }}" >> "$GITHUB_OUTPUT"
else
echo "base=${{ github.event.before }}" >> "$GITHUB_OUTPUT"
fi
echo "ref=${{ github.sha }}" >> "$GITHUB_OUTPUT"
- name: Detect changes
id: filter
if: steps.force.outputs.run_all != 'true'
uses: dorny/paths-filter@v3.0.2
with:
base: ${{ steps.range.outputs.base }}
ref: ${{ steps.range.outputs.ref }}
filters: |
frontend:
- 'src-ui/**'
- '.github/workflows/ci-frontend.yml'
install-dependencies:
needs: changes
if: needs.changes.outputs.frontend_changed == 'true'
name: Install Dependencies
runs-on: ubuntu-24.04
steps:
@@ -67,7 +28,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'
@@ -84,8 +45,7 @@ jobs:
run: cd src-ui && pnpm install
lint:
name: Lint
needs: [changes, install-dependencies]
if: needs.changes.outputs.frontend_changed == 'true'
needs: install-dependencies
runs-on: ubuntu-24.04
steps:
- name: Checkout
@@ -95,7 +55,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'
@@ -113,8 +73,7 @@ jobs:
run: cd src-ui && pnpm run lint
unit-tests:
name: "Unit Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
needs: [changes, install-dependencies]
if: needs.changes.outputs.frontend_changed == 'true'
needs: install-dependencies
runs-on: ubuntu-24.04
strategy:
fail-fast: false
@@ -130,7 +89,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'
@@ -160,8 +119,7 @@ jobs:
directory: src-ui/coverage/
e2e-tests:
name: "E2E Tests (${{ matrix.shard-index }}/${{ matrix.shard-count }})"
needs: [changes, install-dependencies]
if: needs.changes.outputs.frontend_changed == 'true'
needs: install-dependencies
runs-on: ubuntu-24.04
container: mcr.microsoft.com/playwright:v1.58.2-noble
env:
@@ -181,7 +139,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'
@@ -201,8 +159,7 @@ jobs:
run: cd src-ui && pnpm exec playwright test --shard ${{ matrix.shard-index }}/${{ matrix.shard-count }}
bundle-analysis:
name: Bundle Analysis
needs: [changes, unit-tests, e2e-tests]
if: needs.changes.outputs.frontend_changed == 'true'
needs: [unit-tests, e2e-tests]
runs-on: ubuntu-24.04
steps:
- name: Checkout
@@ -214,7 +171,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'
@@ -232,42 +189,3 @@ jobs:
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
run: cd src-ui && pnpm run build --configuration=production
gate:
name: Frontend CI Gate
needs: [changes, install-dependencies, lint, unit-tests, e2e-tests, bundle-analysis]
if: always()
runs-on: ubuntu-slim
steps:
- name: Check gate
run: |
if [[ "${{ needs.changes.outputs.frontend_changed }}" != "true" ]]; then
echo "No frontend-relevant changes detected."
exit 0
fi
if [[ "${{ needs['install-dependencies'].result }}" != "success" ]]; then
echo "::error::Frontend install job result: ${{ needs['install-dependencies'].result }}"
exit 1
fi
if [[ "${{ needs.lint.result }}" != "success" ]]; then
echo "::error::Frontend lint job result: ${{ needs.lint.result }}"
exit 1
fi
if [[ "${{ needs['unit-tests'].result }}" != "success" ]]; then
echo "::error::Frontend unit-tests job result: ${{ needs['unit-tests'].result }}"
exit 1
fi
if [[ "${{ needs['e2e-tests'].result }}" != "success" ]]; then
echo "::error::Frontend e2e-tests job result: ${{ needs['e2e-tests'].result }}"
exit 1
fi
if [[ "${{ needs['bundle-analysis'].result }}" != "success" ]]; then
echo "::error::Frontend bundle-analysis job result: ${{ needs['bundle-analysis'].result }}"
exit 1
fi
echo "Frontend checks passed."

View File

@@ -35,7 +35,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'

View File

@@ -40,7 +40,7 @@ jobs:
with:
version: 10
- name: Use Node.js 24
uses: actions/setup-node@v6.3.0
uses: actions/setup-node@v6.2.0
with:
node-version: 24.x
cache: 'pnpm'

View File

@@ -29,7 +29,7 @@ repos:
- id: check-case-conflict
- id: detect-private-key
- repo: https://github.com/codespell-project/codespell
rev: v2.4.2
rev: v2.4.1
hooks:
- id: codespell
additional_dependencies: [tomli]
@@ -46,11 +46,11 @@ repos:
- ts
- markdown
additional_dependencies:
- prettier@3.8.1
- 'prettier-plugin-organize-imports@4.3.0'
- prettier@3.3.3
- 'prettier-plugin-organize-imports@4.1.0'
# Python hooks
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.15.5
rev: v0.15.0
hooks:
- id: ruff-check
- id: ruff-format
@@ -65,7 +65,7 @@ repos:
- id: hadolint
# Shell script hooks
- repo: https://github.com/lovesegfault/beautysh
rev: v6.4.3
rev: v6.4.2
hooks:
- id: beautysh
types: [file]

View File

@@ -5,6 +5,14 @@ const config = {
singleQuote: true,
// https://prettier.io/docs/en/options.html#trailing-commas
trailingComma: 'es5',
overrides: [
{
files: ['docs/*.md'],
options: {
tabWidth: 4,
},
},
],
plugins: [require('prettier-plugin-organize-imports')],
}

View File

@@ -30,7 +30,7 @@ RUN set -eux \
# Purpose: Installs s6-overlay and rootfs
# Comments:
# - Don't leave anything extra in here either
FROM ghcr.io/astral-sh/uv:0.10.9-python3.12-trixie-slim AS s6-overlay-base
FROM ghcr.io/astral-sh/uv:0.10.7-python3.12-trixie-slim AS s6-overlay-base
WORKDIR /usr/src/s6

View File

@@ -56,7 +56,6 @@ services:
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
PAPERLESS_DBENGINE: postgres
env_file:
- stack.env
volumes:

View File

@@ -62,7 +62,6 @@ services:
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
PAPERLESS_DBENGINE: postgresql
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998

View File

@@ -56,7 +56,6 @@ services:
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBHOST: db
PAPERLESS_DBENGINE: postgresql
volumes:
data:
media:

View File

@@ -51,7 +51,6 @@ services:
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBENGINE: sqlite
PAPERLESS_TIKA_ENABLED: 1
PAPERLESS_TIKA_GOTENBERG_ENDPOINT: http://gotenberg:3000
PAPERLESS_TIKA_ENDPOINT: http://tika:9998

View File

@@ -42,7 +42,6 @@ services:
env_file: docker-compose.env
environment:
PAPERLESS_REDIS: redis://broker:6379
PAPERLESS_DBENGINE: sqlite
volumes:
data:
media:

View File

@@ -10,10 +10,8 @@ cd "${PAPERLESS_SRC_DIR}"
# The whole migrate, with flock, needs to run as the right user
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
python3 manage.py check --tag compatibility paperless || exit 1
exec s6-setlock -n "${data_dir}/migration_lock" python3 manage.py migrate --skip-checks --no-input
else
s6-setuidgid paperless python3 manage.py check --tag compatibility paperless || exit 1
exec s6-setuidgid paperless \
s6-setlock -n "${data_dir}/migration_lock" \
python3 manage.py migrate --skip-checks --no-input

View File

@@ -10,16 +10,16 @@ consuming documents at that time.
Options available to any installation of paperless:
- Use the [document exporter](#exporter). The document exporter exports all your documents,
thumbnails, metadata, and database contents to a specific folder. You may import your
documents and settings into a fresh instance of paperless again or store your
documents in another DMS with this export.
- Use the [document exporter](#exporter). The document exporter exports all your documents,
thumbnails, metadata, and database contents to a specific folder. You may import your
documents and settings into a fresh instance of paperless again or store your
documents in another DMS with this export.
The document exporter is also able to update an already existing
export. Therefore, incremental backups with `rsync` are entirely
possible.
The document exporter is also able to update an already existing
export. Therefore, incremental backups with `rsync` are entirely
possible.
The exporter does not include API tokens and they will need to be re-generated after importing.
The exporter does not include API tokens and they will need to be re-generated after importing.
!!! caution
@@ -29,27 +29,28 @@ Options available to any installation of paperless:
Options available to docker installations:
- Backup the docker volumes. These usually reside within
`/var/lib/docker/volumes` on the host and you need to be root in
order to access them.
- Backup the docker volumes. These usually reside within
`/var/lib/docker/volumes` on the host and you need to be root in
order to access them.
Paperless uses 4 volumes:
- `paperless_media`: This is where your documents are stored.
- `paperless_data`: This is where auxiliary data is stored. This
folder also contains the SQLite database, if you use it.
- `paperless_pgdata`: Exists only if you use PostgreSQL and
contains the database.
- `paperless_dbdata`: Exists only if you use MariaDB and contains
the database.
Paperless uses 4 volumes:
- `paperless_media`: This is where your documents are stored.
- `paperless_data`: This is where auxiliary data is stored. This
folder also contains the SQLite database, if you use it.
- `paperless_pgdata`: Exists only if you use PostgreSQL and
contains the database.
- `paperless_dbdata`: Exists only if you use MariaDB and contains
the database.
Options available to bare-metal and non-docker installations:
- Backup the entire paperless folder. This ensures that if your
paperless instance crashes at some point or your disk fails, you can
simply copy the folder back into place and it works.
- Backup the entire paperless folder. This ensures that if your
paperless instance crashes at some point or your disk fails, you can
simply copy the folder back into place and it works.
When using PostgreSQL or MariaDB, you'll also have to backup the
database.
When using PostgreSQL or MariaDB, you'll also have to backup the
database.
### Restoring {#migrating-restoring}
@@ -508,19 +509,19 @@ collection for issues.
The issues detected by the sanity checker are as follows:
- Missing original files.
- Missing archive files.
- Inaccessible original files due to improper permissions.
- Inaccessible archive files due to improper permissions.
- Corrupted original documents by comparing their checksum against
what is stored in the database.
- Corrupted archive documents by comparing their checksum against what
is stored in the database.
- Missing thumbnails.
- Inaccessible thumbnails due to improper permissions.
- Documents without any content (warning).
- Orphaned files in the media directory (warning). These are files
that are not referenced by any document in paperless.
- Missing original files.
- Missing archive files.
- Inaccessible original files due to improper permissions.
- Inaccessible archive files due to improper permissions.
- Corrupted original documents by comparing their checksum against
what is stored in the database.
- Corrupted archive documents by comparing their checksum against what
is stored in the database.
- Missing thumbnails.
- Inaccessible thumbnails due to improper permissions.
- Documents without any content (warning).
- Orphaned files in the media directory (warning). These are files
that are not referenced by any document in paperless.
```
document_sanity_checker

View File

@@ -25,20 +25,20 @@ documents.
The following algorithms are available:
- **None:** No matching will be performed.
- **Any:** Looks for any occurrence of any word provided in match in
the PDF. If you define the match as `Bank1 Bank2`, it will match
documents containing either of these terms.
- **All:** Requires that every word provided appears in the PDF,
albeit not in the order provided.
- **Exact:** Matches only if the match appears exactly as provided
(i.e. preserve ordering) in the PDF.
- **Regular expression:** Parses the match as a regular expression and
tries to find a match within the document.
- **Fuzzy match:** Uses partial matching based on locating the tag text
inside the document, using a [partial ratio](https://rapidfuzz.github.io/RapidFuzz/Usage/fuzz.html#partial-ratio).
- **Auto:** Tries to automatically match new documents. This does not
require you to set a match. See the [notes below](#automatic-matching).
- **None:** No matching will be performed.
- **Any:** Looks for any occurrence of any word provided in match in
the PDF. If you define the match as `Bank1 Bank2`, it will match
documents containing either of these terms.
- **All:** Requires that every word provided appears in the PDF,
albeit not in the order provided.
- **Exact:** Matches only if the match appears exactly as provided
(i.e. preserve ordering) in the PDF.
- **Regular expression:** Parses the match as a regular expression and
tries to find a match within the document.
- **Fuzzy match:** Uses partial matching based on locating the tag text
inside the document, using a [partial ratio](https://rapidfuzz.github.io/RapidFuzz/Usage/fuzz.html#partial-ratio).
- **Auto:** Tries to automatically match new documents. This does not
require you to set a match. See the [notes below](#automatic-matching).
When using the _any_ or _all_ matching algorithms, you can search for
terms that consist of multiple words by enclosing them in double quotes.
@@ -69,33 +69,33 @@ Paperless tries to hide much of the involved complexity with this
approach. However, there are a couple caveats you need to keep in mind
when using this feature:
- Changes to your documents are not immediately reflected by the
matching algorithm. The neural network needs to be _trained_ on your
documents after changes. Paperless periodically (default: once each
hour) checks for changes and does this automatically for you.
- The Auto matching algorithm only takes documents into account which
are NOT placed in your inbox (i.e. do not have any inbox tags assigned to
them). This ensures that the neural network only learns from
documents which you have correctly tagged before.
- The matching algorithm can only work if there is a correlation
between the tag, correspondent, document type, or storage path and
the document itself. Your bank statements usually contain your bank
account number and the name of the bank, so this works reasonably
well. However, tags such as "TODO" cannot be automatically
assigned.
- The matching algorithm needs a reasonable number of documents to
identify when to assign tags, correspondents, storage paths, and
types. If one out of a thousand documents has the correspondent
"Very obscure web shop I bought something five years ago", it will
probably not assign this correspondent automatically if you buy
something from them again. The more documents, the better.
- Paperless also needs a reasonable amount of negative examples to
decide when not to assign a certain tag, correspondent, document
type, or storage path. This will usually be the case as you start
filling up paperless with documents. Example: If all your documents
are either from "Webshop" or "Bank", paperless will assign one
of these correspondents to ANY new document, if both are set to
automatic matching.
- Changes to your documents are not immediately reflected by the
matching algorithm. The neural network needs to be _trained_ on your
documents after changes. Paperless periodically (default: once each
hour) checks for changes and does this automatically for you.
- The Auto matching algorithm only takes documents into account which
are NOT placed in your inbox (i.e. do not have any inbox tags assigned to
them). This ensures that the neural network only learns from
documents which you have correctly tagged before.
- The matching algorithm can only work if there is a correlation
between the tag, correspondent, document type, or storage path and
the document itself. Your bank statements usually contain your bank
account number and the name of the bank, so this works reasonably
well. However, tags such as "TODO" cannot be automatically
assigned.
- The matching algorithm needs a reasonable number of documents to
identify when to assign tags, correspondents, storage paths, and
types. If one out of a thousand documents has the correspondent
"Very obscure web shop I bought something five years ago", it will
probably not assign this correspondent automatically if you buy
something from them again. The more documents, the better.
- Paperless also needs a reasonable amount of negative examples to
decide when not to assign a certain tag, correspondent, document
type, or storage path. This will usually be the case as you start
filling up paperless with documents. Example: If all your documents
are either from "Webshop" or "Bank", paperless will assign one
of these correspondents to ANY new document, if both are set to
automatic matching.
## Hooking into the consumption process {#consume-hooks}
@@ -243,12 +243,12 @@ webserver:
Troubleshooting:
- Monitor the Docker Compose log
`cd ~/paperless-ngx; docker compose logs -f`
- Check your script's permissions, e.g. in case of a permission error
`sudo chmod 755 post-consumption-example.sh`
- Pipe your script's output to a log file, e.g.
`echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log`
- Monitor the Docker Compose log
`cd ~/paperless-ngx; docker compose logs -f`
- Check your script's permissions, e.g. in case of a permission error
`sudo chmod 755 post-consumption-example.sh`
- Pipe your script's output to a log file, e.g.
`echo "${DOCUMENT_ID}" | tee --append /usr/src/paperless/scripts/post-consumption-example.log`
## File name handling {#file-name-handling}
@@ -307,35 +307,35 @@ will create a directory structure as follows:
Paperless provides the following variables for use within filenames:
- `{{ asn }}`: The archive serial number of the document, or "none".
- `{{ correspondent }}`: The name of the correspondent, or "none".
- `{{ document_type }}`: The name of the document type, or "none".
- `{{ tag_list }}`: A comma separated list of all tags assigned to the
document.
- `{{ title }}`: The title of the document.
- `{{ created }}`: The full date (ISO 8601 format, e.g. `2024-03-14`) the document was created.
- `{{ created_year }}`: Year created only, formatted as the year with
century.
- `{{ created_year_short }}`: Year created only, formatted as the year
without century, zero padded.
- `{{ created_month }}`: Month created only (number 01-12).
- `{{ created_month_name }}`: Month created name, as per locale
- `{{ created_month_name_short }}`: Month created abbreviated name, as per
locale
- `{{ created_day }}`: Day created only (number 01-31).
- `{{ added }}`: The full date (ISO format) the document was added to
paperless.
- `{{ added_year }}`: Year added only.
- `{{ added_year_short }}`: Year added only, formatted as the year without
century, zero padded.
- `{{ added_month }}`: Month added only (number 01-12).
- `{{ added_month_name }}`: Month added name, as per locale
- `{{ added_month_name_short }}`: Month added abbreviated name, as per
locale
- `{{ added_day }}`: Day added only (number 01-31).
- `{{ owner_username }}`: Username of document owner, if any, or "none"
- `{{ original_name }}`: Document original filename, minus the extension, if any, or "none"
- `{{ doc_pk }}`: The paperless identifier (primary key) for the document.
- `{{ asn }}`: The archive serial number of the document, or "none".
- `{{ correspondent }}`: The name of the correspondent, or "none".
- `{{ document_type }}`: The name of the document type, or "none".
- `{{ tag_list }}`: A comma separated list of all tags assigned to the
document.
- `{{ title }}`: The title of the document.
- `{{ created }}`: The full date (ISO 8601 format, e.g. `2024-03-14`) the document was created.
- `{{ created_year }}`: Year created only, formatted as the year with
century.
- `{{ created_year_short }}`: Year created only, formatted as the year
without century, zero padded.
- `{{ created_month }}`: Month created only (number 01-12).
- `{{ created_month_name }}`: Month created name, as per locale
- `{{ created_month_name_short }}`: Month created abbreviated name, as per
locale
- `{{ created_day }}`: Day created only (number 01-31).
- `{{ added }}`: The full date (ISO format) the document was added to
paperless.
- `{{ added_year }}`: Year added only.
- `{{ added_year_short }}`: Year added only, formatted as the year without
century, zero padded.
- `{{ added_month }}`: Month added only (number 01-12).
- `{{ added_month_name }}`: Month added name, as per locale
- `{{ added_month_name_short }}`: Month added abbreviated name, as per
locale
- `{{ added_day }}`: Day added only (number 01-31).
- `{{ owner_username }}`: Username of document owner, if any, or "none"
- `{{ original_name }}`: Document original filename, minus the extension, if any, or "none"
- `{{ doc_pk }}`: The paperless identifier (primary key) for the document.
!!! warning
@@ -388,10 +388,10 @@ before empty placeholders are removed as well, empty directories are omitted.
When a single storage layout is not sufficient for your use case, storage paths allow for a more complex
structure to set precisely where each document is stored in the file system.
- Each storage path is a [`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) and
follows the rules described above
- Each document is assigned a storage path using the matching algorithms described above, but can be
overwritten at any time
- Each storage path is a [`PAPERLESS_FILENAME_FORMAT`](configuration.md#PAPERLESS_FILENAME_FORMAT) and
follows the rules described above
- Each document is assigned a storage path using the matching algorithms described above, but can be
overwritten at any time
For example, you could define the following two storage paths:
@@ -457,13 +457,13 @@ The `get_cf_value` filter retrieves a value from custom field data with optional
###### Parameters
- `custom_fields`: This _must_ be the provided custom field data
- `name` (str): Name of the custom field to retrieve
- `default` (str, optional): Default value to return if field is not found or has no value
- `custom_fields`: This _must_ be the provided custom field data
- `name` (str): Name of the custom field to retrieve
- `default` (str, optional): Default value to return if field is not found or has no value
###### Returns
- `str | None`: The field value, default value, or `None` if neither exists
- `str | None`: The field value, default value, or `None` if neither exists
###### Examples
@@ -487,12 +487,12 @@ The `datetime` filter formats a datetime string or datetime object using Python'
###### Parameters
- `value` (str | datetime): Date/time value to format (strings will be parsed automatically)
- `format` (str): Python strftime format string
- `value` (str | datetime): Date/time value to format (strings will be parsed automatically)
- `format` (str): Python strftime format string
###### Returns
- `str`: Formatted datetime string
- `str`: Formatted datetime string
###### Examples
@@ -525,13 +525,13 @@ An ISO string can also be provided to control the output format.
###### Parameters
- `value` (date | datetime | str): Date, datetime object or ISO string to format (datetime should be timezone-aware)
- `format` (str): Format type - either a Babel preset ('short', 'medium', 'long', 'full') or custom pattern
- `locale` (str): Locale code for localization (e.g., 'en_US', 'fr_FR', 'de_DE')
- `value` (date | datetime | str): Date, datetime object or ISO string to format (datetime should be timezone-aware)
- `format` (str): Format type - either a Babel preset ('short', 'medium', 'long', 'full') or custom pattern
- `locale` (str): Locale code for localization (e.g., 'en_US', 'fr_FR', 'de_DE')
###### Returns
- `str`: Localized, formatted date string
- `str`: Localized, formatted date string
###### Examples
@@ -565,15 +565,15 @@ See the [supported format codes](https://unicode.org/reports/tr35/tr35-dates.htm
### Format Presets
- **short**: Abbreviated format (e.g., "1/15/24")
- **medium**: Medium-length format (e.g., "Jan 15, 2024")
- **long**: Long format with full month name (e.g., "January 15, 2024")
- **full**: Full format including day of week (e.g., "Monday, January 15, 2024")
- **short**: Abbreviated format (e.g., "1/15/24")
- **medium**: Medium-length format (e.g., "Jan 15, 2024")
- **long**: Long format with full month name (e.g., "January 15, 2024")
- **full**: Full format including day of week (e.g., "Monday, January 15, 2024")
#### Additional Variables
- `{{ tag_name_list }}`: A list of tag names applied to the document, ordered by the tag name. Note this is a list, not a single string
- `{{ custom_fields }}`: A mapping of custom field names to their type and value. A user can access the mapping by field name or check if a field is applied by checking its existence in the variable.
- `{{ tag_name_list }}`: A list of tag names applied to the document, ordered by the tag name. Note this is a list, not a single string
- `{{ custom_fields }}`: A mapping of custom field names to their type and value. A user can access the mapping by field name or check if a field is applied by checking its existence in the variable.
!!! tip
@@ -675,15 +675,15 @@ installation, you can use volumes to accomplish this:
```yaml
services:
# ...
webserver:
environment:
- PAPERLESS_ENABLE_FLOWER
ports:
- 5555:5555 # (2)!
# ...
volumes:
- /path/to/my/flowerconfig.py:/usr/src/paperless/src/paperless/flowerconfig.py:ro # (1)!
webserver:
environment:
- PAPERLESS_ENABLE_FLOWER
ports:
- 5555:5555 # (2)!
# ...
volumes:
- /path/to/my/flowerconfig.py:/usr/src/paperless/src/paperless/flowerconfig.py:ro # (1)!
```
1. Note the `:ro` tag means the file will be mounted as read only.
@@ -714,11 +714,11 @@ For example, using Docker Compose:
```yaml
services:
# ...
webserver:
# ...
volumes:
- /path/to/my/scripts:/custom-cont-init.d:ro # (1)!
webserver:
# ...
volumes:
- /path/to/my/scripts:/custom-cont-init.d:ro # (1)!
```
1. Note the `:ro` tag means the folder will be mounted as read only. This is for extra security against changes
@@ -771,16 +771,16 @@ Paperless is able to utilize barcodes for automatically performing some tasks.
At this time, the library utilized for detection of barcodes supports the following types:
- EAN-13/UPC-A
- UPC-E
- EAN-8
- Code 128
- Code 93
- Code 39
- Codabar
- Interleaved 2 of 5
- QR Code
- SQ Code
- EAN-13/UPC-A
- UPC-E
- EAN-8
- Code 128
- Code 93
- Code 39
- Codabar
- Interleaved 2 of 5
- QR Code
- SQ Code
For usage in Paperless, the type of barcode does not matter, only its contents.
@@ -793,8 +793,8 @@ below.
If document splitting is enabled, Paperless splits _after_ a separator barcode by default.
This means:
- any page containing the configured separator barcode starts a new document, starting with the **next** page
- pages containing the separator barcode are discarded
- any page containing the configured separator barcode starts a new document, starting with the **next** page
- pages containing the separator barcode are discarded
This is intended for dedicated separator sheets such as PATCH-T pages.
@@ -831,10 +831,10 @@ to `true`.
When enabled, documents will be split at pages containing tag barcodes, similar to how
ASN barcodes work. Key features:
- The page with the tag barcode is **retained** in the resulting document
- **Each split document extracts its own tags** - only tags on pages within that document are assigned
- Multiple tag barcodes can trigger multiple splits in the same document
- Works seamlessly with ASN barcodes - each split document gets its own ASN and tags
- The page with the tag barcode is **retained** in the resulting document
- **Each split document extracts its own tags** - only tags on pages within that document are assigned
- Multiple tag barcodes can trigger multiple splits in the same document
- Works seamlessly with ASN barcodes - each split document gets its own ASN and tags
This is useful for batch scanning where you place tag barcode pages between different
documents to both separate and categorize them in a single operation.
@@ -996,9 +996,9 @@ If using docker, you'll need to add the following volume mounts to your `docker-
```yaml
webserver:
volumes:
- /home/user/.gnupg/pubring.gpg:/usr/src/paperless/.gnupg/pubring.gpg
- <path to gpg-agent socket>:/usr/src/paperless/.gnupg/S.gpg-agent
volumes:
- /home/user/.gnupg/pubring.gpg:/usr/src/paperless/.gnupg/pubring.gpg
- <path to gpg-agent socket>:/usr/src/paperless/.gnupg/S.gpg-agent
```
For a 'bare-metal' installation no further configuration is necessary. If you
@@ -1006,9 +1006,9 @@ want to use a separate `GNUPG_HOME`, you can do so by configuring the [PAPERLESS
### Troubleshooting
- Make sure that `gpg-agent` is running on your host machine
- Make sure that encryption and decryption work from inside the container using the `gpg` commands from above.
- Check that all files in `/usr/src/paperless/.gnupg` have correct permissions
- Make sure that `gpg-agent` is running on your host machine
- Make sure that encryption and decryption work from inside the container using the `gpg` commands from above.
- Check that all files in `/usr/src/paperless/.gnupg` have correct permissions
```shell
paperless@9da1865df327:~/.gnupg$ ls -al

View File

@@ -66,10 +66,10 @@ Full text searching is available on the `/api/documents/` endpoint. Two
specific query parameters cause the API to return full text search
results:
- `/api/documents/?query=your%20search%20query`: Search for a document
using a full text query. For details on the syntax, see [Basic Usage - Searching](usage.md#basic-usage_searching).
- `/api/documents/?more_like_id=1234`: Search for documents similar to
the document with id 1234.
- `/api/documents/?query=your%20search%20query`: Search for a document
using a full text query. For details on the syntax, see [Basic Usage - Searching](usage.md#basic-usage_searching).
- `/api/documents/?more_like_id=1234`: Search for documents similar to
the document with id 1234.
Pagination works exactly the same as it does for normal requests on this
endpoint.
@@ -106,12 +106,12 @@ attribute with various information about the search results:
}
```
- `score` is an indication of how well this document matches the query
relative to the other search results.
- `highlights` is an excerpt from the document content and highlights
the search terms with `<span>` tags as shown above.
- `rank` is the index of the search results. The first result will
have rank 0.
- `score` is an indication of how well this document matches the query
relative to the other search results.
- `highlights` is an excerpt from the document content and highlights
the search terms with `<span>` tags as shown above.
- `rank` is the index of the search results. The first result will
have rank 0.
### Filtering by custom fields
@@ -122,33 +122,33 @@ use cases:
1. Documents with a custom field "due" (date) between Aug 1, 2024 and
Sept 1, 2024 (inclusive):
`?custom_field_query=["due", "range", ["2024-08-01", "2024-09-01"]]`
`?custom_field_query=["due", "range", ["2024-08-01", "2024-09-01"]]`
2. Documents with a custom field "customer" (text) that equals "bob"
(case sensitive):
`?custom_field_query=["customer", "exact", "bob"]`
`?custom_field_query=["customer", "exact", "bob"]`
3. Documents with a custom field "answered" (boolean) set to `true`:
`?custom_field_query=["answered", "exact", true]`
`?custom_field_query=["answered", "exact", true]`
4. Documents with a custom field "favorite animal" (select) set to either
"cat" or "dog":
`?custom_field_query=["favorite animal", "in", ["cat", "dog"]]`
`?custom_field_query=["favorite animal", "in", ["cat", "dog"]]`
5. Documents with a custom field "address" (text) that is empty:
`?custom_field_query=["OR", [["address", "isnull", true], ["address", "exact", ""]]]`
`?custom_field_query=["OR", [["address", "isnull", true], ["address", "exact", ""]]]`
6. Documents that don't have a field called "foo":
`?custom_field_query=["foo", "exists", false]`
`?custom_field_query=["foo", "exists", false]`
7. Documents that have document links "references" to both document 3 and 7:
`?custom_field_query=["references", "contains", [3, 7]]`
`?custom_field_query=["references", "contains", [3, 7]]`
All field types support basic operations including `exact`, `in`, `isnull`,
and `exists`. String, URL, and monetary fields support case-insensitive
@@ -164,8 +164,8 @@ Get auto completions for a partial search term.
Query parameters:
- `term`: The incomplete term.
- `limit`: Amount of results. Defaults to 10.
- `term`: The incomplete term.
- `limit`: Amount of results. Defaults to 10.
Results returned by the endpoint are ordered by importance of the term
in the document index. The first result is the term that has the highest
@@ -189,19 +189,19 @@ from there.
The endpoint supports the following optional form fields:
- `title`: Specify a title that the consumer should use for the
document.
- `created`: Specify a DateTime where the document was created (e.g.
"2016-04-19" or "2016-04-19 06:15:00+02:00").
- `correspondent`: Specify the ID of a correspondent that the consumer
should use for the document.
- `document_type`: Similar to correspondent.
- `storage_path`: Similar to correspondent.
- `tags`: Similar to correspondent. Specify this multiple times to
have multiple tags added to the document.
- `archive_serial_number`: An optional archive serial number to set.
- `custom_fields`: Either an array of custom field ids to assign (with an empty
value) to the document or an object mapping field id -> value.
- `title`: Specify a title that the consumer should use for the
document.
- `created`: Specify a DateTime where the document was created (e.g.
"2016-04-19" or "2016-04-19 06:15:00+02:00").
- `correspondent`: Specify the ID of a correspondent that the consumer
should use for the document.
- `document_type`: Similar to correspondent.
- `storage_path`: Similar to correspondent.
- `tags`: Similar to correspondent. Specify this multiple times to
have multiple tags added to the document.
- `archive_serial_number`: An optional archive serial number to set.
- `custom_fields`: Either an array of custom field ids to assign (with an empty
value) to the document or an object mapping field id -> value.
The endpoint will immediately return HTTP 200 if the document consumption
process was started successfully, with the UUID of the consumption task
@@ -215,16 +215,16 @@ consumption including the ID of a created document if consumption succeeded.
Document versions are file-level versions linked to one root document.
- Root document metadata (title, tags, correspondent, document type, storage path, custom fields, permissions) remains shared.
- Version-specific file data (file, mime type, checksums, archive info, extracted text content) belongs to the selected/latest version.
- Root document metadata (title, tags, correspondent, document type, storage path, custom fields, permissions) remains shared.
- Version-specific file data (file, mime type, checksums, archive info, extracted text content) belongs to the selected/latest version.
Version-aware endpoints:
- `GET /api/documents/{id}/`: returns root document data; `content` resolves to latest version content by default. Use `?version={version_id}` to resolve content for a specific version.
- `PATCH /api/documents/{id}/`: content updates target the selected version (`?version={version_id}`) or latest version by default; non-content metadata updates target the root document.
- `GET /api/documents/{id}/download/`, `GET /api/documents/{id}/preview/`, `GET /api/documents/{id}/thumb/`, `GET /api/documents/{id}/metadata/`: accept `?version={version_id}`.
- `POST /api/documents/{id}/update_version/`: uploads a new version using multipart form field `document` and optional `version_label`.
- `DELETE /api/documents/{root_id}/versions/{version_id}/`: deletes a non-root version.
- `GET /api/documents/{id}/`: returns root document data; `content` resolves to latest version content by default. Use `?version={version_id}` to resolve content for a specific version.
- `PATCH /api/documents/{id}/`: content updates target the selected version (`?version={version_id}`) or latest version by default; non-content metadata updates target the root document.
- `GET /api/documents/{id}/download/`, `GET /api/documents/{id}/preview/`, `GET /api/documents/{id}/thumb/`, `GET /api/documents/{id}/metadata/`: accept `?version={version_id}`.
- `POST /api/documents/{id}/update_version/`: uploads a new version using multipart form field `document` and optional `version_label`.
- `DELETE /api/documents/{root_id}/versions/{version_id}/`: deletes a non-root version.
## Permissions
@@ -282,38 +282,74 @@ a json payload of the format:
The following methods are supported:
- `set_correspondent`
- Requires `parameters`: `{ "correspondent": CORRESPONDENT_ID }`
- `set_document_type`
- Requires `parameters`: `{ "document_type": DOCUMENT_TYPE_ID }`
- `set_storage_path`
- Requires `parameters`: `{ "storage_path": STORAGE_PATH_ID }`
- `add_tag`
- Requires `parameters`: `{ "tag": TAG_ID }`
- `remove_tag`
- Requires `parameters`: `{ "tag": TAG_ID }`
- `modify_tags`
- Requires `parameters`: `{ "add_tags": [LIST_OF_TAG_IDS] }` and `{ "remove_tags": [LIST_OF_TAG_IDS] }`
- `delete`
- No `parameters` required
- `reprocess`
- No `parameters` required
- `set_permissions`
- Requires `parameters`:
- `"set_permissions": PERMISSIONS_OBJ` (see format [above](#permissions)) and / or
- `"owner": OWNER_ID or null`
- `"merge": true or false` (defaults to false)
- The `merge` flag determines if the supplied permissions will overwrite all existing permissions (including
removing them) or be merged with existing permissions.
- `modify_custom_fields`
- Requires `parameters`:
- `"add_custom_fields": { CUSTOM_FIELD_ID: VALUE }`: JSON object consisting of custom field id:value pairs to add to the document, can also be a list of custom field IDs
to add with empty values.
- `"remove_custom_fields": [CUSTOM_FIELD_ID]`: custom field ids to remove from the document.
#### Document-editing operations
Beginning with version 10, the API provides individual endpoints for document-editing operations (`merge`, `rotate`, `edit_pdf`, etc.), so their documentation can be found in the API spec / viewer. Legacy document-editing methods via `/api/documents/bulk_edit/` are still supported for compatibility but are deprecated; clients should migrate to the individual endpoints before they are removed in a future version.
- `set_correspondent`
- Requires `parameters`: `{ "correspondent": CORRESPONDENT_ID }`
- `set_document_type`
- Requires `parameters`: `{ "document_type": DOCUMENT_TYPE_ID }`
- `set_storage_path`
- Requires `parameters`: `{ "storage_path": STORAGE_PATH_ID }`
- `add_tag`
- Requires `parameters`: `{ "tag": TAG_ID }`
- `remove_tag`
- Requires `parameters`: `{ "tag": TAG_ID }`
- `modify_tags`
- Requires `parameters`: `{ "add_tags": [LIST_OF_TAG_IDS] }` and `{ "remove_tags": [LIST_OF_TAG_IDS] }`
- `delete`
- No `parameters` required
- `reprocess`
- No `parameters` required
- `set_permissions`
- Requires `parameters`:
- `"set_permissions": PERMISSIONS_OBJ` (see format [above](#permissions)) and / or
- `"owner": OWNER_ID or null`
- `"merge": true or false` (defaults to false)
- The `merge` flag determines if the supplied permissions will overwrite all existing permissions (including
removing them) or be merged with existing permissions.
- `edit_pdf`
- Requires `parameters`:
- `"doc_ids": [DOCUMENT_ID]` A list of a single document ID to edit.
- `"operations": [OPERATION, ...]` A list of operations to perform on the documents. Each operation is a dictionary
with the following keys:
- `"page": PAGE_NUMBER` The page number to edit (1-based).
- `"rotate": DEGREES` Optional rotation in degrees (90, 180, 270).
- `"doc": OUTPUT_DOCUMENT_INDEX` Optional index of the output document for split operations.
- Optional `parameters`:
- `"delete_original": true` to delete the original documents after editing.
- `"update_document": true` to add the edited PDF as a new version of the root document.
- `"include_metadata": true` to copy metadata from the original document to the edited document.
- `remove_password`
- Requires `parameters`:
- `"password": "PASSWORD_STRING"` The password to remove from the PDF documents.
- Optional `parameters`:
- `"update_document": true` to add the password-less PDF as a new version of the root document.
- `"delete_original": true` to delete the original document after editing.
- `"include_metadata": true` to copy metadata from the original document to the new password-less document.
- `merge`
- No additional `parameters` required.
- The ordering of the merged document is determined by the list of IDs.
- Optional `parameters`:
- `"metadata_document_id": DOC_ID` apply metadata (tags, correspondent, etc.) from this document to the merged document.
- `"delete_originals": true` to delete the original documents. This requires the calling user being the owner of
all documents that are merged.
- `split`
- Requires `parameters`:
- `"pages": [..]` The list should be a list of pages and/or a ranges, separated by commas e.g. `"[1,2-3,4,5-7]"`
- Optional `parameters`:
- `"delete_originals": true` to delete the original document after consumption. This requires the calling user being the owner of
the document.
- The split operation only accepts a single document.
- `rotate`
- Requires `parameters`:
- `"degrees": DEGREES`. Must be an integer i.e. 90, 180, 270
- `delete_pages`
- Requires `parameters`:
- `"pages": [..]` The list should be a list of integers e.g. `"[2,3,4]"`
- The delete_pages operation only accepts a single document.
- `modify_custom_fields`
- Requires `parameters`:
- `"add_custom_fields": { CUSTOM_FIELD_ID: VALUE }`: JSON object consisting of custom field id:value pairs to add to the document, can also be a list of custom field IDs
to add with empty values.
- `"remove_custom_fields": [CUSTOM_FIELD_ID]`: custom field ids to remove from the document.
### Objects
@@ -333,38 +369,41 @@ operations, using the endpoint: `/api/bulk_edit_objects/`, which requires a json
## API Versioning
The REST API is versioned.
The REST API is versioned since Paperless-ngx 1.3.0.
- Versioning ensures that changes to the API don't break older
clients.
- Clients specify the specific version of the API they wish to use
with every request and Paperless will handle the request using the
specified API version.
- Even if the underlying data model changes, supported older API
versions continue to serve compatible data.
- If no version is specified, Paperless serves the configured default
API version (currently `10`).
- Supported API versions are currently `9` and `10`.
- Versioning ensures that changes to the API don't break older
clients.
- Clients specify the specific version of the API they wish to use
with every request and Paperless will handle the request using the
specified API version.
- Even if the underlying data model changes, older API versions will
always serve compatible data.
- If no version is specified, Paperless will serve version 1 to ensure
compatibility with older clients that do not request a specific API
version.
API versions are specified by submitting an additional HTTP `Accept`
header with every request:
```
Accept: application/json; version=10
Accept: application/json; version=6
```
If an invalid version is specified, Paperless responds with
`406 Not Acceptable` and an error message in the body.
If an invalid version is specified, Paperless 1.3.0 will respond with
"406 Not Acceptable" and an error message in the body. Earlier
versions of Paperless will serve API version 1 regardless of whether a
version is specified via the `Accept` header.
If a client wishes to verify whether it is compatible with any given
server, the following procedure should be performed:
1. Perform an _authenticated_ request against any API endpoint. The
server will add two custom headers to the response:
1. Perform an _authenticated_ request against any API endpoint. If the
server is on version 1.3.0 or newer, the server will add two custom
headers to the response:
```
X-Api-Version: 10
X-Version: <server-version>
X-Api-Version: 2
X-Version: 1.3.0
```
2. Determine whether the client is compatible with this server based on
@@ -384,56 +423,51 @@ Initial API version.
#### Version 2
- Added field `Tag.color`. This read/write string field contains a hex
color such as `#a6cee3`.
- Added read-only field `Tag.text_color`. This field contains the text
color to use for a specific tag, which is either black or white
depending on the brightness of `Tag.color`.
- Removed field `Tag.colour`.
- Added field `Tag.color`. This read/write string field contains a hex
color such as `#a6cee3`.
- Added read-only field `Tag.text_color`. This field contains the text
color to use for a specific tag, which is either black or white
depending on the brightness of `Tag.color`.
- Removed field `Tag.colour`.
#### Version 3
- Permissions endpoints have been added.
- The format of the `/api/ui_settings/` has changed.
#### Version 4
- Consumption templates were refactored to workflows and API endpoints
changed as such.
#### Version 5
- Added bulk deletion methods for documents and objects.
#### Version 6
- Moved acknowledge tasks endpoint to be under `/api/tasks/acknowledge/`.
#### Version 7
- The format of select type custom fields has changed to return the options
as an array of objects with `id` and `label` fields as opposed to a simple
list of strings. When creating or updating a custom field value of a
document for a select type custom field, the value should be the `id` of
the option whereas previously was the index of the option.
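In practice this changes the payload shape when setting such a field; an abbreviated sketch (field id and option id are made up):

```python
# API version <= 6: the value was the option's index in the list of strings.
old_payload = {"custom_fields": [{"field": 5, "value": 0}]}  # first option

# API version >= 7: the value is the option's `id` from the options array,
# e.g. options like [{"id": "abc123", "label": "Pending"}, ...].
new_payload = {"custom_fields": [{"field": 5, "value": "abc123"}]}
```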
#### Version 8
- The user field of document notes now returns a simplified user object
rather than just the user ID.
#### Version 9
- The document `created` field is now a date, not a datetime. The
`created_date` field is considered deprecated and will be removed in a
future version.
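Clients that talk to both older and newer servers can normalize the field; a small sketch:

```python
from datetime import date, datetime

def parse_created(value: str) -> date:
    """Accept both the old datetime form and the new date-only form."""
    try:
        return date.fromisoformat(value)  # API v9+: "2024-05-01"
    except ValueError:
        return datetime.fromisoformat(value).date()  # older: full datetime
```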
#### Version 10
- The `show_on_dashboard` and `show_in_sidebar` fields of saved views have been
removed. Relevant settings are now stored in the UISettings model. Compatibility is maintained
for versions < 10 until support for API v9 is dropped.
- Document-editing operations such as `merge`, `rotate`, and `edit_pdf` have been
moved from the bulk edit endpoint to their own individual endpoints. Using these methods via
the bulk edit endpoint is still supported for compatibility with versions < 10 until support
for API v9 is dropped.

File diff suppressed because it is too large

View File

@@ -8,17 +8,17 @@ common [OCR](#ocr) related settings and some frontend settings. If set, these wi
preference over the settings via environment variables. If not set, the environment setting
or applicable default will be utilized instead.
- If you run paperless on docker, `paperless.conf` is not used.
Rather, configure paperless by copying necessary options to
`docker-compose.env`.
- If you are running paperless on anything else, paperless will search
for the configuration file in these locations and use the first one
it finds:
- The environment variable `PAPERLESS_CONFIGURATION_PATH`
- `/path/to/paperless/paperless.conf`
- `/etc/paperless.conf`
- `/usr/local/etc/paperless.conf`
## Required services

View File

@@ -6,23 +6,23 @@ on Paperless-ngx.
Check out the source from GitHub. The repository is organized in the
following way:
- `main` always represents the latest release and will only see
changes when a new release is made.
- `dev` contains the code that will be in the next release.
- `feature-X` contains bigger changes that will be in some release, but
not necessarily the next one.
When making functional changes to Paperless-ngx, _always_ make your changes
on the `dev` branch.
Apart from that, the folder structure is as follows:
- `docs/` - Documentation.
- `src-ui/` - Code of the front end.
- `src/` - Code of the back end.
- `scripts/` - Various scripts that help with different parts of
development.
- `docker/` - Files required to build the docker image.
## Contributing to Paperless-ngx
@@ -94,17 +94,18 @@ first-time setup.
```
7. You can now either ...
- install Redis or
- use the included `scripts/start_services.sh` to use Docker to fire
up a Redis instance (and some other services such as Tika,
Gotenberg and a database server) or
- spin up a bare Redis container
```bash
docker run -d -p 6379:6379 --restart unless-stopped redis:latest
```
8. Continue with either back-end or front-end development or both :-).
@@ -117,9 +118,9 @@ work well for development, but you can use whatever you want.
Configure the IDE to use the `src/`-folder as the base source folder.
Configure the following launch configurations in your IDE:
- `uv run manage.py runserver`
- `uv run manage.py document_consumer`
- `uv run celery --app paperless worker -l DEBUG` (or any other log level)
To start them all:
@@ -145,11 +146,11 @@ pnpm ng build --configuration production
### Testing
- Run `pytest` in the `src/` directory to execute all tests. This also
generates a HTML coverage report. When running tests, `paperless.conf`
is loaded as well. However, the tests rely on the default
configuration. This is not ideal. But for now, make sure no settings
except for DEBUG are overridden when testing.
!!! note
@@ -253,14 +254,14 @@ these parts have to be translated separately.
### Front end localization
- The AngularJS front end does localization according to the [Angular
documentation](https://angular.io/guide/i18n).
- The source language of the project is "en_US".
- The source strings end up in the file `src-ui/messages.xlf`.
- The translated strings need to be placed in the
`src-ui/src/locale/` folder.
- In order to extract added or changed strings from the source files,
call `ng extract-i18n`.
Adding new languages requires adding the translated files in the
`src-ui/src/locale/` folder and adjusting a couple files.
@@ -306,18 +307,18 @@ A majority of the strings that appear in the back end appear only when
the admin is used. However, some of these are still shown on the front
end (such as error messages).
- The django application does localization according to the [Django
documentation](https://docs.djangoproject.com/en/3.1/topics/i18n/translation/).
- The source language of the project is "en_US".
- Localization files end up in the folder `src/locale/`.
- In order to extract strings from the application, call
`uv run manage.py makemessages -l en_US`. This is important after
making changes to translatable strings.
- The message files need to be compiled for them to show up in the
application. Call `uv run manage.py compilemessages` to do this.
The generated files don't get committed into git, since these are
derived artifacts. The build pipeline takes care of executing this
command.
Adding new languages requires adding the translated files in the
`src/locale/`-folder and adjusting the file
@@ -380,10 +381,10 @@ base code.
Paperless-ngx uses parsers to add documents. A parser is
responsible for:
- Retrieving the content from the original
- Creating a thumbnail
- _optional:_ Retrieving a created date from the original
- _optional:_ Creating an archived document from the original
Custom parsers can be added to Paperless-ngx to support more file types. In
order to do that, you need to write the parser itself and announce its
@@ -441,17 +442,17 @@ def myparser_consumer_declaration(sender, **kwargs):
}
```
- `parser` is a reference to a class that extends `DocumentParser`.
- `weight` is used whenever two or more parsers are able to parse a
file: The parser with the higher weight wins. This can be used to
override the parsers provided by Paperless-ngx.
- `mime_types` is a dictionary. The keys are the mime types your
parser supports and the value is the default file extension that
Paperless-ngx should use when storing files and serving them for
download. We could guess that from the file extensions, but some
mime types have many extensions associated with them and the Python
methods responsible for guessing the extension do not always return
the same value.
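Putting it together, a minimal parser might look like this (a sketch: the two placeholder helpers do not exist and stand in for your own extraction logic):

```python
from documents.parsers import DocumentParser

class MyParser(DocumentParser):
    logging_name = "paperless.parsing.myparser"

    def parse(self, document_path, mime_type, file_name=None):
        # Extract text from the original and store it on the parser.
        self.text = extract_text_somehow(document_path)  # placeholder helper

    def get_thumbnail(self, document_path, mime_type, file_name=None):
        # Return the path of a generated thumbnail image.
        return render_first_page_as_image(document_path)  # placeholder helper

def myparser_consumer_declaration(sender, **kwargs):
    return {
        "parser": MyParser,
        "weight": 10,  # higher than the built-in parsers to take precedence
        "mime_types": {"application/x-example": ".example"},
    }
```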
## Using Visual Studio Code devcontainer
@@ -470,8 +471,9 @@ To get started:
2. VS Code will prompt you with "Reopen in container". Do so and wait for the environment to start.
3. In case your host operating system is Windows:
- The Source Control view in Visual Studio Code might show: "The detected Git repository is potentially unsafe as the folder is owned by someone other than the current user." Use "Manage Unsafe Repositories" to fix this.
- Git might have detected modifications for all files, because Windows is using CRLF line endings. Run `git checkout .` in the container's terminal to fix this issue.
4. Initialize the project by running the task **Project Setup: Run all Init Tasks**. This
will initialize the database tables and create a superuser. Then you can compile the front end
@@ -536,12 +538,12 @@ class MyDateParserPlugin(DateParserPluginBase):
Your parser instance is initialized with a `DateParserConfig` object accessible via `self.config`. This provides:
- `languages: list[str]` - List of language codes for date parsing
- `timezone_str: str` - Timezone string for date localization
- `ignore_dates: set[datetime.date]` - Dates that should be filtered out
- `reference_time: datetime.datetime` - Current time for filtering future dates
- `filename_date_order: str | None` - Date order preference for filenames (e.g., "DMY", "MDY")
- `content_date_order: str` - Date order preference for content
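A sketch of a plugin consuming these settings (the base-class interface beyond `self.config` is assumed, and `my_date_finder` is a placeholder for your own extraction logic):

```python
import datetime

class MyDateParserPlugin(DateParserPluginBase):  # base class from above
    def parse(self, text: str) -> list[datetime.datetime]:
        candidates = my_date_finder(  # placeholder extraction function
            text,
            languages=self.config.languages,
            date_order=self.config.content_date_order,
        )
        return [
            dt
            for dt in candidates
            if dt.date() not in self.config.ignore_dates
            and dt <= self.config.reference_time  # drop future dates
        ]
```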
The base class provides two helper methods you can use:

View File

@@ -44,28 +44,28 @@ system. On Linux, chances are high that this location is
You can always drag those files out of that folder to use them
elsewhere. Here are a couple notes about that.
- Paperless-ngx never modifies your original documents. It keeps
checksums of all documents and uses a scheduled sanity checker to
check that they remain the same.
- By default, paperless uses the internal ID of each document as its
filename. This might not be very convenient for export. However, you
can adjust the way files are stored in paperless by
[configuring the filename format](advanced_usage.md#file-name-handling).
- [The exporter](administration.md#exporter) is
another easy way to get your files out of paperless with reasonable
file names.
## _What file types does paperless-ngx support?_
**A:** Currently, the following files are supported:
- PDF documents, PNG images, JPEG images, TIFF images, GIF images and
WebP images are processed with OCR and converted into PDF documents.
- Plain text documents are supported as well and are added verbatim to
paperless.
- With the optional Tika integration enabled (see [Tika configuration](https://docs.paperless-ngx.com/configuration#tika)),
Paperless also supports various Office documents (.docx, .doc, odt,
.ppt, .pptx, .odp, .xls, .xlsx, .ods).
Paperless-ngx determines the type of a file by inspecting its content
rather than its file extensions. However, files processed via the

View File

@@ -28,36 +28,36 @@ physical documents into a searchable online archive so you can keep, well, _less
## Features
- **Organize and index** your scanned documents with tags, correspondents, types, and more.
- _Your_ data is stored locally on _your_ server and is never transmitted or shared in any way, unless you explicitly choose to do so.
- Performs **OCR** on your documents, adding searchable and selectable text, even to documents scanned with only images.
- Utilizes the open-source Tesseract engine to recognize more than 100 languages.
- _New!_ Supports remote OCR with Azure AI (opt-in).
- Documents are saved as PDF/A format which is designed for long term storage, alongside the unaltered originals.
- Uses machine-learning to automatically add tags, correspondents and document types to your documents.
- **New**: Paperless-ngx can now leverage AI (Large Language Models or LLMs) for document suggestions. This is an optional feature that can be enabled (and is disabled by default).
- Supports PDF documents, images, plain text files, Office documents (Word, Excel, PowerPoint, and LibreOffice equivalents)[^1] and more.
- Paperless stores your documents plain on disk. Filenames and folders are managed by paperless and their format can be configured freely with different configurations assigned to different documents.
- **Beautiful, modern web application** that features:
- Customizable dashboard with statistics.
- Filtering by tags, correspondents, types, and more.
- Bulk editing of tags, correspondents, types and more.
- Drag-and-drop uploading of documents throughout the app.
- Customizable views can be saved and displayed on the dashboard and / or sidebar.
- Support for custom fields of various data types.
- Shareable public links with optional expiration.
- **Full text search** helps you find what you need:
- Auto completion suggests relevant words from your documents.
- Results are sorted by relevance to your search query.
- Highlighting shows you which parts of the document matched the query.
- Searching for similar documents ("More like this")
- **Email processing**[^1]: import documents from your email accounts:
- Configure multiple accounts and rules for each account.
- After processing, paperless can perform actions on the messages such as marking as read, deleting and more.
- A built-in robust **multi-user permissions** system that supports 'global' permissions as well as per document or object.
- A powerful workflow system that gives you even more control.
- **Optimized** for multi core systems: Paperless-ngx consumes multiple documents in parallel.
- The integrated sanity checker makes sure that your document archive is in good health.
[^1]: Office document and email consumption support is optional and provided by Apache Tika (see [configuration](https://docs.paperless-ngx.com/configuration/#tika))

View File

@@ -42,12 +42,12 @@ The `CONSUMER_BARCODE_SCANNER` setting has been removed. zxing-cpp is now the on
### Action Required
- If you were already using `CONSUMER_BARCODE_SCANNER=ZXING`, simply remove the setting.
- If you had `CONSUMER_BARCODE_SCANNER=PYZBAR` or were using the default, no functional changes are needed beyond
removing the setting. zxing-cpp supports all the same barcode formats and you should see improved detection
reliability.
- The `libzbar0` / `libzbar-dev` system packages are no longer required and can be removed from any custom Docker
images or host installations.
## Database Engine
@@ -72,30 +72,6 @@ PAPERLESS_DBHOST: postgres
See [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) for accepted values.
## Management Command: `--skip-checks` Removed
The `--skip-checks` flag has been removed from all Paperless-ngx management commands
(`document_exporter`, `document_importer`, `document_retagger`, `document_archiver`,
`document_thumbnails`, `document_index`, `document_renamer`, `document_sanity_checker`,
`document_fuzzy_match`, and others).
These commands now set `requires_system_checks = []` internally, which both skips
redundant checks at runtime (they are already run as a dedicated step during Docker
startup via `init-system-checks`) and removes `--skip-checks` from the argument parser.
#### Action Required
Remove `--skip-checks` from any scripts, cron jobs, or automation that invokes
these commands:
```bash
# v2
document_exporter /backup --skip-checks
# v3
document_exporter /backup
```
## Database Advanced Options
The individual SSL, timeout, and pooling variables have been removed in favor of a

View File

@@ -44,8 +44,8 @@ account. In short, it automates the [Docker Compose setup](#docker) described be
#### Prerequisites
- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
- macOS users will need [GNU sed](https://formulae.brew.sh/formula/gnu-sed) with support for running as `sed` as well as [wget](https://formulae.brew.sh/formula/wget).
#### Run the installation script
@@ -63,7 +63,7 @@ credentials you provided during the installation script.
#### Prerequisites
- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
#### Installation
@@ -101,7 +101,7 @@ credentials you provided during the installation script.
```yaml
ports:
- 8010:8000
```
3. Modify `docker-compose.env` with any configuration options you need.
@@ -145,11 +145,11 @@ a [superuser](usage.md#superusers) account.
If you want to run Paperless as a rootless container, make this
change in `docker-compose.yml`:
- Set the `user` running the container to map to the `paperless`
user in the container. This value (`user_id` below) should be
the same ID that `USERMAP_UID` and `USERMAP_GID` are set to in
`docker-compose.env`. See `USERMAP_UID` and `USERMAP_GID`
[here](configuration.md#docker).
Your entry for Paperless should contain something like:
@@ -171,25 +171,26 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
#### Prerequisites
- Paperless runs on Linux only, Windows is not supported.
- Python 3.11, 3.12, 3.13, or 3.14 is required. As a policy, Paperless-ngx aims to support at least the three most recent Python versions and drops support for versions as they reach end-of-life. Newer versions may work, but some dependencies may not be fully compatible.
#### Installation
1. Install dependencies. Paperless requires the following packages:
- `python3`
- `python3-pip`
- `python3-dev`
- `default-libmysqlclient-dev` for MariaDB
- `pkg-config` for mysqlclient (python dependency)
- `fonts-liberation` for generating thumbnails for plain text
files
- `imagemagick` >= 6 for PDF conversion
- `gnupg` for handling encrypted documents
- `libpq-dev` for PostgreSQL
- `libmagic-dev` for mime type detection
- `mariadb-client` for MariaDB compile time
- `poppler-utils` for barcode detection
Use this list for your preferred package management:
@@ -199,17 +200,18 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
These dependencies are required for OCRmyPDF, which is used for text
recognition.
- `unpaper`
- `ghostscript`
- `icc-profiles-free`
- `qpdf`
- `liblept5`
- `libxml2`
- `pngquant` (suggested for certain PDF image optimizations)
- `zlib1g`
- `tesseract-ocr` >= 4.0.0 for OCR
- `tesseract-ocr` language packs (`tesseract-ocr-eng`,
`tesseract-ocr-deu`, etc)
Use this list for your preferred package management:
@@ -218,14 +220,16 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
```
On Raspberry Pi, these libraries are required as well:
- `libatlas-base-dev`
- `libxslt1-dev`
- `mime-support`
You will also need these for installing some of the python dependencies:
- `build-essential`
- `python3-setuptools`
- `python3-wheel`
Use this list for your preferred package management:
@@ -275,41 +279,44 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
6. Configure Paperless-ngx. See [configuration](configuration.md) for details.
Edit the included `paperless.conf` and adjust the settings to your
needs. Required settings for getting Paperless-ngx running are:
- [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) should point to your Redis server, such as
`redis://localhost:6379`.
- [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) is optional, and should be one of `postgres`,
`mariadb`, or `sqlite`
- [`PAPERLESS_DBHOST`](configuration.md#PAPERLESS_DBHOST) should be the hostname on which your
PostgreSQL server is running. Do not configure this to use
SQLite instead. Also configure port, database name, user and
password as necessary.
- [`PAPERLESS_CONSUMPTION_DIR`](configuration.md#PAPERLESS_CONSUMPTION_DIR) should point to the folder
that Paperless-ngx should watch for incoming documents.
Likewise, [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) and
[`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) define where Paperless-ngx stores its data.
If needed, these can point to the same directory.
- [`PAPERLESS_SECRET_KEY`](configuration.md#PAPERLESS_SECRET_KEY) should be a random sequence of
characters. It's used for authentication. If it is not set to a
private random value, third parties can forge authentication credentials.
- Set [`PAPERLESS_URL`](configuration.md#PAPERLESS_URL) if you are behind a reverse proxy. This should
point to your domain. Please see
[configuration](configuration.md) for more
information.
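A quick way to generate a suitable random value for `PAPERLESS_SECRET_KEY` (one option; any sufficiently long random string works):

```python
import secrets

# Prints a URL-safe random string suitable for use as a secret key.
print(secrets.token_urlsafe(64))
```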
You can make many more adjustments, especially for OCR.
The following options are recommended for most users:
- Set [`PAPERLESS_OCR_LANGUAGE`](configuration.md#PAPERLESS_OCR_LANGUAGE) to the language most of your
documents are written in.
- Set [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to your local time zone.
!!! warning
Ensure your Redis instance [is secured](https://redis.io/docs/latest/operate/oss_and_stack/management/security/).
7. Create the following directories if they do not already exist:
- `/opt/paperless/media`
- `/opt/paperless/data`
- `/opt/paperless/consume`
Adjust these paths if you configured different folders.
Then verify that the `paperless` user has write permissions:
@@ -384,10 +391,11 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
starting point.
Paperless needs:
- The `webserver` script to run the webserver.
- The `consumer` script to watch the input folder.
- The `taskqueue` script for background workers (document consumption, etc.).
- The `scheduler` script for periodic tasks such as email checking.
!!! note
@@ -493,19 +501,19 @@ your setup depending on how you installed Paperless.
This section describes how to update an existing Paperless Docker
installation. Keep these points in mind:
- Read the [changelog](changelog.md) and
take note of breaking changes.
- Decide whether to stay on SQLite or migrate to PostgreSQL.
Both work fine with Paperless-ngx.
However, if you already have a database server running
for other services, you might as well use it for Paperless as well.
- The task scheduler of Paperless, which is used to execute periodic
tasks such as email checking and maintenance, requires a
[Redis](https://redis.io/) message broker instance. The
Docker Compose route takes care of that.
- The layout of the folder structure for your documents and data
remains the same, so you can plug your old Docker volumes into
paperless-ngx and expect it to find everything where it should be.
Migration to Paperless-ngx is then performed in a few simple steps:
@@ -590,6 +598,7 @@ commands as well.
1. Stop and remove the Paperless container.
2. If using an external database, stop that container.
3. Update Redis configuration.
1. If `REDIS_URL` is already set, change it to [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS)
and continue to step 4.
@@ -601,18 +610,22 @@ commands as well.
the new Redis container.
4. Update user mapping.
1. If set, change the environment variable `PUID` to `USERMAP_UID`.
1. If set, change the environment variable `PGID` to `USERMAP_GID`.
5. Update configuration paths.
1. Set the environment variable [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) to `/config`.
6. Update media paths.
1. Set the environment variable [`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) to
`/data/media`.
7. Update timezone.
1. Set the environment variable [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to the same
value as `TZ`.
@@ -626,33 +639,33 @@ commands as well.
Paperless runs on Raspberry Pi. Some tasks can be slow on lower-powered
hardware, but a few settings can improve performance:
- Stick with SQLite to save some resources. See [troubleshooting](troubleshooting.md#log-reports-creating-paperlesstask-failed)
if you encounter issues with SQLite locking.
- If you do not need the filesystem-based consumer, consider disabling it
entirely by setting [`PAPERLESS_CONSUMER_DISABLE`](configuration.md#PAPERLESS_CONSUMER_DISABLE) to `true`.
- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that Paperless
OCRs only the first page of your documents. In most cases, this page
contains enough information to be able to find it.
- [`PAPERLESS_TASK_WORKERS`](configuration.md#PAPERLESS_TASK_WORKERS) and [`PAPERLESS_THREADS_PER_WORKER`](configuration.md#PAPERLESS_THREADS_PER_WORKER) are
configured to use all cores. The Raspberry Pi models 3 and up have 4
cores, meaning that Paperless will use 2 workers and 2 threads per
worker. This may result in sluggish response times during
consumption, so you might want to lower these settings (example: 2
workers and 1 thread to always have some computing power left for
other tasks).
- Keep [`PAPERLESS_OCR_MODE`](configuration.md#PAPERLESS_OCR_MODE) at its default value `skip` and consider
OCRing your documents before feeding them into Paperless. Some
scanners are able to do this!
- Set [`PAPERLESS_OCR_SKIP_ARCHIVE_FILE`](configuration.md#PAPERLESS_OCR_SKIP_ARCHIVE_FILE) to `with_text` to skip archive
file generation for already OCRed documents, or `always` to skip it
for all documents.
- If you want to perform OCR on the device, consider using
`PAPERLESS_OCR_CLEAN=none`. This will speed up OCR times and use
less memory at the expense of slightly worse OCR results.
- If using Docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
- Consider setting [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) to false, to disable the
more advanced language processing, which can take more memory and
processing time.
For details, refer to [configuration](configuration.md).

View File

@@ -4,27 +4,27 @@
Check for the following issues:
- Ensure that the directory you're putting your documents in is the
folder paperless is watching. With docker, this setting is performed
in the `docker-compose.yml` file. Without Docker, look at the
`CONSUMPTION_DIR` setting. Don't adjust this setting if you're
using docker.
- Ensure that redis is up and running. Paperless does its task
processing asynchronously, and for documents to arrive at the task
processor, it needs redis to run.
- Ensure that the task processor is running. Docker does this
automatically. Manually invoke the task processor by executing
```shell-session
celery --app paperless worker
```
- Look at the output of paperless and inspect it for any errors.
- Go to the admin interface, and check if there are failed tasks. If
so, the tasks will contain an error message.
## Consumer warns `OCR for XX failed`
@@ -78,12 +78,12 @@ Ensure that `chown` is possible on these directories.
This indicates that the Auto matching algorithm found no documents to
learn from. This may have two reasons:
- You don't use the Auto matching algorithm: The error can be safely
ignored in this case.
- You are using the Auto matching algorithm: The classifier explicitly
excludes documents with Inbox tags. Verify that there are documents
in your archive without inbox tags. The algorithm will only learn
from documents not in your inbox.
## UserWarning in sklearn on every single document
@@ -127,10 +127,10 @@ change in the `docker-compose.yml` file:
# The gotenberg chromium route is used to convert .eml files. We do not
# want to allow external content like tracking pixels or even javascript.
command:
- 'gotenberg'
- '--chromium-disable-javascript=true'
- '--chromium-allow-list=file:///tmp/.*'
- '--api-timeout=60s'
```
## Permission denied errors in the consumption directory

View File

@@ -14,42 +14,42 @@ for finding and managing your documents.
Paperless essentially consists of two different parts for managing your
documents:
- The _consumer_ watches a specified folder and adds all documents in
that folder to paperless.
- The _web server_ (web UI) provides a UI that you use to manage and
search documents.
Each document has data fields that you can assign to them:
- A _Document_ is a piece of paper that sometimes contains valuable
information.
- The _correspondent_ of a document is the person, institution or
company that a document either originates from, or is sent to.
- A _tag_ is a label that you can assign to documents. Think of labels
as more powerful folders: Multiple documents can be grouped together
with a single tag, however, a single document can also have multiple
tags. This is not possible with folders. The reason folders are not
implemented in paperless is simply that tags are much more versatile
than folders.
- A _document type_ is used to demarcate the type of a document such
as letter, bank statement, invoice, contract, etc. It is used to
identify what a document is about.
- The document _storage path_ is the location where the document files
are stored. See [Storage Paths](advanced_usage.md#storage-paths) for
more information.
- The _date added_ of a document is the date the document was scanned
into paperless. You cannot and should not change this date.
- The _date created_ of a document is the date the document was
initially issued. This can be the date you bought a product, the
date you signed a contract, or the date a letter was sent to you.
- The _archive serial number_ (short: ASN) of a document is the
identifier of the document in your physical document binders. See
[recommended workflow](#usage-recommended-workflow) below.
- The _content_ of a document is the text that was OCR'ed from the
document. This text is fed into the search engine and is used for
matching tags, correspondents and document types.
- Paperless-ngx also supports _custom fields_ which can be used to
store additional metadata about a document.
## The Web UI
@@ -93,12 +93,12 @@ download the document or share it via a share link.
Think of versions as **file history** for a document.
- Versions track the underlying file and extracted text content (OCR/text).
- Metadata such as tags, correspondent, document type, storage path and custom fields stay on the "root" document.
- Version files follow normal filename formatting (including storage paths) and add a `_vN` suffix (for example `_v1`, `_v2`).
- By default, search and document content use the latest version.
- In document detail, selecting a version switches the preview, file metadata and content (and download etc buttons) to that version.
- Deleting a non-root version keeps metadata and falls back to the latest remaining version.
### Management Lists
@@ -218,20 +218,21 @@ patterns can include wildcards and multiple patterns separated by a comma.
The actions all ensure that the same mail is not consumed twice by
different means. These are as follows:
- **Delete:** Immediately deletes mail that paperless has consumed
documents from. Use with caution.
- **Mark as read:** Mark consumed mail as read. Paperless will not
consume documents from already read mails. If you read a mail before
paperless sees it, it will be ignored.
- **Flag:** Sets the 'important' flag on mails with consumed
documents. Paperless will not consume flagged mails.
- **Move to folder:** Moves consumed mails out of the way so that
paperless won't consume them again.
- **Add custom Tag:** Adds a custom tag to mails with consumed
documents (the IMAP standard calls these "keywords"). Paperless
will not consume mails already tagged. Not all mail servers support
this feature!
- **Apple Mail support:** Apple Mail clients allow differently colored tags. For this to work use `apple:<color>` (e.g. _apple:green_) as a custom tag. Available colors are _red_, _orange_, _yellow_, _blue_, _green_, _violet_ and _grey_.
!!! warning
@@ -324,12 +325,12 @@ or using [email](#workflow-action-email) or [webhook](#workflow-action-webhook)
"Share links" are public links to files (or an archive of files) and can be created and managed under the 'Send' button on the document detail screen or from the bulk editor.
- Share links do not require a user to login and thus link directly to a file or bundled download.
- Links are unique and are of the form `{paperless-url}/share/{randomly-generated-slug}`.
- Links can optionally have an expiration time set.
- After a link expires or is deleted users will be redirected to the regular paperless-ngx login.
- From the document detail screen you can create a share link for that single document.
- From the bulk editor you can create a **share link bundle** for any selection. Paperless-ngx prepares a ZIP archive in the background and exposes a single share link. You can revisit the "Manage share link bundles" dialog to monitor progress, retry failed bundles, or delete links.
!!! tip
@@ -513,25 +514,25 @@ flowchart TD
Workflows allow you to filter by:
- Source, e.g. documents uploaded via consume folder, API (& the web UI) and mail fetch
- File name, including wildcards e.g. \*.pdf will apply to all pdfs.
- File path, including wildcards. Note that enabling `PAPERLESS_CONSUMER_RECURSIVE` would allow, for
example, automatically assigning documents to different owners based on the upload directory.
- Mail rule. Choosing this option will force 'mail fetch' to be the workflow source.
- Content matching (`Added`, `Updated` and `Scheduled` triggers only). Filter document content using the matching settings.
There are also 'advanced' filters available for `Added`, `Updated` and `Scheduled` triggers:
- Any Tags: Filter for documents with any of the specified tags.
- All Tags: Filter for documents with all of the specified tags.
- No Tags: Filter for documents with none of the specified tags.
- Document type: Filter documents with this document type.
- Not Document types: Filter documents without any of these document types.
- Correspondent: Filter documents with this correspondent.
- Not Correspondents: Filter documents without any of these correspondents.
- Storage path: Filter documents with this storage path.
- Not Storage paths: Filter documents without any of these storage paths.
- Custom field query: Filter documents with a custom field query (the same as used for the document list filters).
### Workflow Actions
@@ -543,37 +544,37 @@ The following workflow action types are available:
"Assignment" actions can assign:
- Title, see [workflow placeholders](usage.md#workflow-placeholders) below
- Tags, correspondent, document type and storage path
- Document owner
- View and / or edit permissions to users or groups
- Custom fields. Note that no value for the field will be set
##### Removal {#workflow-action-removal}
"Removal" actions can remove either all of or specific sets of the following:
- Tags, correspondents, document types or storage paths
- Document owner
- View and / or edit permissions
- Custom fields
##### Email {#workflow-action-email}
"Email" actions can send documents via email. This action requires a mail server to be [configured](configuration.md#email-sending). You can specify:
- The recipient email address(es) separated by commas
- The subject and body of the email, which can include placeholders, see [placeholders](usage.md#workflow-placeholders) below
- Whether to include the document as an attachment
##### Webhook {#workflow-action-webhook}
"Webhook" actions send a POST request to a specified URL. You can specify:
- The URL to send the request to
- The request body as text or as key-value pairs, which can include placeholders, see [placeholders](usage.md#workflow-placeholders) below.
- Encoding for the request body, either JSON or form data
- The request headers as key-value pairs
For security reasons, webhooks can be limited to specific ports and disallowed from connecting to local URLs. See the relevant
[configuration settings](configuration.md#workflow-webhooks) to change this behavior. If you are allowing non-admins to create workflows,
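For illustration, here is a minimal Python sketch of the kind of request a webhook action produces, assuming a JSON-encoded body; the receiver URL and the rendered values are placeholders, and paperless-ngx builds the actual request internally:

```python
# Illustrative only: emulates the shape of a webhook action's POST.
import json
import urllib.request

body = {
    "title": "My document",  # e.g. a rendered {{doc_title}}
    "url": "https://paperless.example.com/documents/42/",  # a rendered {{doc_url}}
}
request = urllib.request.Request(
    "https://hooks.example.com/paperless",  # hypothetical receiver
    data=json.dumps(body).encode(),
    headers={"Content-Type": "application/json"},
)
urllib.request.urlopen(request)
```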
@@ -604,33 +605,33 @@ The available inputs differ depending on the type of workflow trigger.
This is because at the time of consumption (when the text is to be set), no automatic tags etc. have been
applied. You can use the following placeholders in the template with any trigger type:
- `{{correspondent}}`: assigned correspondent name
- `{{document_type}}`: assigned document type name
- `{{owner_username}}`: assigned owner username
- `{{added}}`: added datetime
- `{{added_year}}`: added year
- `{{added_year_short}}`: added year (short form)
- `{{added_month}}`: added month
- `{{added_month_name}}`: added month name
- `{{added_month_name_short}}`: added month short name
- `{{added_day}}`: added day
- `{{added_time}}`: added time in HH:MM format
- `{{original_filename}}`: original file name without extension
- `{{filename}}`: current file name without extension (for "added" workflows this may not be final yet; use `{{original_filename}}` instead)
- `{{doc_title}}`: current document title (cannot be used in title assignment)
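As a rough illustration of how such a template expands, here is a sketch using Jinja2 (already a project dependency); the values are invented and the exact rendering mechanism inside paperless-ngx may differ:

```python
# Sketch: rendering a title template with placeholder values.
from jinja2 import Template

title_template = Template("{{ correspondent }} {{ added_year }}-{{ added_month }}")
print(title_template.render(correspondent="ACME", added_year="2026", added_month="03"))
# -> "ACME 2026-03"
```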
The following placeholders are only available for "added" or "updated" triggers:
- `{{created}}`: created datetime
- `{{created_year}}`: created year
- `{{created_year_short}}`: created year (short form)
- `{{created_month}}`: created month
- `{{created_month_name}}`: created month name
- `{{created_month_name_short}}`: created month short name
- `{{created_day}}`: created day
- `{{created_time}}`: created time in HH:MM format
- `{{doc_url}}`: URL to the document in the web UI. Requires the `PAPERLESS_URL` setting to be set.
- `{{doc_id}}`: Document ID
##### Examples
@@ -675,26 +676,26 @@ Multiple fields may be attached to a document but the same field name cannot be
The following custom field types are supported:
- `Text`: any text
- `Boolean`: true / false (check / unchecked) field
- `Date`: date
- `URL`: a valid URL
- `Integer`: integer number, e.g. 12
- `Number`: floating point number, e.g. 12.3456
- `Monetary`: [ISO 4217 currency code](https://en.wikipedia.org/wiki/ISO_4217#List_of_ISO_4217_currency_codes) and a number with exactly two decimals, e.g. USD12.30
- `Document Link`: reference(s) to other document(s) displayed as links, automatically creates a symmetrical link in reverse
- `Select`: a pre-defined list of strings from which the user can choose
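For example, a `Monetary` value pairs the currency code with a two-decimal amount; a quick sketch of producing a conforming string:

```python
# Sketch: formatting a value for a Monetary custom field.
amount, currency = 12.3, "USD"
print(f"{currency}{amount:.2f}")  # -> "USD12.30"
```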
## PDF Actions
Paperless-ngx supports basic editing operations for PDFs (these operations currently cannot be performed on non-PDF files). When viewing an individual document, you can
open the 'PDF Editor' to use a simple UI for re-arranging, rotating, deleting pages and splitting documents.
- Merging documents: available when selecting multiple documents for 'bulk editing'.
- Rotating documents: available when selecting multiple documents for 'bulk editing' and via the PDF editor on an individual document's details page.
- Splitting documents: via the PDF editor on an individual document's details page.
- Deleting pages: via the PDF editor on an individual document's details page.
- Re-arranging pages: via the PDF editor on an individual document's details page.
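Under the hood these operations go through the `documents/bulk_edit/` API endpoint; a minimal sketch of such a call follows, with the request shape mirroring the frontend tests further down in this diff (host and token are placeholders):

```python
# Sketch: an edit_pdf bulk edit call, shaped like the one the PDF editor issues.
import json
import urllib.request

body = {
    "documents": [10],
    "method": "edit_pdf",
    "parameters": {
        "operations": [{"page": 1, "rotate": 0, "doc": 0}],
        "delete_original": False,
        "update_document": False,
        "include_metadata": True,
        "source_mode": "explicit_selection",
    },
}
request = urllib.request.Request(
    "http://localhost:8000/api/documents/bulk_edit/",  # placeholder host
    data=json.dumps(body).encode(),
    headers={
        "Content-Type": "application/json",
        "Authorization": "Token <api-token>",  # placeholder credentials
    },
)
urllib.request.urlopen(request)
```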
!!! important
@@ -772,18 +773,18 @@ the system.
Here are a couple of examples of tags and types that you could use in your
collection.
- An `inbox` tag for newly added documents that you haven't manually
edited yet.
- A tag `car` for everything car-related (repairs, registration,
insurance, etc.)
- A tag `todo` for documents that you still need to do something with,
such as reply, or perform some task online.
- A tag `bank account x` for all bank statements related to that
account.
- A tag `mail` for anything that you added to paperless via its mail
processing capabilities.
- A tag `missing_metadata` when you still need to add some metadata to
a document, but can't or don't want to do this right now.
## Searching {#basic-usage_searching}
@@ -872,8 +873,8 @@ The following diagram shows how easy it is to manage your documents.
### Preparations in paperless
- Create an inbox tag that gets assigned to all new documents.
- Create a TODO tag.
### Processing of the physical documents
@@ -947,15 +948,15 @@ Some documents require attention and require you to act on the document.
You may take two different approaches to handle these documents based on
how regularly you intend to scan documents and use paperless.
- If you scan and process your documents in paperless regularly,
assign a TODO tag to all scanned documents that you need to process.
Create a saved view on the dashboard that shows all documents with
this tag.
- If you do not scan documents regularly and use paperless solely for
archiving, create a physical todo box next to your physical inbox
and put documents you need to process in the TODO box. Once you have
performed the task associated with the document, move it to the
inbox.
## Remote OCR
@@ -976,63 +977,64 @@ or page limitations (e.g. with a free tier).
Paperless-ngx consists of the following components:
- **The webserver:** This serves the administration pages, the API,
and the new frontend. This is the main tool you'll be using to interact
with paperless. You may start the webserver directly with
```shell-session
cd /path/to/paperless/src/
granian --interface asginl --ws "paperless.asgi:application"
```
or by any other means such as Apache `mod_wsgi`.
- **The consumer:** This is what watches your consumption folder for
documents. However, the consumer itself does not really consume your
documents; instead, it notifies a task processor that a new file is
ready for consumption. I suppose it should be named differently. This
was also used to check your emails, but that's now done elsewhere as
well.
Start the consumer with the management command `document_consumer`:
```shell-session
cd /path/to/paperless/src/
python3 manage.py document_consumer
```
- **The task processor:** Paperless relies on [Celery - Distributed
Task Queue](https://docs.celeryq.dev/en/stable/index.html) for doing
most of the heavy lifting. This is a task queue that accepts tasks
from multiple sources and processes these in parallel. It also comes
with a scheduler that executes certain commands periodically.
This task processor is responsible for:
- Consuming documents. When the consumer finds new documents, it
notifies the task processor to start a consumption task.
- The task processor also performs the consumption of any
documents you upload through the web interface.
- Consuming emails. It periodically checks your configured
accounts for new emails and notifies the task processor to
consume the attachment of an email.
- Maintaining the search index and the automatic matching
algorithm. These are things that paperless needs to do from time
to time in order to operate properly.
This allows paperless to process multiple documents from your
consumption folder in parallel! On a modern multi-core system, this
makes the consumption process with full OCR blazingly fast.
The task processor comes with a built-in admin interface that you
can use to check whether any of the tasks have failed and inspect
the errors (e.g., wrong email credentials, errors during consuming a
specific file, etc.).
- A [redis](https://redis.io/) message broker: This is a really
lightweight service that is responsible for getting the tasks from
the webserver and the consumer to the task scheduler. These run in
different processes (maybe even on different machines!), and
therefore a broker is necessary. A minimal sketch of this pattern
follows this list.
- Optional: A database server. Paperless supports PostgreSQL, MariaDB
and SQLite for storing its data.
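A minimal sketch of the broker/worker pattern described above (illustrative only; paperless-ngx defines its own Celery app and tasks internally, and the names here are invented):

```python
# Sketch: a producer hands work to a Celery worker through redis.
from celery import Celery

app = Celery("sketch", broker="redis://localhost:6379/0")

@app.task
def consume_file(path: str) -> None:
    # In paperless, a task like this would run OCR, matching and indexing.
    print(f"would consume {path}")

# The webserver or consumer only enqueues; a worker process picks it up:
# consume_file.delay("/consume/scan.pdf")
```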
View File
@@ -42,14 +42,13 @@ dependencies = [
"djangorestframework~=3.16",
"djangorestframework-guardian~=0.4.0",
"drf-spectacular~=0.28",
"drf-spectacular-sidecar~=2026.3.1",
"drf-spectacular-sidecar~=2026.1.1",
"drf-writable-nested~=0.7.1",
"faiss-cpu>=1.10",
"filelock~=3.25.2",
"filelock~=3.24.3",
"flower~=2.0.1",
"gotenberg-client~=0.13.1",
"httpx-oauth~=0.16",
"ijson>=3.2",
"imap-tools~=1.11.0",
"jinja2~=3.1.5",
"langdetect~=1.0.9",
@@ -72,7 +71,7 @@ dependencies = [
"rapidfuzz~=3.14.0",
"redis[hiredis]~=5.2.1",
"regex>=2025.9.18",
"scikit-learn~=1.8.0",
"scikit-learn~=1.7.0",
"sentence-transformers>=4.1",
"setproctitle~=1.3.4",
"tika-client~=0.10.0",
@@ -111,7 +110,7 @@ docs = [
testing = [
"daphne",
"factory-boy~=3.3.1",
"faker~=40.8.0",
"faker~=40.5.1",
"imagehash",
"pytest~=9.0.0",
"pytest-cov~=7.0.0",
View File
@@ -19,4 +19,6 @@ following additional information about it:
* Correspondent: ${DOCUMENT_CORRESPONDENT}
* Tags: ${DOCUMENT_TAGS}
It was consumed with the passphrase ${PASSPHRASE}
"
View File
@@ -1217,7 +1217,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1758</context>
<context context-type="linenumber">1760</context>
</context-group>
</trans-unit>
<trans-unit id="1577733187050997705" datatype="html">
@@ -2802,19 +2802,19 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1759</context>
<context context-type="linenumber">1761</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">833</context>
<context context-type="linenumber">802</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">871</context>
<context context-type="linenumber">835</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">894</context>
<context context-type="linenumber">854</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/manage/document-attributes/custom-fields/custom-fields.component.ts</context>
@@ -3404,27 +3404,27 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">470</context>
<context context-type="linenumber">445</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">510</context>
<context context-type="linenumber">485</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">548</context>
<context context-type="linenumber">523</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">586</context>
<context context-type="linenumber">561</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">648</context>
<context context-type="linenumber">623</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">781</context>
<context context-type="linenumber">756</context>
</context-group>
</trans-unit>
<trans-unit id="994016933065248559" datatype="html">
@@ -3512,7 +3512,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1812</context>
<context context-type="linenumber">1814</context>
</context-group>
</trans-unit>
<trans-unit id="6661109599266152398" datatype="html">
@@ -3523,7 +3523,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1813</context>
<context context-type="linenumber">1815</context>
</context-group>
</trans-unit>
<trans-unit id="5162686434580248853" datatype="html">
@@ -3534,7 +3534,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1814</context>
<context context-type="linenumber">1816</context>
</context-group>
</trans-unit>
<trans-unit id="8157388568390631653" datatype="html">
@@ -5499,7 +5499,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">785</context>
<context context-type="linenumber">760</context>
</context-group>
</trans-unit>
<trans-unit id="4522609911791833187" datatype="html">
@@ -7327,7 +7327,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">415</context>
<context context-type="linenumber">390</context>
</context-group>
<note priority="1" from="description">this string is used to separate processing, failed and added on the file upload widget</note>
</trans-unit>
@@ -7851,7 +7851,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">783</context>
<context context-type="linenumber">758</context>
</context-group>
</trans-unit>
<trans-unit id="7295637485862454066" datatype="html">
@@ -7869,7 +7869,7 @@
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">829</context>
<context context-type="linenumber">798</context>
</context-group>
</trans-unit>
<trans-unit id="2951161989614003846" datatype="html">
@@ -7890,88 +7890,88 @@
<source>Reprocess operation for &quot;<x id="PH" equiv-text="this.document.title"/>&quot; will begin in the background.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1385</context>
<context context-type="linenumber">1387</context>
</context-group>
</trans-unit>
<trans-unit id="4409560272830824468" datatype="html">
<source>Error executing operation</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1396</context>
<context context-type="linenumber">1398</context>
</context-group>
</trans-unit>
<trans-unit id="6030453331794586802" datatype="html">
<source>Error downloading document</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1459</context>
<context context-type="linenumber">1461</context>
</context-group>
</trans-unit>
<trans-unit id="4458954481601077369" datatype="html">
<source>Page Fit</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1539</context>
<context context-type="linenumber">1541</context>
</context-group>
</trans-unit>
<trans-unit id="4663705961777238777" datatype="html">
<source>PDF edit operation for &quot;<x id="PH" equiv-text="this.document.title"/>&quot; will begin in the background.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1779</context>
<context context-type="linenumber">1781</context>
</context-group>
</trans-unit>
<trans-unit id="9043972994040261999" datatype="html">
<source>Error executing PDF edit operation</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1791</context>
<context context-type="linenumber">1793</context>
</context-group>
</trans-unit>
<trans-unit id="6172690334763056188" datatype="html">
<source>Please enter the current password before attempting to remove it.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1802</context>
<context context-type="linenumber">1804</context>
</context-group>
</trans-unit>
<trans-unit id="968660764814228922" datatype="html">
<source>Password removal operation for &quot;<x id="PH" equiv-text="this.document.title"/>&quot; will begin in the background.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1836</context>
<context context-type="linenumber">1838</context>
</context-group>
</trans-unit>
<trans-unit id="2282118435712883014" datatype="html">
<source>Error executing password removal operation</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1850</context>
<context context-type="linenumber">1852</context>
</context-group>
</trans-unit>
<trans-unit id="3740891324955700797" datatype="html">
<source>Print failed.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1889</context>
<context context-type="linenumber">1891</context>
</context-group>
</trans-unit>
<trans-unit id="6457245677384603573" datatype="html">
<source>Error loading document for printing.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1901</context>
<context context-type="linenumber">1903</context>
</context-group>
</trans-unit>
<trans-unit id="6085793215710522488" datatype="html">
<source>An error occurred loading tiff: <x id="PH" equiv-text="err.toString()"/></source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1966</context>
<context context-type="linenumber">1968</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">1970</context>
<context context-type="linenumber">1972</context>
</context-group>
</trans-unit>
<trans-unit id="4958946940233632319" datatype="html">
@@ -8215,25 +8215,25 @@
<source>Error executing bulk operation</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">321</context>
<context context-type="linenumber">294</context>
</context-group>
</trans-unit>
<trans-unit id="7894972847287473517" datatype="html">
<source>&quot;<x id="PH" equiv-text="items[0].name"/>&quot;</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">407</context>
<context context-type="linenumber">382</context>
</context-group>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">413</context>
<context context-type="linenumber">388</context>
</context-group>
</trans-unit>
<trans-unit id="8639884465898458690" datatype="html">
<source>&quot;<x id="PH" equiv-text="items[0].name"/>&quot; and &quot;<x id="PH_1" equiv-text="items[1].name"/>&quot;</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">409</context>
<context context-type="linenumber">384</context>
</context-group>
<note priority="1" from="description">This is for messages like &apos;modify &quot;tag1&quot; and &quot;tag2&quot;&apos;</note>
</trans-unit>
@@ -8241,7 +8241,7 @@
<source><x id="PH" equiv-text="list"/> and &quot;<x id="PH_1" equiv-text="items[items.length - 1].name"/>&quot;</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">417,419</context>
<context context-type="linenumber">392,394</context>
</context-group>
<note priority="1" from="description">this is for messages like &apos;modify &quot;tag1&quot;, &quot;tag2&quot; and &quot;tag3&quot;&apos;</note>
</trans-unit>
@@ -8249,14 +8249,14 @@
<source>Confirm tags assignment</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">434</context>
<context context-type="linenumber">409</context>
</context-group>
</trans-unit>
<trans-unit id="6619516195038467207" datatype="html">
<source>This operation will add the tag &quot;<x id="PH" equiv-text="tag.name"/>&quot; to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">440</context>
<context context-type="linenumber">415</context>
</context-group>
</trans-unit>
<trans-unit id="1894412783609570695" datatype="html">
@@ -8265,14 +8265,14 @@
)"/> to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">445,447</context>
<context context-type="linenumber">420,422</context>
</context-group>
</trans-unit>
<trans-unit id="7181166515756808573" datatype="html">
<source>This operation will remove the tag &quot;<x id="PH" equiv-text="tag.name"/>&quot; from <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">453</context>
<context context-type="linenumber">428</context>
</context-group>
</trans-unit>
<trans-unit id="3819792277998068944" datatype="html">
@@ -8281,7 +8281,7 @@
)"/> from <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">458,460</context>
<context context-type="linenumber">433,435</context>
</context-group>
</trans-unit>
<trans-unit id="2739066218579571288" datatype="html">
@@ -8292,84 +8292,84 @@
)"/> on <x id="PH_2" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">462,466</context>
<context context-type="linenumber">437,441</context>
</context-group>
</trans-unit>
<trans-unit id="2996713129519325161" datatype="html">
<source>Confirm correspondent assignment</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">503</context>
<context context-type="linenumber">478</context>
</context-group>
</trans-unit>
<trans-unit id="6900893559485781849" datatype="html">
<source>This operation will assign the correspondent &quot;<x id="PH" equiv-text="correspondent.name"/>&quot; to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">505</context>
<context context-type="linenumber">480</context>
</context-group>
</trans-unit>
<trans-unit id="1257522660364398440" datatype="html">
<source>This operation will remove the correspondent from <x id="PH" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">507</context>
<context context-type="linenumber">482</context>
</context-group>
</trans-unit>
<trans-unit id="5393409374423140648" datatype="html">
<source>Confirm document type assignment</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">541</context>
<context context-type="linenumber">516</context>
</context-group>
</trans-unit>
<trans-unit id="332180123895325027" datatype="html">
<source>This operation will assign the document type &quot;<x id="PH" equiv-text="documentType.name"/>&quot; to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">543</context>
<context context-type="linenumber">518</context>
</context-group>
</trans-unit>
<trans-unit id="2236642492594872779" datatype="html">
<source>This operation will remove the document type from <x id="PH" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">545</context>
<context context-type="linenumber">520</context>
</context-group>
</trans-unit>
<trans-unit id="6386555513013840736" datatype="html">
<source>Confirm storage path assignment</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">579</context>
<context context-type="linenumber">554</context>
</context-group>
</trans-unit>
<trans-unit id="8750527458618415924" datatype="html">
<source>This operation will assign the storage path &quot;<x id="PH" equiv-text="storagePath.name"/>&quot; to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">581</context>
<context context-type="linenumber">556</context>
</context-group>
</trans-unit>
<trans-unit id="60728365335056946" datatype="html">
<source>This operation will remove the storage path from <x id="PH" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">583</context>
<context context-type="linenumber">558</context>
</context-group>
</trans-unit>
<trans-unit id="4187352575310415704" datatype="html">
<source>Confirm custom field assignment</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">612</context>
<context context-type="linenumber">587</context>
</context-group>
</trans-unit>
<trans-unit id="7966494636326273856" datatype="html">
<source>This operation will assign the custom field &quot;<x id="PH" equiv-text="customField.name"/>&quot; to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">618</context>
<context context-type="linenumber">593</context>
</context-group>
</trans-unit>
<trans-unit id="5789455969634598553" datatype="html">
@@ -8378,14 +8378,14 @@
)"/> to <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">623,625</context>
<context context-type="linenumber">598,600</context>
</context-group>
</trans-unit>
<trans-unit id="5648572354333199245" datatype="html">
<source>This operation will remove the custom field &quot;<x id="PH" equiv-text="customField.name"/>&quot; from <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">631</context>
<context context-type="linenumber">606</context>
</context-group>
</trans-unit>
<trans-unit id="6666899594015948817" datatype="html">
@@ -8394,7 +8394,7 @@
)"/> from <x id="PH_1" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">636,638</context>
<context context-type="linenumber">611,613</context>
</context-group>
</trans-unit>
<trans-unit id="8050047262594964176" datatype="html">
@@ -8405,91 +8405,91 @@
)"/> on <x id="PH_2" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">640,644</context>
<context context-type="linenumber">615,619</context>
</context-group>
</trans-unit>
<trans-unit id="8615059324209654051" datatype="html">
<source>Move <x id="PH" equiv-text="this.list.selected.size"/> selected document(s) to the trash?</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">782</context>
<context context-type="linenumber">757</context>
</context-group>
</trans-unit>
<trans-unit id="8585195717323764335" datatype="html">
<source>This operation will permanently recreate the archive files for <x id="PH" equiv-text="this.list.selected.size"/> selected document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">830</context>
<context context-type="linenumber">799</context>
</context-group>
</trans-unit>
<trans-unit id="7366623494074776040" datatype="html">
<source>The archive files will be re-generated with the current settings.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">831</context>
<context context-type="linenumber">800</context>
</context-group>
</trans-unit>
<trans-unit id="6555329262222566158" datatype="html">
<source>Rotate confirm</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">868</context>
<context context-type="linenumber">832</context>
</context-group>
</trans-unit>
<trans-unit id="5203024009814367559" datatype="html">
<source>This operation will add rotated versions of the <x id="PH" equiv-text="this.list.selected.size"/> document(s).</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">869</context>
<context context-type="linenumber">833</context>
</context-group>
</trans-unit>
<trans-unit id="7910756456450124185" datatype="html">
<source>Merge confirm</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">892</context>
<context context-type="linenumber">852</context>
</context-group>
</trans-unit>
<trans-unit id="7643543647233874431" datatype="html">
<source>This operation will merge <x id="PH" equiv-text="this.list.selected.size"/> selected documents into a new document.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">893</context>
<context context-type="linenumber">853</context>
</context-group>
</trans-unit>
<trans-unit id="7869008840945899895" datatype="html">
<source>Merged document will be queued for consumption.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">916</context>
<context context-type="linenumber">872</context>
</context-group>
</trans-unit>
<trans-unit id="476913782630693351" datatype="html">
<source>Custom fields updated.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">940</context>
<context context-type="linenumber">896</context>
</context-group>
</trans-unit>
<trans-unit id="3873496751167944011" datatype="html">
<source>Error updating custom fields.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">949</context>
<context context-type="linenumber">905</context>
</context-group>
</trans-unit>
<trans-unit id="6144801143088984138" datatype="html">
<source>Share link bundle creation requested.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">989</context>
<context context-type="linenumber">945</context>
</context-group>
</trans-unit>
<trans-unit id="46019676931295023" datatype="html">
<source>Share link bundle creation is not available yet.</source>
<context-group purpose="location">
<context context-type="sourcefile">src/app/components/document-list/bulk-editor/bulk-editor.component.ts</context>
<context context-type="linenumber">996</context>
<context context-type="linenumber">952</context>
</context-group>
</trans-unit>
<trans-unit id="6307402210351946694" datatype="html">
View File
@@ -31,8 +31,8 @@ export enum EditDialogMode {
@Directive()
export abstract class EditDialogComponent<
T extends ObjectWithPermissions | ObjectWithId,
>
T extends ObjectWithPermissions | ObjectWithId,
>
extends LoadingComponentWithPermissions
implements OnInit
{
View File
@@ -950,8 +950,8 @@ describe('DocumentDetailComponent', () => {
it('should support reprocess, confirm and close modal after started', () => {
initNormally()
const reprocessSpy = jest.spyOn(documentService, 'reprocessDocuments')
reprocessSpy.mockReturnValue(of(true))
const bulkEditSpy = jest.spyOn(documentService, 'bulkEdit')
bulkEditSpy.mockReturnValue(of(true))
let openModal: NgbModalRef
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
const modalSpy = jest.spyOn(modalService, 'open')
@@ -959,7 +959,7 @@ describe('DocumentDetailComponent', () => {
component.reprocess()
const modalCloseSpy = jest.spyOn(openModal, 'close')
openModal.componentInstance.confirmClicked.next()
expect(reprocessSpy).toHaveBeenCalledWith([doc.id])
expect(bulkEditSpy).toHaveBeenCalledWith([doc.id], 'reprocess', {})
expect(modalSpy).toHaveBeenCalled()
expect(toastSpy).toHaveBeenCalled()
expect(modalCloseSpy).toHaveBeenCalled()
@@ -967,13 +967,13 @@ describe('DocumentDetailComponent', () => {
it('should show error if redo ocr call fails', () => {
initNormally()
const reprocessSpy = jest.spyOn(documentService, 'reprocessDocuments')
const bulkEditSpy = jest.spyOn(documentService, 'bulkEdit')
let openModal: NgbModalRef
modalService.activeInstances.subscribe((modal) => (openModal = modal[0]))
const toastSpy = jest.spyOn(toastService, 'showError')
component.reprocess()
const modalCloseSpy = jest.spyOn(openModal, 'close')
reprocessSpy.mockReturnValue(throwError(() => new Error('error occurred')))
bulkEditSpy.mockReturnValue(throwError(() => new Error('error occurred')))
openModal.componentInstance.confirmClicked.next()
expect(toastSpy).toHaveBeenCalled()
expect(modalCloseSpy).not.toHaveBeenCalled()
@@ -1644,9 +1644,9 @@ describe('DocumentDetailComponent', () => {
expect(
fixture.debugElement.query(By.css('.preview-sticky img'))
).not.toBeUndefined()
;((component.document.mime_type =
;(component.document.mime_type =
'application/vnd.openxmlformats-officedocument.wordprocessingml.document'),
fixture.detectChanges())
fixture.detectChanges()
expect(component.archiveContentRenderType).toEqual(
component.ContentRenderType.Other
)
@@ -1669,15 +1669,18 @@ describe('DocumentDetailComponent', () => {
modal.componentInstance.pages = [{ page: 1, rotate: 0, splitAfter: false }]
modal.componentInstance.confirm()
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/edit_pdf/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
expect(req.request.body).toEqual({
documents: [10],
operations: [{ page: 1, rotate: 0, doc: 0 }],
delete_original: false,
update_document: false,
include_metadata: true,
source_mode: 'explicit_selection',
method: 'edit_pdf',
parameters: {
operations: [{ page: 1, rotate: 0, doc: 0 }],
delete_original: false,
update_document: false,
include_metadata: true,
source_mode: 'explicit_selection',
},
})
req.error(new ErrorEvent('failed'))
expect(errorSpy).toHaveBeenCalled()
@@ -1688,7 +1691,7 @@ describe('DocumentDetailComponent', () => {
modal.componentInstance.deleteOriginal = true
modal.componentInstance.confirm()
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/edit_pdf/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(closeSpy).toHaveBeenCalled()
@@ -1708,15 +1711,18 @@ describe('DocumentDetailComponent', () => {
dialog.deleteOriginal = true
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/remove_password/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
expect(req.request.body).toEqual({
documents: [10],
password: 'secret',
update_document: false,
include_metadata: false,
delete_original: true,
source_mode: 'explicit_selection',
method: 'remove_password',
parameters: {
password: 'secret',
update_document: false,
include_metadata: false,
delete_original: true,
source_mode: 'explicit_selection',
},
})
req.flush(true)
})
@@ -1731,7 +1737,7 @@ describe('DocumentDetailComponent', () => {
expect(errorSpy).toHaveBeenCalled()
httpTestingController.expectNone(
`${environment.apiBaseUrl}documents/remove_password/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
})
@@ -1747,7 +1753,7 @@ describe('DocumentDetailComponent', () => {
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/remove_password/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.error(new ErrorEvent('failed'))
@@ -1768,7 +1774,7 @@ describe('DocumentDetailComponent', () => {
modal.componentInstance as PasswordRemovalConfirmDialogComponent
dialog.confirm()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/remove_password/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
View File
@@ -1379,25 +1379,27 @@ export class DocumentDetailComponent
modal.componentInstance.btnCaption = $localize`Proceed`
modal.componentInstance.confirmClicked.subscribe(() => {
modal.componentInstance.buttonsEnabled = false
this.documentsService.reprocessDocuments([this.document.id]).subscribe({
next: () => {
this.toastService.showInfo(
$localize`Reprocess operation for "${this.document.title}" will begin in the background.`
)
if (modal) {
modal.close()
}
},
error: (error) => {
if (modal) {
modal.componentInstance.buttonsEnabled = true
}
this.toastService.showError(
$localize`Error executing operation`,
error
)
},
})
this.documentsService
.bulkEdit([this.document.id], 'reprocess', {})
.subscribe({
next: () => {
this.toastService.showInfo(
$localize`Reprocess operation for "${this.document.title}" will begin in the background.`
)
if (modal) {
modal.close()
}
},
error: (error) => {
if (modal) {
modal.componentInstance.buttonsEnabled = true
}
this.toastService.showError(
$localize`Error executing operation`,
error
)
},
})
})
}
@@ -1764,7 +1766,7 @@ export class DocumentDetailComponent
.subscribe(() => {
modal.componentInstance.buttonsEnabled = false
this.documentsService
.editPdfDocuments([sourceDocumentId], {
.bulkEdit([sourceDocumentId], 'edit_pdf', {
operations: modal.componentInstance.getOperations(),
delete_original: modal.componentInstance.deleteOriginal,
update_document:
@@ -1822,7 +1824,7 @@ export class DocumentDetailComponent
dialog.buttonsEnabled = false
this.networkActive = true
this.documentsService
.removePasswordDocuments([sourceDocumentId], {
.bulkEdit([sourceDocumentId], 'remove_password', {
password: this.password,
update_document: dialog.updateDocument,
include_metadata: dialog.includeMetadata,
View File
@@ -1,4 +1,3 @@
import { DatePipe } from '@angular/common'
import { provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'
import {
HttpTestingController,
@@ -139,7 +138,6 @@ describe('BulkEditorComponent', () => {
},
},
FilterPipe,
DatePipe,
SettingsService,
{
provide: UserService,
@@ -851,11 +849,13 @@ describe('BulkEditorComponent', () => {
expect(modal).not.toBeUndefined()
modal.componentInstance.confirm()
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/delete/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
method: 'delete',
parameters: {},
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
@@ -868,7 +868,7 @@ describe('BulkEditorComponent', () => {
fixture.detectChanges()
component.applyDelete()
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/delete/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
})
@@ -944,11 +944,13 @@ describe('BulkEditorComponent', () => {
expect(modal).not.toBeUndefined()
modal.componentInstance.confirm()
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/reprocess/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
method: 'reprocess',
parameters: {},
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
@@ -977,13 +979,13 @@ describe('BulkEditorComponent', () => {
modal.componentInstance.rotate()
modal.componentInstance.confirm()
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/rotate/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
degrees: 90,
source_mode: 'latest_version',
method: 'rotate',
parameters: { degrees: 90 },
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
@@ -1019,12 +1021,13 @@ describe('BulkEditorComponent', () => {
modal.componentInstance.metadataDocumentID = 3
modal.componentInstance.confirm()
let req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/merge/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
metadata_document_id: 3,
method: 'merge',
parameters: { metadata_document_id: 3 },
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
@@ -1037,13 +1040,13 @@ describe('BulkEditorComponent', () => {
modal.componentInstance.deleteOriginals = true
modal.componentInstance.confirm()
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/merge/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
metadata_document_id: 3,
delete_originals: true,
method: 'merge',
parameters: { metadata_document_id: 3, delete_originals: true },
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
@@ -1058,13 +1061,13 @@ describe('BulkEditorComponent', () => {
modal.componentInstance.archiveFallback = true
modal.componentInstance.confirm()
req = httpTestingController.expectOne(
`${environment.apiBaseUrl}documents/merge/`
`${environment.apiBaseUrl}documents/bulk_edit/`
)
req.flush(true)
expect(req.request.body).toEqual({
documents: [3, 4],
metadata_document_id: 3,
archive_fallback: true,
method: 'merge',
parameters: { metadata_document_id: 3, archive_fallback: true },
})
httpTestingController.match(
`${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
View File
@@ -12,7 +12,7 @@ import {
} from '@ng-bootstrap/ng-bootstrap'
import { saveAs } from 'file-saver'
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
import { first, map, Observable, Subject, switchMap, takeUntil } from 'rxjs'
import { first, map, Subject, switchMap, takeUntil } from 'rxjs'
import { ConfirmDialogComponent } from 'src/app/components/common/confirm-dialog/confirm-dialog.component'
import { CustomField } from 'src/app/data/custom-field'
import { MatchingModel } from 'src/app/data/matching-model'
@@ -29,9 +29,7 @@ import { CorrespondentService } from 'src/app/services/rest/correspondent.servic
import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
import {
DocumentBulkEditMethod,
DocumentService,
MergeDocumentsRequest,
SelectionDataItem,
} from 'src/app/services/rest/document.service'
import { SavedViewService } from 'src/app/services/rest/saved-view.service'
@@ -257,9 +255,9 @@ export class BulkEditorComponent
this.unsubscribeNotifier.complete()
}
private executeBulkEditMethod(
private executeBulkOperation(
modal: NgbModalRef,
method: DocumentBulkEditMethod,
method: string,
args: any,
overrideDocumentIDs?: number[]
) {
@@ -274,55 +272,32 @@ export class BulkEditorComponent
)
.pipe(first())
.subscribe({
next: () => this.handleOperationSuccess(modal),
error: (error) => this.handleOperationError(modal, error),
next: () => {
if (args['delete_originals']) {
this.list.selected.clear()
}
this.list.reload()
this.list.reduceSelectionToFilter()
this.list.selected.forEach((id) => {
this.openDocumentService.refreshDocument(id)
})
this.savedViewService.maybeRefreshDocumentCounts()
if (modal) {
modal.close()
}
},
error: (error) => {
if (modal) {
modal.componentInstance.buttonsEnabled = true
}
this.toastService.showError(
$localize`Error executing bulk operation`,
error
)
},
})
}
private executeDocumentAction(
modal: NgbModalRef,
request: Observable<any>,
options: { deleteOriginals?: boolean } = {}
) {
if (modal) {
modal.componentInstance.buttonsEnabled = false
}
request.pipe(first()).subscribe({
next: () => {
this.handleOperationSuccess(modal, options.deleteOriginals ?? false)
},
error: (error) => this.handleOperationError(modal, error),
})
}
private handleOperationSuccess(
modal: NgbModalRef,
clearSelection: boolean = false
) {
if (clearSelection) {
this.list.selected.clear()
}
this.list.reload()
this.list.reduceSelectionToFilter()
this.list.selected.forEach((id) => {
this.openDocumentService.refreshDocument(id)
})
this.savedViewService.maybeRefreshDocumentCounts()
if (modal) {
modal.close()
}
}
private handleOperationError(modal: NgbModalRef, error: any) {
if (modal) {
modal.componentInstance.buttonsEnabled = true
}
this.toastService.showError(
$localize`Error executing bulk operation`,
error
)
}
private applySelectionData(
items: SelectionDataItem[],
selectionModel: FilterableDropdownSelectionModel
@@ -471,13 +446,13 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.executeBulkEditMethod(modal, 'modify_tags', {
this.executeBulkOperation(modal, 'modify_tags', {
add_tags: changedTags.itemsToAdd.map((t) => t.id),
remove_tags: changedTags.itemsToRemove.map((t) => t.id),
})
})
} else {
this.executeBulkEditMethod(null, 'modify_tags', {
this.executeBulkOperation(null, 'modify_tags', {
add_tags: changedTags.itemsToAdd.map((t) => t.id),
remove_tags: changedTags.itemsToRemove.map((t) => t.id),
})
@@ -511,12 +486,12 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.executeBulkEditMethod(modal, 'set_correspondent', {
this.executeBulkOperation(modal, 'set_correspondent', {
correspondent: correspondent ? correspondent.id : null,
})
})
} else {
this.executeBulkEditMethod(null, 'set_correspondent', {
this.executeBulkOperation(null, 'set_correspondent', {
correspondent: correspondent ? correspondent.id : null,
})
}
@@ -549,12 +524,12 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.executeBulkEditMethod(modal, 'set_document_type', {
this.executeBulkOperation(modal, 'set_document_type', {
document_type: documentType ? documentType.id : null,
})
})
} else {
this.executeBulkEditMethod(null, 'set_document_type', {
this.executeBulkOperation(null, 'set_document_type', {
document_type: documentType ? documentType.id : null,
})
}
@@ -587,12 +562,12 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.executeBulkEditMethod(modal, 'set_storage_path', {
this.executeBulkOperation(modal, 'set_storage_path', {
storage_path: storagePath ? storagePath.id : null,
})
})
} else {
this.executeBulkEditMethod(null, 'set_storage_path', {
this.executeBulkOperation(null, 'set_storage_path', {
storage_path: storagePath ? storagePath.id : null,
})
}
@@ -649,7 +624,7 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
this.executeBulkEditMethod(modal, 'modify_custom_fields', {
this.executeBulkOperation(modal, 'modify_custom_fields', {
add_custom_fields: changedCustomFields.itemsToAdd.map((f) => f.id),
remove_custom_fields: changedCustomFields.itemsToRemove.map(
(f) => f.id
@@ -657,7 +632,7 @@ export class BulkEditorComponent
})
})
} else {
this.executeBulkEditMethod(null, 'modify_custom_fields', {
this.executeBulkOperation(null, 'modify_custom_fields', {
add_custom_fields: changedCustomFields.itemsToAdd.map((f) => f.id),
remove_custom_fields: changedCustomFields.itemsToRemove.map(
(f) => f.id
@@ -787,16 +762,10 @@ export class BulkEditorComponent
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
modal.componentInstance.buttonsEnabled = false
this.executeDocumentAction(
modal,
this.documentService.deleteDocuments(Array.from(this.list.selected))
)
this.executeBulkOperation(modal, 'delete', {})
})
} else {
this.executeDocumentAction(
null,
this.documentService.deleteDocuments(Array.from(this.list.selected))
)
this.executeBulkOperation(null, 'delete', {})
}
}
@@ -835,12 +804,7 @@ export class BulkEditorComponent
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
modal.componentInstance.buttonsEnabled = false
this.executeDocumentAction(
modal,
this.documentService.reprocessDocuments(
Array.from(this.list.selected)
)
)
this.executeBulkOperation(modal, 'reprocess', {})
})
}
@@ -851,7 +815,7 @@ export class BulkEditorComponent
modal.componentInstance.confirmClicked.subscribe(
({ permissions, merge }) => {
modal.componentInstance.buttonsEnabled = false
this.executeBulkEditMethod(modal, 'set_permissions', {
this.executeBulkOperation(modal, 'set_permissions', {
...permissions,
merge,
})
@@ -874,13 +838,9 @@ export class BulkEditorComponent
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
rotateDialog.buttonsEnabled = false
this.executeDocumentAction(
modal,
this.documentService.rotateDocuments(
Array.from(this.list.selected),
rotateDialog.degrees
)
)
this.executeBulkOperation(modal, 'rotate', {
degrees: rotateDialog.degrees,
})
})
}
@@ -896,22 +856,18 @@ export class BulkEditorComponent
mergeDialog.confirmClicked
.pipe(takeUntil(this.unsubscribeNotifier))
.subscribe(() => {
const args: MergeDocumentsRequest = {}
const args = {}
if (mergeDialog.metadataDocumentID > -1) {
args.metadata_document_id = mergeDialog.metadataDocumentID
args['metadata_document_id'] = mergeDialog.metadataDocumentID
}
if (mergeDialog.deleteOriginals) {
args.delete_originals = true
args['delete_originals'] = true
}
if (mergeDialog.archiveFallback) {
args.archive_fallback = true
args['archive_fallback'] = true
}
mergeDialog.buttonsEnabled = false
this.executeDocumentAction(
modal,
this.documentService.mergeDocuments(mergeDialog.documentIDs, args),
{ deleteOriginals: !!args.delete_originals }
)
this.executeBulkOperation(modal, 'merge', args, mergeDialog.documentIDs)
this.toastService.showInfo(
$localize`Merged document will be queued for consumption.`
)

View File

@@ -230,88 +230,6 @@ describe(`DocumentService`, () => {
})
})
it('should call appropriate api endpoint for delete documents', () => {
const ids = [1, 2, 3]
subscription = service.deleteDocuments(ids).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/delete/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
})
})
it('should call appropriate api endpoint for reprocess documents', () => {
const ids = [1, 2, 3]
subscription = service.reprocessDocuments(ids).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/reprocess/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
})
})
it('should call appropriate api endpoint for rotate documents', () => {
const ids = [1, 2, 3]
subscription = service.rotateDocuments(ids, 90).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/rotate/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
degrees: 90,
source_mode: 'latest_version',
})
})
it('should call appropriate api endpoint for merge documents', () => {
const ids = [1, 2, 3]
const args = { metadata_document_id: 1, delete_originals: true }
subscription = service.mergeDocuments(ids, args).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/merge/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
metadata_document_id: 1,
delete_originals: true,
})
})
it('should call appropriate api endpoint for edit pdf', () => {
const ids = [1]
const args = { operations: [{ page: 1, rotate: 90, doc: 0 }] }
subscription = service.editPdfDocuments(ids, args).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/edit_pdf/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
operations: [{ page: 1, rotate: 90, doc: 0 }],
})
})
it('should call appropriate api endpoint for remove password', () => {
const ids = [1]
const args = { password: 'secret', update_document: true }
subscription = service.removePasswordDocuments(ids, args).subscribe()
const req = httpTestingController.expectOne(
`${environment.apiBaseUrl}${endpoint}/remove_password/`
)
expect(req.request.method).toEqual('POST')
expect(req.request.body).toEqual({
documents: ids,
password: 'secret',
update_document: true,
})
})
it('should return the correct preview URL for a single document', () => {
let url = service.getPreviewUrl(documents[0].id)
expect(url).toEqual(

View File

@@ -42,45 +42,6 @@ export enum BulkEditSourceMode {
EXPLICIT_SELECTION = 'explicit_selection',
}
export type DocumentBulkEditMethod =
| 'set_correspondent'
| 'set_document_type'
| 'set_storage_path'
| 'add_tag'
| 'remove_tag'
| 'modify_tags'
| 'modify_custom_fields'
| 'set_permissions'
export interface MergeDocumentsRequest {
metadata_document_id?: number
delete_originals?: boolean
archive_fallback?: boolean
source_mode?: BulkEditSourceMode
}
export interface EditPdfOperation {
page: number
rotate?: number
doc?: number
}
export interface EditPdfDocumentsRequest {
operations: EditPdfOperation[]
delete_original?: boolean
update_document?: boolean
include_metadata?: boolean
source_mode?: BulkEditSourceMode
}
export interface RemovePasswordDocumentsRequest {
password: string
update_document?: boolean
delete_original?: boolean
include_metadata?: boolean
source_mode?: BulkEditSourceMode
}
@Injectable({
providedIn: 'root',
})
@@ -338,7 +299,7 @@ export class DocumentService extends AbstractPaperlessService<Document> {
return this.http.get<DocumentMetadata>(url.toString())
}
bulkEdit(ids: number[], method: DocumentBulkEditMethod, args: any) {
bulkEdit(ids: number[], method: string, args: any) {
return this.http.post(this.getResourceUrl(null, 'bulk_edit'), {
documents: ids,
method: method,
@@ -346,54 +307,6 @@ export class DocumentService extends AbstractPaperlessService<Document> {
})
}
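
The consolidated endpoint now takes the same three-field payload for every operation. A minimal sketch (not part of this diff) of what a client posts to `/api/documents/bulk_edit/`; the base URL and token are hypothetical placeholders:

```python
# A minimal sketch of the consolidated bulk_edit payload shape.
# The base URL and token below are hypothetical placeholders.
import requests

API = "http://localhost:8000/api"
HEADERS = {"Authorization": "Token <token>"}

resp = requests.post(
    f"{API}/documents/bulk_edit/",
    json={
        "documents": [3, 4],
        "method": "merge",  # every operation now routes through this field
        "parameters": {"metadata_document_id": 3, "delete_originals": True},
    },
    headers=HEADERS,
)
resp.raise_for_status()
```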
deleteDocuments(ids: number[]) {
return this.http.post(this.getResourceUrl(null, 'delete'), {
documents: ids,
})
}
reprocessDocuments(ids: number[]) {
return this.http.post(this.getResourceUrl(null, 'reprocess'), {
documents: ids,
})
}
rotateDocuments(
ids: number[],
degrees: number,
sourceMode: BulkEditSourceMode = BulkEditSourceMode.LATEST_VERSION
) {
return this.http.post(this.getResourceUrl(null, 'rotate'), {
documents: ids,
degrees,
source_mode: sourceMode,
})
}
mergeDocuments(ids: number[], request: MergeDocumentsRequest = {}) {
return this.http.post(this.getResourceUrl(null, 'merge'), {
documents: ids,
...request,
})
}
editPdfDocuments(ids: number[], request: EditPdfDocumentsRequest) {
return this.http.post(this.getResourceUrl(null, 'edit_pdf'), {
documents: ids,
...request,
})
}
removePasswordDocuments(
ids: number[],
request: RemovePasswordDocumentsRequest
) {
return this.http.post(this.getResourceUrl(null, 'remove_password'), {
documents: ids,
...request,
})
}
getSelectionData(ids: number[]): Observable<SelectionData> {
return this.http.post<SelectionData>(
this.getResourceUrl(null, 'selection_data'),

View File

@@ -62,7 +62,7 @@ export function hslToRgb(h, s, l) {
* @return Array The HSL representation
*/
export function rgbToHsl(r, g, b) {
;((r /= 255), (g /= 255), (b /= 255))
;(r /= 255), (g /= 255), (b /= 255)
var max = Math.max(r, g, b),
min = Math.min(r, g, b)
var h,

View File

@@ -5,7 +5,7 @@
export const environment = {
production: false,
apiBaseUrl: 'http://localhost:8000/api/',
apiVersion: '10',
apiVersion: '9',
appTitle: 'Paperless-ngx',
tag: 'dev',
version: 'DEVELOPMENT',

View File

@@ -151,10 +151,6 @@ class PaperlessCommand(RichCommand):
supports_progress_bar: Adds --no-progress-bar argument (default: True)
supports_multiprocessing: Adds --processes argument (default: False)
System checks are skipped by default (requires_system_checks = []) because
these commands run post-startup where checks have already been performed by
the application server. Subclasses that genuinely need checks can override.
Example usage:
class Command(PaperlessCommand):
@@ -193,8 +189,6 @@ class PaperlessCommand(RichCommand):
stats.imported += 1
"""
requires_system_checks: ClassVar[list] = []
supports_progress_bar: ClassVar[bool] = True
supports_multiprocessing: ClassVar[bool] = False

View File

@@ -1,8 +1,9 @@
import hashlib
import io
import json
import os
import shutil
import tempfile
import zipfile
from itertools import islice
from pathlib import Path
from typing import TYPE_CHECKING
@@ -93,6 +94,8 @@ class StreamingManifestWriter:
*,
compare_json: bool = False,
files_in_export_dir: "set[Path] | None" = None,
zip_file: "zipfile.ZipFile | None" = None,
zip_arcname: str | None = None,
) -> None:
self._path = path.resolve()
self._tmp_path = self._path.with_suffix(self._path.suffix + ".tmp")
@@ -100,12 +103,20 @@ class StreamingManifestWriter:
self._files_in_export_dir: set[Path] = (
files_in_export_dir if files_in_export_dir is not None else set()
)
self._zip_file = zip_file
self._zip_arcname = zip_arcname
self._zip_mode = zip_file is not None
self._file = None
self._first = True
def open(self) -> None:
self._path.parent.mkdir(parents=True, exist_ok=True)
self._file = self._tmp_path.open("w", encoding="utf-8")
if self._zip_mode:
# zipfile only allows one open write handle at a time, so buffer
# the manifest in memory and write it atomically on close()
self._file = io.StringIO()
else:
self._path.parent.mkdir(parents=True, exist_ok=True)
self._file = self._tmp_path.open("w", encoding="utf-8")
self._file.write("[")
self._first = True
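
The buffering noted in the comment above can be shown in isolation. A minimal sketch (not the project code) of why zip mode routes the manifest through `io.StringIO`: `zipfile` permits only one open write handle, so the whole manifest is committed with a single `writestr()` call:

```python
# Buffer-then-writestr pattern: accumulate the manifest in memory,
# then commit it to the archive in one writestr() call.
import io
import json
import zipfile

with zipfile.ZipFile("export.zip", "w") as zf:
    buf = io.StringIO()
    buf.write("[")
    first = True
    for record in ({"pk": n} for n in range(3)):  # stand-in records
        buf.write("\n" if first else ",\n")
        buf.write(json.dumps(record, indent=2))
        first = False
    buf.write("\n]")
    zf.writestr("manifest.json", buf.getvalue())  # one atomic archive write
```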
@@ -126,15 +137,18 @@ class StreamingManifestWriter:
if self._file is None:
return
self._file.write("\n]")
if self._zip_mode:
self._zip_file.writestr(self._zip_arcname, self._file.getvalue())
self._file.close()
self._file = None
self._finalize()
if not self._zip_mode:
self._finalize()
def discard(self) -> None:
if self._file is not None:
self._file.close()
self._file = None
if self._tmp_path.exists():
if not self._zip_mode and self._tmp_path.exists():
self._tmp_path.unlink()
def _finalize(self) -> None:
@@ -311,18 +325,13 @@ class Command(CryptMixin, PaperlessCommand):
self.files_in_export_dir: set[Path] = set()
self.exported_files: set[str] = set()
self.zip_file: zipfile.ZipFile | None = None
self._zip_dirs: set[str] = set()
# If zipping, save the original target for later and
# get a temporary directory for the target instead
temp_dir = None
self.original_target = self.target
if self.zip_export:
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
temp_dir = tempfile.TemporaryDirectory(
dir=settings.SCRATCH_DIR,
prefix="paperless-export",
)
self.target = Path(temp_dir.name).resolve()
zip_name = options["zip_name"]
self.zip_path = (self.target / zip_name).with_suffix(".zip")
self.zip_tmp_path = self.zip_path.parent / (self.zip_path.name + ".tmp")
if not self.target.exists():
raise CommandError("That path doesn't exist")
@@ -333,30 +342,53 @@ class Command(CryptMixin, PaperlessCommand):
if not os.access(self.target, os.W_OK):
raise CommandError("That path doesn't appear to be writable")
if self.zip_export:
if self.compare_checksums:
self.stdout.write(
self.style.WARNING(
"--compare-checksums is ignored when --zip is used",
),
)
if self.compare_json:
self.stdout.write(
self.style.WARNING(
"--compare-json is ignored when --zip is used",
),
)
try:
# Prevent any ongoing changes in the documents
with FileLock(settings.MEDIA_LOCK):
self.dump()
# We've written everything to the temporary directory in this case,
# now make an archive in the original target, with all files stored
if self.zip_export and temp_dir is not None:
shutil.make_archive(
self.original_target / options["zip_name"],
format="zip",
root_dir=temp_dir.name,
if self.zip_export:
self.zip_file = zipfile.ZipFile(
self.zip_tmp_path,
"w",
compression=zipfile.ZIP_DEFLATED,
allowZip64=True,
)
self.dump()
if self.zip_export and self.zip_file is not None:
self.zip_file.close()
self.zip_file = None
self.zip_tmp_path.rename(self.zip_path)
finally:
# Always clean up the temporary directory, if one was created
if self.zip_export and temp_dir is not None:
temp_dir.cleanup()
# Ensure zip_file is closed and the incomplete .tmp is removed on failure
if self.zip_file is not None:
self.zip_file.close()
self.zip_file = None
if self.zip_export and self.zip_tmp_path.exists():
self.zip_tmp_path.unlink()
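
The try/finally above implements a write-to-temp-then-rename pattern. A minimal standalone sketch of the same atomicity guarantee, with illustrative paths:

```python
# Write to a .tmp sibling, rename to the final .zip only on success,
# and remove the partial file on any failure.
import zipfile
from pathlib import Path

zip_path = Path("export.zip")
tmp_path = zip_path.with_name(zip_path.name + ".tmp")
try:
    with zipfile.ZipFile(tmp_path, "w", compression=zipfile.ZIP_DEFLATED) as zf:
        zf.writestr("manifest.json", "[]")  # stand-in for the real export
    tmp_path.rename(zip_path)  # atomic on the same filesystem
except BaseException:
    tmp_path.unlink(missing_ok=True)  # never leave a half-written archive
    raise
```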
def dump(self) -> None:
# 1. Take a snapshot of what files exist in the current export folder
for x in self.target.glob("**/*"):
if x.is_file():
self.files_in_export_dir.add(x.resolve())
# (skipped in zip mode — always write fresh, no skip/compare logic applies)
if not self.zip_export:
for x in self.target.glob("**/*"):
if x.is_file():
self.files_in_export_dir.add(x.resolve())
# 2. Create manifest, containing all correspondents, types, tags, storage paths,
# notes, documents and ui_settings
@@ -414,6 +446,8 @@ class Command(CryptMixin, PaperlessCommand):
manifest_path,
compare_json=self.compare_json,
files_in_export_dir=self.files_in_export_dir,
zip_file=self.zip_file,
zip_arcname="manifest.json",
) as writer:
with transaction.atomic():
for key, qs in manifest_key_to_object_query.items():
@@ -504,8 +538,12 @@ class Command(CryptMixin, PaperlessCommand):
self.target,
)
else:
# 5. Remove anything in the original location (before moving the zip)
for item in self.original_target.glob("*"):
# 5. Remove pre-existing files/dirs from target, keeping the
# in-progress zip (.tmp) and any prior zip at the final path
skip = {self.zip_path.resolve(), self.zip_tmp_path.resolve()}
for item in self.target.glob("*"):
if item.resolve() in skip:
continue
if item.is_dir():
shutil.rmtree(item)
else:
@@ -630,9 +668,23 @@ class Command(CryptMixin, PaperlessCommand):
if self.use_folder_prefix:
manifest_name = Path("json") / manifest_name
manifest_name = (self.target / manifest_name).resolve()
manifest_name.parent.mkdir(parents=True, exist_ok=True)
if not self.zip_export:
manifest_name.parent.mkdir(parents=True, exist_ok=True)
self.check_and_write_json(content, manifest_name)
def _ensure_zip_dirs(self, arcname: str) -> None:
"""Write directory marker entries for all parent directories of arcname.
Some zip viewers only show folder structure when explicit directory
entries exist, so we add them to avoid confusing users.
"""
parts = Path(arcname).parts[:-1]
for i in range(len(parts)):
dir_arc = "/".join(parts[: i + 1]) + "/"
if dir_arc not in self._zip_dirs:
self._zip_dirs.add(dir_arc)
self.zip_file.mkdir(dir_arc)
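
A standalone sketch of the helper's behavior, assuming Python 3.11+ for `ZipFile.mkdir()`: for an entry like `originals/2026/doc.pdf` it emits `originals/` and `originals/2026/` exactly once, deduplicating via a set:

```python
# Directory-marker entries for each parent path, written once each.
import zipfile
from pathlib import Path

seen: set[str] = set()

def ensure_dirs(zf: zipfile.ZipFile, arcname: str) -> None:
    parts = Path(arcname).parts[:-1]
    for i in range(len(parts)):
        dir_arc = "/".join(parts[: i + 1]) + "/"
        if dir_arc not in seen:
            seen.add(dir_arc)
            zf.mkdir(dir_arc)  # explicit directory entry for simple viewers

with zipfile.ZipFile("export.zip", "w") as zf:
    ensure_dirs(zf, "originals/2026/doc.pdf")
    zf.writestr("originals/2026/doc.pdf", b"%PDF-...")
```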
def check_and_write_json(
self,
content: list[dict] | dict,
@@ -645,6 +697,20 @@ class Command(CryptMixin, PaperlessCommand):
This preserves the file timestamps when no changes are made.
"""
if self.zip_export:
arcname = str(target.resolve().relative_to(self.target))
self._ensure_zip_dirs(arcname)
self.zip_file.writestr(
arcname,
json.dumps(
content,
cls=DjangoJSONEncoder,
indent=2,
ensure_ascii=False,
),
)
return
target = target.resolve()
perform_write = True
if target in self.files_in_export_dir:
@@ -683,6 +749,12 @@ class Command(CryptMixin, PaperlessCommand):
the source attributes
"""
if self.zip_export:
arcname = str(target.resolve().relative_to(self.target))
self._ensure_zip_dirs(arcname)
self.zip_file.write(source, arcname=arcname)
return
target = target.resolve()
if target in self.files_in_export_dir:
self.files_in_export_dir.remove(target)

View File

@@ -8,7 +8,6 @@ from pathlib import Path
from zipfile import ZipFile
from zipfile import is_zipfile
import ijson
from django.conf import settings
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
@@ -33,6 +32,7 @@ from documents.models import Document
from documents.models import DocumentType
from documents.models import Note
from documents.models import Tag
from documents.parsers import run_convert
from documents.settings import EXPORTER_ARCHIVE_NAME
from documents.settings import EXPORTER_CRYPTO_SETTINGS_NAME
from documents.settings import EXPORTER_FILE_NAME
@@ -46,15 +46,6 @@ if settings.AUDIT_LOG_ENABLED:
from auditlog.registry import auditlog
def iter_manifest_records(path: Path) -> Generator[dict, None, None]:
"""Yield records one at a time from a manifest JSON array via ijson."""
try:
with path.open("rb") as f:
yield from ijson.items(f, "item")
except ijson.JSONError as e:
raise CommandError(f"Failed to parse manifest file {path}: {e}") from e
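
The helper above streams records instead of loading the whole array. A minimal sketch of the pattern on its own, with an illustrative path: `ijson` yields one record at a time from the JSON array, so memory stays flat even for very large manifests:

```python
# Stream records from a manifest JSON array one at a time via ijson.
from pathlib import Path

import ijson

def iter_records(path: Path):
    with path.open("rb") as f:
        yield from ijson.items(f, "item")  # each top-level array element

for record in iter_records(Path("manifest.json")):
    if record["model"] == "documents.document":
        ...  # validate or copy files without holding the whole list
```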
@contextmanager
def disable_signal(sig, receiver, sender, *, weak: bool | None = None) -> Generator:
try:
@@ -152,9 +143,14 @@ class Command(CryptMixin, PaperlessCommand):
Loads manifest data from the various JSON files for parsing and loading the database
"""
main_manifest_path: Path = self.source / "manifest.json"
with main_manifest_path.open() as infile:
self.manifest = json.load(infile)
self.manifest_paths.append(main_manifest_path)
for file in Path(self.source).glob("**/*-manifest.json"):
with file.open() as infile:
self.manifest += json.load(infile)
self.manifest_paths.append(file)
def load_metadata(self) -> None:
@@ -205,7 +201,7 @@ class Command(CryptMixin, PaperlessCommand):
ContentType.objects.all().delete()
Permission.objects.all().delete()
for manifest_path in self.manifest_paths:
call_command("loaddata", manifest_path, skip_checks=True)
call_command("loaddata", manifest_path)
except (FieldDoesNotExist, DeserializationError, IntegrityError) as e:
self.stdout.write(self.style.ERROR("Database import failed"))
if (
@@ -235,6 +231,7 @@ class Command(CryptMixin, PaperlessCommand):
self.version: str | None = None
self.salt: str | None = None
self.manifest_paths = []
self.manifest = []
# Create a temporary directory to extract a zip file into; it is used even if the supplied source is not a zip file, to keep the code simpler.
with tempfile.TemporaryDirectory() as tmp_dir:
@@ -294,9 +291,6 @@ class Command(CryptMixin, PaperlessCommand):
else:
self.stdout.write(self.style.NOTICE("Data only import completed"))
for tmp in getattr(self, "_decrypted_tmp_paths", []):
tmp.unlink(missing_ok=True)
self.stdout.write("Updating search index...")
call_command(
"document_index",
@@ -349,12 +343,11 @@ class Command(CryptMixin, PaperlessCommand):
) from e
self.stdout.write("Checking the manifest")
for manifest_path in self.manifest_paths:
for record in iter_manifest_records(manifest_path):
# Only check if the document files exist if this is not data only
# We don't care about documents for a data only import
if not self.data_only and record["model"] == "documents.document":
check_document_validity(record)
for record in self.manifest:
# Only check if the document files exist if this is not data only
# We don't care about documents for a data only import
if not self.data_only and record["model"] == "documents.document":
check_document_validity(record)
def _import_files_from_manifest(self) -> None:
settings.ORIGINALS_DIR.mkdir(parents=True, exist_ok=True)
@@ -363,31 +356,23 @@ class Command(CryptMixin, PaperlessCommand):
self.stdout.write("Copy files into paperless...")
document_records = [
{
"pk": record["pk"],
EXPORTER_FILE_NAME: record[EXPORTER_FILE_NAME],
EXPORTER_THUMBNAIL_NAME: record.get(EXPORTER_THUMBNAIL_NAME),
EXPORTER_ARCHIVE_NAME: record.get(EXPORTER_ARCHIVE_NAME),
}
for manifest_path in self.manifest_paths
for record in iter_manifest_records(manifest_path)
if record["model"] == "documents.document"
]
manifest_documents = list(
filter(lambda r: r["model"] == "documents.document", self.manifest),
)
for record in self.track(document_records, description="Copying files..."):
for record in self.track(manifest_documents, description="Copying files..."):
document = Document.objects.get(pk=record["pk"])
doc_file = record[EXPORTER_FILE_NAME]
document_path = self.source / doc_file
if record[EXPORTER_THUMBNAIL_NAME]:
if EXPORTER_THUMBNAIL_NAME in record:
thumb_file = record[EXPORTER_THUMBNAIL_NAME]
thumbnail_path = (self.source / thumb_file).resolve()
else:
thumbnail_path = None
if record[EXPORTER_ARCHIVE_NAME]:
if EXPORTER_ARCHIVE_NAME in record:
archive_file = record[EXPORTER_ARCHIVE_NAME]
archive_path = self.source / archive_file
else:
@@ -402,10 +387,22 @@ class Command(CryptMixin, PaperlessCommand):
copy_file_with_basic_stats(document_path, document.source_path)
if thumbnail_path:
copy_file_with_basic_stats(
thumbnail_path,
document.thumbnail_path,
)
if thumbnail_path.suffix in {".png", ".PNG"}:
run_convert(
density=300,
scale="500x5000>",
alpha="remove",
strip=True,
trim=False,
auto_orient=True,
input_file=f"{thumbnail_path}[0]",
output_file=str(document.thumbnail_path),
)
else:
copy_file_with_basic_stats(
thumbnail_path,
document.thumbnail_path,
)
if archive_path:
create_source_path_directory(document.archive_path)
@@ -416,43 +413,33 @@ class Command(CryptMixin, PaperlessCommand):
document.save()
def _decrypt_record_if_needed(self, record: dict) -> dict:
fields = self.CRYPT_FIELDS_BY_MODEL.get(record.get("model", ""))
if fields:
for field in fields:
if record["fields"].get(field):
record["fields"][field] = self.decrypt_string(
value=record["fields"][field],
)
return record
def decrypt_secret_fields(self) -> None:
"""
The converse decryption of some fields out of the export before importing to database.
Streams records from each manifest path and writes decrypted content to a temp file.
The converse decryption of some fields out of the export before importing to database
"""
if not self.passphrase:
return
# Salt has been loaded from metadata.json at this point, so it cannot be None
self.setup_crypto(passphrase=self.passphrase, salt=self.salt)
self._decrypted_tmp_paths: list[Path] = []
new_paths: list[Path] = []
for manifest_path in self.manifest_paths:
tmp = manifest_path.with_name(manifest_path.stem + ".decrypted.json")
with tmp.open("w", encoding="utf-8") as out:
out.write("[\n")
first = True
for record in iter_manifest_records(manifest_path):
if not first:
out.write(",\n")
json.dump(
self._decrypt_record_if_needed(record),
out,
indent=2,
ensure_ascii=False,
)
first = False
out.write("\n]\n")
self._decrypted_tmp_paths.append(tmp)
new_paths.append(tmp)
self.manifest_paths = new_paths
if self.passphrase:
# Salt has been loaded from metadata.json at this point, so it cannot be None
self.setup_crypto(passphrase=self.passphrase, salt=self.salt)
had_at_least_one_record = False
for crypt_config in self.CRYPT_FIELDS:
importer_model: str = crypt_config["model_name"]
crypt_fields: str = crypt_config["fields"]
for record in filter(
lambda x: x["model"] == importer_model,
self.manifest,
):
had_at_least_one_record = True
for field in crypt_fields:
if record["fields"][field]:
record["fields"][field] = self.decrypt_string(
value=record["fields"][field],
)
if had_at_least_one_record:
# It's annoying, but the DB is loaded from the JSON directly
# Maybe could change that in the future?
(self.source / "manifest.json").write_text(
json.dumps(self.manifest, indent=2, ensure_ascii=False),
)

View File

@@ -0,0 +1,22 @@
import sys
from django.core.management.commands.loaddata import Command as LoadDataCommand
# This class is used to migrate data between databases
# That's difficult to test
class Command(LoadDataCommand): # pragma: no cover
"""
Allow the loading of data from standard in. Sourced originally from:
https://gist.github.com/bmispelon/ad5a2c333443b3a1d051 (MIT licensed)
"""
def parse_name(self, fixture_name):
self.compression_formats["stdin"] = (lambda x, y: sys.stdin, None)
if fixture_name == "-":
return "-", "json", "stdin"
def find_fixtures(self, fixture_label):
if fixture_label == "-":
return [("-", None, "-")]
return super().find_fixtures(fixture_label)

View File

@@ -169,7 +169,7 @@ def match_storage_paths(document: Document, classifier: DocumentClassifier, user
def matches(matching_model: MatchingModel, document: Document):
search_flags = 0
document_content = document.get_effective_content() or ""
document_content = document.content
# Check that match is not empty
if not matching_model.match.strip():

View File

@@ -0,0 +1,18 @@
# Generated by Django 5.2.9 on 2026-01-20 20:06
from django.db import migrations
from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0002_squashed"),
]
operations = [
migrations.AddField(
model_name="workflowaction",
name="order",
field=models.PositiveIntegerField(default=0, verbose_name="order"),
),
]

View File

@@ -5,7 +5,7 @@ from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("documents", "0002_squashed"),
("documents", "0003_workflowaction_order"),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0003_remove_document_storage_type"),
("documents", "0004_remove_document_storage_type"),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0004_workflowtrigger_filter_has_any_correspondents_and_more"),
("documents", "0005_workflowtrigger_filter_has_any_correspondents_and_more"),
]
operations = [

View File

@@ -7,7 +7,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0005_alter_document_checksum_unique"),
("documents", "0006_alter_document_checksum_unique"),
]
operations = [

View File

@@ -46,7 +46,7 @@ def revoke_share_link_bundle_permissions(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("documents", "0006_document_content_length"),
("documents", "0007_document_content_length"),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0007_sharelinkbundle"),
("documents", "0008_sharelinkbundle"),
]
operations = [

View File

@@ -7,7 +7,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0008_workflowaction_passwords_alter_workflowaction_type"),
("documents", "0009_workflowaction_passwords_alter_workflowaction_type"),
]
operations = [

View File

@@ -7,7 +7,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0009_alter_document_content_length"),
("documents", "0010_alter_document_content_length"),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0010_optimize_integer_field_sizes"),
("documents", "0011_optimize_integer_field_sizes"),
]
operations = [

View File

@@ -7,7 +7,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0011_alter_workflowaction_type"),
("documents", "0012_alter_workflowaction_type"),
]
operations = [

View File

@@ -6,7 +6,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0012_document_root_document"),
("documents", "0013_document_root_document"),
]
operations = [

View File

@@ -124,7 +124,7 @@ def _restore_visibility_fields(apps, schema_editor):
class Migration(migrations.Migration):
dependencies = [
("documents", "0013_alter_paperlesstask_task_name"),
("documents", "0014_alter_paperlesstask_task_name"),
]
operations = [

View File

@@ -7,7 +7,7 @@ from django.db import models
class Migration(migrations.Migration):
dependencies = [
("documents", "0014_savedview_visibility_to_ui_settings"),
("documents", "0015_savedview_visibility_to_ui_settings"),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]

View File

@@ -361,42 +361,6 @@ class Document(SoftDeleteModel, ModelWithOwner): # type: ignore[django-manager-
res += f" {self.title}"
return res
def get_effective_content(self) -> str | None:
"""
Returns the effective content for the document.
For root documents, this is the latest version's content when available.
For version documents, this is always the document's own content.
If the queryset already annotated ``effective_content``, that value is used.
"""
if hasattr(self, "effective_content"):
return getattr(self, "effective_content")
if self.root_document_id is not None or self.pk is None:
return self.content
prefetched_cache = getattr(self, "_prefetched_objects_cache", None)
prefetched_versions = (
prefetched_cache.get("versions")
if isinstance(prefetched_cache, dict)
else None
)
if prefetched_versions:
latest_prefetched = max(prefetched_versions, key=lambda doc: doc.id)
return latest_prefetched.content
latest_version_content = (
Document.objects.filter(root_document=self)
.order_by("-id")
.values_list("content", flat=True)
.first()
)
return (
latest_version_content
if latest_version_content is not None
else self.content
)
@property
def suggestion_content(self):
"""
@@ -409,21 +373,15 @@ class Document(SoftDeleteModel, ModelWithOwner): # type: ignore[django-manager-
This improves processing speed for large documents while keeping
enough context for accurate suggestions.
"""
effective_content = self.get_effective_content()
if not effective_content or len(effective_content) <= 1200000:
return effective_content
if not self.content or len(self.content) <= 1200000:
return self.content
else:
# Use 80% from the start and 20% from the end
# to preserve both opening and closing context.
head_len = 800000
tail_len = 200000
return " ".join(
(
effective_content[:head_len],
effective_content[-tail_len:],
),
)
return " ".join((self.content[:head_len], self.content[-tail_len:]))
@property
def source_path(self) -> Path:

View File

@@ -703,6 +703,15 @@ class StoragePathField(serializers.PrimaryKeyRelatedField):
class CustomFieldSerializer(serializers.ModelSerializer):
def __init__(self, *args, **kwargs):
context = kwargs.get("context")
self.api_version = int(
context.get("request").version
if context and context.get("request")
else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)
super().__init__(*args, **kwargs)
data_type = serializers.ChoiceField(
choices=CustomField.FieldDataType,
read_only=False,
@@ -782,6 +791,38 @@ class CustomFieldSerializer(serializers.ModelSerializer):
)
return super().validate(attrs)
def to_internal_value(self, data):
ret = super().to_internal_value(data)
if (
self.api_version < 7
and ret.get("data_type", "") == CustomField.FieldDataType.SELECT
and isinstance(ret.get("extra_data", {}).get("select_options"), list)
):
ret["extra_data"]["select_options"] = [
{
"label": option,
"id": get_random_string(length=16),
}
for option in ret["extra_data"]["select_options"]
]
return ret
def to_representation(self, instance):
ret = super().to_representation(instance)
if (
self.api_version < 7
and instance.data_type == CustomField.FieldDataType.SELECT
):
# Convert the select options with ids to a list of strings
ret["extra_data"]["select_options"] = [
option["label"] for option in ret["extra_data"]["select_options"]
]
return ret
class ReadWriteSerializerMethodField(serializers.SerializerMethodField):
"""
@@ -896,6 +937,50 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
return data
def get_api_version(self):
return int(
self.context.get("request").version
if self.context.get("request")
else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)
def to_internal_value(self, data):
ret = super().to_internal_value(data)
if (
self.get_api_version() < 7
and ret.get("field").data_type == CustomField.FieldDataType.SELECT
and ret.get("value") is not None
):
# Convert the index of the option in the field.extra_data["select_options"]
# list to the option's unique id
ret["value"] = ret.get("field").extra_data["select_options"][ret["value"]][
"id"
]
return ret
def to_representation(self, instance):
ret = super().to_representation(instance)
if (
self.get_api_version() < 7
and instance.field.data_type == CustomField.FieldDataType.SELECT
):
# return the index of the option in the field.extra_data["select_options"] list
ret["value"] = next(
(
idx
for idx, option in enumerate(
instance.field.extra_data["select_options"],
)
if option["id"] == instance.value
),
None,
)
return ret
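
A minimal standalone sketch of the two conversions these hooks perform for pre-v7 clients, using illustrative option data: older clients address an option by its list index, while the stored form uses `{"label", "id"}` objects:

```python
# Map index -> id on input and id -> index on output for select options.
select_options = [
    {"label": "Red", "id": "aBcDeFgHiJkLmNoP"},
    {"label": "Blue", "id": "QrStUvWxYz012345"},
]

def index_to_id(idx: int) -> str:  # the to_internal_value direction
    return select_options[idx]["id"]

def id_to_index(option_id: str) -> int | None:  # the to_representation direction
    return next(
        (i for i, opt in enumerate(select_options) if opt["id"] == option_id),
        None,
    )

assert index_to_id(1) == "QrStUvWxYz012345"
assert id_to_index("aBcDeFgHiJkLmNoP") == 0
```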
class Meta:
model = CustomFieldInstance
fields = [
@@ -919,6 +1004,20 @@ class NotesSerializer(serializers.ModelSerializer):
fields = ["id", "note", "created", "user"]
ordering = ["-created"]
def to_representation(self, instance):
ret = super().to_representation(instance)
request = self.context.get("request")
api_version = int(
request.version if request else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)
if api_version < 8 and "user" in ret:
user_id = ret["user"]["id"]
ret["user"] = user_id
return ret
def _get_viewable_duplicates(
document: Document,
@@ -1073,6 +1172,22 @@ class DocumentSerializer(
doc["content"] = getattr(instance, "effective_content") or ""
if self.truncate_content and "content" in self.fields:
doc["content"] = doc.get("content")[0:550]
request = self.context.get("request")
api_version = int(
request.version if request else settings.REST_FRAMEWORK["DEFAULT_VERSION"],
)
if api_version < 9 and "created" in self.fields:
# provide created as a datetime for backwards compatibility
from django.utils import timezone
doc["created"] = timezone.make_aware(
datetime.combine(
instance.created,
datetime.min.time(),
),
).isoformat()
return doc
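
A minimal sketch of the `created` shim, assuming a configured Django settings module (values illustrative): the stored date is expanded to a timezone-aware midnight datetime so pre-v9 clients keep receiving an ISO datetime string:

```python
# Expand a stored date to an aware midnight datetime for older clients.
from datetime import date, datetime

from django.utils import timezone  # requires configured Django settings

created = date(2026, 1, 20)
legacy_created = timezone.make_aware(
    datetime.combine(created, datetime.min.time()),
).isoformat()
# e.g. "2026-01-20T00:00:00+01:00", depending on TIME_ZONE
```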
def to_internal_value(self, data):
@@ -1540,124 +1655,11 @@ class DocumentListSerializer(serializers.Serializer):
return documents
class SourceModeValidationMixin:
def validate_source_mode(self, source_mode: str) -> str:
if source_mode not in bulk_edit.SourceModeChoices.__dict__.values():
raise serializers.ValidationError("Invalid source_mode")
return source_mode
class RotateDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):
degrees = serializers.IntegerField(required=True)
source_mode = serializers.CharField(
required=False,
default=bulk_edit.SourceModeChoices.LATEST_VERSION,
)
class MergeDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):
metadata_document_id = serializers.IntegerField(
required=False,
allow_null=True,
)
delete_originals = serializers.BooleanField(required=False, default=False)
archive_fallback = serializers.BooleanField(required=False, default=False)
source_mode = serializers.CharField(
required=False,
default=bulk_edit.SourceModeChoices.LATEST_VERSION,
)
class EditPdfDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):
operations = serializers.ListField(required=True)
delete_original = serializers.BooleanField(required=False, default=False)
update_document = serializers.BooleanField(required=False, default=False)
include_metadata = serializers.BooleanField(required=False, default=True)
source_mode = serializers.CharField(
required=False,
default=bulk_edit.SourceModeChoices.LATEST_VERSION,
)
def validate(self, attrs):
documents = attrs["documents"]
if len(documents) > 1:
raise serializers.ValidationError(
"Edit PDF method only supports one document",
)
operations = attrs["operations"]
if not isinstance(operations, list):
raise serializers.ValidationError("operations must be a list")
for op in operations:
if not isinstance(op, dict):
raise serializers.ValidationError("invalid operation entry")
if "page" not in op or not isinstance(op["page"], int):
raise serializers.ValidationError("page must be an integer")
if "rotate" in op and not isinstance(op["rotate"], int):
raise serializers.ValidationError("rotate must be an integer")
if "doc" in op and not isinstance(op["doc"], int):
raise serializers.ValidationError("doc must be an integer")
if attrs["update_document"]:
max_idx = max(op.get("doc", 0) for op in operations)
if max_idx > 0:
raise serializers.ValidationError(
"update_document only allowed with a single output document",
)
doc = Document.objects.get(id=documents[0])
if doc.page_count:
for op in operations:
if op["page"] < 1 or op["page"] > doc.page_count:
raise serializers.ValidationError(
f"Page {op['page']} is out of bounds for document with {doc.page_count} pages.",
)
return attrs
class RemovePasswordDocumentsSerializer(
DocumentListSerializer,
SourceModeValidationMixin,
):
password = serializers.CharField(required=True)
update_document = serializers.BooleanField(required=False, default=False)
delete_original = serializers.BooleanField(required=False, default=False)
include_metadata = serializers.BooleanField(required=False, default=True)
source_mode = serializers.CharField(
required=False,
default=bulk_edit.SourceModeChoices.LATEST_VERSION,
)
class DeleteDocumentsSerializer(DocumentListSerializer):
pass
class ReprocessDocumentsSerializer(DocumentListSerializer):
pass
class BulkEditSerializer(
SerializerWithPerms,
DocumentListSerializer,
SetPermissionsMixin,
SourceModeValidationMixin,
):
# TODO: remove this and related backwards compatibility code when API v9 is dropped
# split, delete_pages can be removed entirely
MOVED_DOCUMENT_ACTION_ENDPOINTS = {
"delete": "/api/documents/delete/",
"reprocess": "/api/documents/reprocess/",
"rotate": "/api/documents/rotate/",
"merge": "/api/documents/merge/",
"edit_pdf": "/api/documents/edit_pdf/",
"remove_password": "/api/documents/remove_password/",
"split": "/api/documents/edit_pdf/",
"delete_pages": "/api/documents/edit_pdf/",
}
LEGACY_DOCUMENT_ACTION_METHODS = tuple(MOVED_DOCUMENT_ACTION_ENDPOINTS.keys())
method = serializers.ChoiceField(
choices=[
"set_correspondent",
@@ -1667,8 +1669,15 @@ class BulkEditSerializer(
"remove_tag",
"modify_tags",
"modify_custom_fields",
"delete",
"reprocess",
"set_permissions",
*LEGACY_DOCUMENT_ACTION_METHODS,
"rotate",
"merge",
"split",
"delete_pages",
"edit_pdf",
"remove_password",
],
label="Method",
write_only=True,
@@ -1746,7 +1755,8 @@ class BulkEditSerializer(
return bulk_edit.edit_pdf
elif method == "remove_password":
return bulk_edit.remove_password
else:
else: # pragma: no cover
# This will never happen as it is handled by the ChoiceField
raise serializers.ValidationError("Unsupported method.")
def _validate_parameters_tags(self, parameters) -> None:
@@ -1856,7 +1866,9 @@ class BulkEditSerializer(
"source_mode",
bulk_edit.SourceModeChoices.LATEST_VERSION,
)
parameters["source_mode"] = self.validate_source_mode(source_mode)
if source_mode not in bulk_edit.SourceModeChoices.__dict__.values():
raise serializers.ValidationError("Invalid source_mode")
parameters["source_mode"] = source_mode
def _validate_parameters_split(self, parameters) -> None:
if "pages" not in parameters:

View File

@@ -422,34 +422,6 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(len(kwargs), 0)
@mock.patch("documents.views.bulk_edit.delete")
def test_delete_documents_endpoint(self, m) -> None:
self.setup_mock(m, "delete")
response = self.client.post(
"/api/documents/delete/",
json.dumps({"documents": [self.doc1.id]}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(len(kwargs), 0)
@mock.patch("documents.views.bulk_edit.reprocess")
def test_reprocess_documents_endpoint(self, m) -> None:
self.setup_mock(m, "reprocess")
response = self.client.post(
"/api/documents/reprocess/",
json.dumps({"documents": [self.doc1.id]}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(len(kwargs), 0)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
def test_api_set_storage_path(self, m) -> None:
"""
@@ -905,7 +877,7 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(kwargs["merge"], True)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
@mock.patch("documents.views.bulk_edit.merge")
@mock.patch("documents.serialisers.bulk_edit.merge")
def test_insufficient_global_perms(self, mock_merge, mock_set_storage) -> None:
"""
GIVEN:
@@ -940,11 +912,12 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
mock_set_storage.assert_not_called()
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"metadata_document_id": self.doc1.id,
"method": "merge",
"parameters": {"metadata_document_id": self.doc1.id},
},
),
content_type="application/json",
@@ -954,12 +927,15 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
mock_merge.assert_not_called()
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"metadata_document_id": self.doc1.id,
"delete_originals": True,
"method": "merge",
"parameters": {
"metadata_document_id": self.doc1.id,
"delete_originals": True,
},
},
),
content_type="application/json",
@@ -1076,117 +1052,84 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
m.assert_called_once()
@mock.patch("documents.views.bulk_edit.rotate")
@mock.patch("documents.serialisers.bulk_edit.rotate")
def test_rotate(self, m) -> None:
self.setup_mock(m, "rotate")
response = self.client.post(
"/api/documents/rotate/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"degrees": 90,
"method": "rotate",
"parameters": {"degrees": 90},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id, self.doc3.id])
self.assertEqual(kwargs["degrees"], 90)
self.assertEqual(kwargs["source_mode"], "latest_version")
self.assertEqual(kwargs["user"], self.user)
@mock.patch("documents.views.bulk_edit.rotate")
@mock.patch("documents.serialisers.bulk_edit.rotate")
def test_rotate_invalid_params(self, m) -> None:
response = self.client.post(
"/api/documents/rotate/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"degrees": "foo",
"method": "rotate",
"parameters": {"degrees": "foo"},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(
"/api/documents/rotate/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"degrees": 90.5,
"method": "rotate",
"parameters": {"degrees": 90.5},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
m.assert_not_called()
@mock.patch("documents.views.bulk_edit.rotate")
def test_rotate_insufficient_permissions(self, m) -> None:
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
user1.user_permissions.add(*Permission.objects.all())
user1.save()
self.client.force_authenticate(user=user1)
self.setup_mock(m, "rotate")
response = self.client.post(
"/api/documents/rotate/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id],
"degrees": 90,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
m.assert_not_called()
self.assertEqual(response.content, b"Insufficient permissions")
response = self.client.post(
"/api/documents/rotate/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"degrees": 90,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
@mock.patch("documents.views.bulk_edit.merge")
@mock.patch("documents.serialisers.bulk_edit.merge")
def test_merge(self, m) -> None:
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"metadata_document_id": self.doc3.id,
"method": "merge",
"parameters": {"metadata_document_id": self.doc3.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id, self.doc3.id])
self.assertEqual(kwargs["metadata_document_id"], self.doc3.id)
self.assertEqual(kwargs["source_mode"], "latest_version")
self.assertEqual(kwargs["user"], self.user)
@mock.patch("documents.views.bulk_edit.merge")
@mock.patch("documents.serialisers.bulk_edit.merge")
def test_merge_and_delete_insufficient_permissions(self, m) -> None:
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
@@ -1197,12 +1140,15 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id],
"metadata_document_id": self.doc2.id,
"delete_originals": True,
"method": "merge",
"parameters": {
"metadata_document_id": self.doc2.id,
"delete_originals": True,
},
},
),
content_type="application/json",
@@ -1213,12 +1159,15 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.content, b"Insufficient permissions")
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"metadata_document_id": self.doc2.id,
"delete_originals": True,
"method": "merge",
"parameters": {
"metadata_document_id": self.doc2.id,
"delete_originals": True,
},
},
),
content_type="application/json",
@@ -1227,15 +1176,27 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
@mock.patch("documents.views.bulk_edit.merge")
@mock.patch("documents.serialisers.bulk_edit.merge")
def test_merge_invalid_parameters(self, m) -> None:
"""
GIVEN:
- A request to merge documents
- The parameters are invalid
WHEN:
- The API is called
THEN:
- The API fails with a correct error code
"""
self.setup_mock(m, "merge")
response = self.client.post(
"/api/documents/merge/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id],
"delete_originals": "not_boolean",
"method": "merge",
"parameters": {
"delete_originals": "not_boolean",
},
},
),
content_type="application/json",
@@ -1244,67 +1205,207 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
m.assert_not_called()
def test_bulk_edit_allows_legacy_file_methods_with_warning(self) -> None:
method_payloads = {
"delete": {},
"reprocess": {},
"rotate": {"degrees": 90},
"merge": {"metadata_document_id": self.doc2.id},
"edit_pdf": {"operations": [{"page": 1}]},
"remove_password": {"password": "secret"},
"split": {"pages": "1,2-4"},
"delete_pages": {"pages": [1, 2]},
}
for version in (9, 10):
for method, parameters in method_payloads.items():
with self.subTest(method=method, version=version):
with mock.patch(
f"documents.views.bulk_edit.{method}",
) as mocked_method:
self.setup_mock(mocked_method, method)
with self.assertLogs("paperless.api", level="WARNING") as logs:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": method,
"parameters": parameters,
},
),
content_type="application/json",
headers={
"Accept": f"application/json; version={version}",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
mocked_method.assert_called_once()
self.assertTrue(
any(
"Deprecated bulk_edit method" in entry
and f"'{method}'" in entry
for entry in logs.output
),
)
@mock.patch("documents.views.bulk_edit.edit_pdf")
def test_edit_pdf(self, m) -> None:
self.setup_mock(m, "edit_pdf")
@mock.patch("documents.serialisers.bulk_edit.split")
def test_split(self, m) -> None:
self.setup_mock(m, "split")
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 1}],
"source_mode": "explicit_selection",
"method": "split",
"parameters": {"pages": "1,2-4,5-6,7"},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id])
self.assertEqual(kwargs["pages"], [[1], [2, 3, 4], [5, 6], [7]])
self.assertEqual(kwargs["user"], self.user)
def test_split_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "split",
"parameters": {}, # pages not specified
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"pages not specified", response.content)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "split",
"parameters": {"pages": "1:7"}, # wrong format
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"invalid pages specified", response.content)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [
self.doc1.id,
self.doc2.id,
], # only one document supported
"method": "split",
"parameters": {"pages": "1-2,3-7"}, # wrong format
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"Split method only supports one document", response.content)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "split",
"parameters": {
"pages": "1",
"delete_originals": "notabool",
}, # not a bool
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"delete_originals must be a boolean", response.content)
@mock.patch("documents.serialisers.bulk_edit.delete_pages")
def test_delete_pages(self, m) -> None:
self.setup_mock(m, "delete_pages")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "delete_pages",
"parameters": {"pages": [1, 2, 3, 4]},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id])
self.assertEqual(kwargs["pages"], [1, 2, 3, 4])
def test_delete_pages_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [
self.doc1.id,
self.doc2.id,
], # only one document supported
"method": "delete_pages",
"parameters": {
"pages": [1, 2, 3, 4],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(
b"Delete pages method only supports one document",
response.content,
)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "delete_pages",
"parameters": {}, # pages not specified
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"pages not specified", response.content)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "delete_pages",
"parameters": {"pages": "1-3"}, # not a list
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"pages must be a list", response.content)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "delete_pages",
"parameters": {"pages": ["1-3"]}, # not ints
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"pages must be a list of integers", response.content)
@mock.patch("documents.serialisers.bulk_edit.edit_pdf")
def test_edit_pdf(self, m) -> None:
self.setup_mock(m, "edit_pdf")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {
"operations": [{"page": 1}],
"source_mode": "explicit_selection",
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id])
@@ -1313,12 +1414,14 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(kwargs["user"], self.user)
def test_edit_pdf_invalid_params(self) -> None:
# multiple documents
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"operations": [{"page": 1}],
"method": "edit_pdf",
"parameters": {"operations": [{"page": 1}]},
},
),
content_type="application/json",
@@ -1326,25 +1429,44 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"Edit PDF method only supports one document", response.content)
# no operations specified
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": "not_a_list",
"method": "edit_pdf",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"Expected a list of items", response.content)
self.assertIn(b"operations not specified", response.content)
# operations not a list
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": ["invalid_operation"],
"method": "edit_pdf",
"parameters": {"operations": "not_a_list"},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"operations must be a list", response.content)
# invalid operation
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {"operations": ["invalid_operation"]},
},
),
content_type="application/json",
@@ -1352,12 +1474,14 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"invalid operation entry", response.content)
# page not an int
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": "not_an_int"}],
"method": "edit_pdf",
"parameters": {"operations": [{"page": "not_an_int"}]},
},
),
content_type="application/json",
@@ -1365,12 +1489,14 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"page must be an integer", response.content)
# rotate not an int
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 1, "rotate": "not_an_int"}],
"method": "edit_pdf",
"parameters": {"operations": [{"page": 1, "rotate": "not_an_int"}]},
},
),
content_type="application/json",
@@ -1378,12 +1504,14 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"rotate must be an integer", response.content)
# doc not an int
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 1, "doc": "not_an_int"}],
"method": "edit_pdf",
"parameters": {"operations": [{"page": 1, "doc": "not_an_int"}]},
},
),
content_type="application/json",
@@ -1391,13 +1519,53 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"doc must be an integer", response.content)
# update_document not a boolean
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"update_document": True,
"operations": [{"page": 1, "doc": 1}, {"page": 2, "doc": 2}],
"method": "edit_pdf",
"parameters": {
"update_document": "not_a_bool",
"operations": [{"page": 1}],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"update_document must be a boolean", response.content)
# include_metadata not a boolean
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {
"include_metadata": "not_a_bool",
"operations": [{"page": 1}],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"include_metadata must be a boolean", response.content)
# update_document True but output would be multiple documents
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "edit_pdf",
"parameters": {
"update_document": True,
"operations": [{"page": 1, "doc": 1}, {"page": 2, "doc": 2}],
},
},
),
content_type="application/json",
@@ -1408,13 +1576,17 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
response.content,
)
# invalid source mode
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 1}],
"source_mode": "not_a_mode",
"method": "edit_pdf",
"parameters": {
"operations": [{"page": 1}],
"source_mode": "not_a_mode",
},
},
),
content_type="application/json",
@@ -1422,70 +1594,42 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"Invalid source_mode", response.content)
@mock.patch("documents.views.bulk_edit.edit_pdf")
@mock.patch("documents.serialisers.bulk_edit.edit_pdf")
def test_edit_pdf_page_out_of_bounds(self, m) -> None:
"""
GIVEN:
- API data for editing a PDF
- The page number is out of bounds
WHEN:
- API is called
THEN:
- The API fails with a correct error code
"""
self.setup_mock(m, "edit_pdf")
response = self.client.post(
"/api/documents/edit_pdf/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 99}],
"method": "edit_pdf",
"parameters": {"operations": [{"page": 99}]},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"out of bounds", response.content)
m.assert_not_called()
@mock.patch("documents.views.bulk_edit.edit_pdf")
def test_edit_pdf_insufficient_permissions(self, m) -> None:
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
user1.user_permissions.add(*Permission.objects.all())
user1.save()
self.client.force_authenticate(user=user1)
self.setup_mock(m, "edit_pdf")
response = self.client.post(
"/api/documents/edit_pdf/",
json.dumps(
{
"documents": [self.doc1.id],
"operations": [{"page": 1}],
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
m.assert_not_called()
self.assertEqual(response.content, b"Insufficient permissions")
response = self.client.post(
"/api/documents/edit_pdf/",
json.dumps(
{
"documents": [self.doc2.id],
"operations": [{"page": 1}],
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
@mock.patch("documents.views.bulk_edit.remove_password")
@mock.patch("documents.serialisers.bulk_edit.remove_password")
def test_remove_password(self, m) -> None:
self.setup_mock(m, "remove_password")
response = self.client.post(
"/api/documents/remove_password/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"password": "secret",
"update_document": True,
"method": "remove_password",
"parameters": {"password": "secret", "update_document": True},
},
),
content_type="application/json",
@@ -1497,69 +1641,36 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
self.assertCountEqual(args[0], [self.doc2.id])
self.assertEqual(kwargs["password"], "secret")
self.assertTrue(kwargs["update_document"])
self.assertEqual(kwargs["source_mode"], "latest_version")
self.assertEqual(kwargs["user"], self.user)
def test_remove_password_invalid_params(self) -> None:
response = self.client.post(
"/api/documents/remove_password/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "remove_password",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertIn(b"password not specified", response.content)
response = self.client.post(
"/api/documents/remove_password/",
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"password": 123,
"method": "remove_password",
"parameters": {"password": 123},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@mock.patch("documents.views.bulk_edit.remove_password")
def test_remove_password_insufficient_permissions(self, m) -> None:
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
user1.user_permissions.add(*Permission.objects.all())
user1.save()
self.client.force_authenticate(user=user1)
self.setup_mock(m, "remove_password")
response = self.client.post(
"/api/documents/remove_password/",
json.dumps(
{
"documents": [self.doc1.id],
"password": "secret",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
m.assert_not_called()
self.assertEqual(response.content, b"Insufficient permissions")
response = self.client.post(
"/api/documents/remove_password/",
json.dumps(
{
"documents": [self.doc2.id],
"password": "secret",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
self.assertIn(b"password must be a string", response.content)
@override_settings(AUDIT_LOG_ENABLED=True)
def test_bulk_edit_audit_log_enabled_simple_field(self) -> None:

View File

@@ -323,6 +323,113 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
mock_delay.assert_called_once_with(cf_select)
def test_custom_field_select_old_version(self) -> None:
"""
GIVEN:
- Nothing
WHEN:
- API post request is made for custom fields with api version header < 7
- API get request is made for custom fields with api version header < 7
THEN:
- The select options are created with unique ids
- The select options are returned in the old format
"""
resp = self.client.post(
self.ENDPOINT,
headers={"Accept": "application/json; version=6"},
data=json.dumps(
{
"data_type": "select",
"name": "Select Field",
"extra_data": {
"select_options": [
"Option 1",
"Option 2",
],
},
},
),
content_type="application/json",
)
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
field = CustomField.objects.get(name="Select Field")
self.assertEqual(
field.extra_data["select_options"],
[
{"label": "Option 1", "id": ANY},
{"label": "Option 2", "id": ANY},
],
)
resp = self.client.get(
f"{self.ENDPOINT}{field.id}/",
headers={"Accept": "application/json; version=6"},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
data = resp.json()
self.assertEqual(
data["extra_data"]["select_options"],
[
"Option 1",
"Option 2",
],
)
def test_custom_field_select_value_old_version(self) -> None:
"""
GIVEN:
- Existing document with custom field select
WHEN:
- API post request is made to add the field for document with api version header < 7
- API get request is made for document with api version header < 7
THEN:
- The select value is returned in the old format (the index of the option)
"""
custom_field_select = CustomField.objects.create(
name="Select Field",
data_type=CustomField.FieldDataType.SELECT,
extra_data={
"select_options": [
{"label": "Option 1", "id": "abc-123"},
{"label": "Option 2", "id": "def-456"},
],
},
)
doc = Document.objects.create(
title="WOW",
content="the content",
checksum="123",
mime_type="application/pdf",
)
resp = self.client.patch(
f"/api/documents/{doc.id}/",
headers={"Accept": "application/json; version=6"},
data=json.dumps(
{
"custom_fields": [
{"field": custom_field_select.id, "value": 1},
],
},
),
content_type="application/json",
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
doc.refresh_from_db()
self.assertEqual(doc.custom_fields.first().value, "def-456")
resp = self.client.get(
f"/api/documents/{doc.id}/",
headers={"Accept": "application/json; version=6"},
)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
data = resp.json()
self.assertEqual(data["custom_fields"][0]["value"], 1)
def test_create_custom_field_monetary_validation(self) -> None:
"""
GIVEN:

View File

@@ -177,7 +177,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
results = response.data["results"]
self.assertEqual(len(results[0]), 0)
def test_document_fields_respects_created(self) -> None:
def test_document_fields_api_version_8_respects_created(self) -> None:
Document.objects.create(
title="legacy",
checksum="123",
@@ -187,6 +187,7 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
response = self.client.get(
"/api/documents/?fields=id",
headers={"Accept": "application/json; version=8"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
@@ -196,22 +197,25 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
response = self.client.get(
"/api/documents/?fields=id,created",
headers={"Accept": "application/json; version=8"},
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertIn("id", results[0])
self.assertIn("created", results[0])
self.assertEqual(results[0]["created"], "2024-01-15")
self.assertRegex(results[0]["created"], r"^2024-01-15T00:00:00.*$")
def test_document_created_format(self) -> None:
def test_document_legacy_created_format(self) -> None:
"""
GIVEN:
- Existing document
WHEN:
- Document is requested
- Document is requested with api version ≥ 9
- Document is requested with api version < 9
THEN:
- Document created field is returned as date
- Document created field is returned as datetime
"""
doc = Document.objects.create(
title="none",
@@ -222,6 +226,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
response = self.client.get(
f"/api/documents/{doc.pk}/",
headers={"Accept": "application/json; version=8"},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertRegex(response.data["created"], r"^2023-01-01T00:00:00.*$")
response = self.client.get(
f"/api/documents/{doc.pk}/",
headers={"Accept": "application/json; version=9"},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["created"], "2023-01-01")
@@ -2791,6 +2803,26 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
},
)
def test_docnote_serializer_v7(self) -> None:
doc = Document.objects.create(
title="test",
mime_type="application/pdf",
content="this is a document which will have notes!",
)
Note.objects.create(
note="This is a note.",
document=doc,
user=self.user,
)
self.assertEqual(
self.client.get(
f"/api/documents/{doc.pk}/",
headers={"Accept": "application/json; version=7"},
format="json",
).data["notes"][0]["user"],
self.user.id,
)
def test_create_note(self) -> None:
"""
GIVEN:
@@ -3559,13 +3591,14 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
)
class TestDocumentApiTagColors(DirectoriesMixin, APITestCase):
class TestDocumentApiV2(DirectoriesMixin, APITestCase):
def setUp(self) -> None:
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
self.client.defaults["HTTP_ACCEPT"] = "application/json; version=2"
def test_tag_validate_color(self) -> None:
self.assertEqual(

View File

@@ -152,7 +152,7 @@ class TestCustomFieldsSearch(DirectoriesMixin, APITestCase):
context={
"request": types.SimpleNamespace(
method="GET",
version="9",
version="7",
),
},
)

View File

@@ -12,12 +12,7 @@ class TestApiSchema(APITestCase):
Test that the schema is valid
"""
try:
call_command(
"spectacular",
"--validate",
"--fail-on-warn",
skip_checks=True,
)
call_command("spectacular", "--validate", "--fail-on-warn")
except CommandError as e:
self.fail(f"Schema validation failed: {e}")
@@ -30,39 +25,3 @@ class TestApiSchema(APITestCase):
ui_response = self.client.get(self.ENDPOINT + "view/")
self.assertEqual(ui_response.status_code, status.HTTP_200_OK)
def test_schema_includes_dedicated_document_edit_endpoints(self) -> None:
schema_response = self.client.get(self.ENDPOINT)
self.assertEqual(schema_response.status_code, status.HTTP_200_OK)
paths = schema_response.data["paths"]
self.assertIn("/api/documents/delete/", paths)
self.assertIn("/api/documents/reprocess/", paths)
self.assertIn("/api/documents/rotate/", paths)
self.assertIn("/api/documents/merge/", paths)
self.assertIn("/api/documents/edit_pdf/", paths)
self.assertIn("/api/documents/remove_password/", paths)
def test_schema_bulk_edit_advertises_legacy_document_action_methods(self) -> None:
schema_response = self.client.get(self.ENDPOINT)
self.assertEqual(schema_response.status_code, status.HTTP_200_OK)
schema = schema_response.data["components"]["schemas"]
bulk_schema = schema["BulkEditRequest"]
method_schema = bulk_schema["properties"]["method"]
# drf-spectacular emits the enum as a referenced schema for this field
enum_ref = method_schema["allOf"][0]["$ref"].split("/")[-1]
advertised_methods = schema[enum_ref]["enum"]
for action_method in [
"delete",
"reprocess",
"rotate",
"merge",
"edit_pdf",
"remove_password",
"split",
"delete_pages",
]:
self.assertIn(action_method, advertised_methods)

View File

@@ -156,46 +156,6 @@ class TestDocument(TestCase):
)
self.assertEqual(doc.get_public_filename(), "2020-12-25 test")
def test_suggestion_content_uses_latest_version_content_for_root_documents(
self,
) -> None:
root = Document.objects.create(
title="root",
checksum="root",
mime_type="application/pdf",
content="outdated root content",
)
version = Document.objects.create(
title="v1",
checksum="v1",
mime_type="application/pdf",
root_document=root,
content="latest version content",
)
self.assertEqual(root.suggestion_content, version.content)
def test_content_length_is_per_document_row_for_versions(self) -> None:
root = Document.objects.create(
title="root",
checksum="root",
mime_type="application/pdf",
content="abc",
)
version = Document.objects.create(
title="v1",
checksum="v1",
mime_type="application/pdf",
root_document=root,
content="abcdefgh",
)
root.refresh_from_db()
version.refresh_from_db()
self.assertEqual(root.content_length, 3)
self.assertEqual(version.content_length, 8)
def test_suggestion_content() -> None:
"""

View File

@@ -140,7 +140,7 @@ class TestCreateClassifier(TestCase):
"documents.management.commands.document_create_classifier.train_classifier",
)
def test_create_classifier(self, m) -> None:
call_command("document_create_classifier", skip_checks=True)
call_command("document_create_classifier")
m.assert_called_once()
@@ -152,7 +152,7 @@ class TestConvertMariaDBUUID(TestCase):
m.alter_field.return_value = None
stdout = StringIO()
call_command("convert_mariadb_uuid", stdout=stdout, skip_checks=True)
call_command("convert_mariadb_uuid", stdout=stdout)
m.assert_called_once()

View File

@@ -147,6 +147,7 @@ class TestExportImport(
else:
raise ValueError(f"document with id {id} does not exist in manifest")
@override_settings(PASSPHRASE="test")
def _do_export(
self,
*,
@@ -440,6 +441,7 @@ class TestExportImport(
)
self.assertRaises(FileNotFoundError, call_command, "document_exporter", target)
@override_settings(PASSPHRASE="test")
def test_export_zipped(self) -> None:
"""
GIVEN:
@@ -471,6 +473,7 @@ class TestExportImport(
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
@override_settings(PASSPHRASE="test")
def test_export_zipped_format(self) -> None:
"""
GIVEN:
@@ -502,11 +505,12 @@ class TestExportImport(
self.assertIsFile(expected_file)
with ZipFile(expected_file) as zip:
# Extras are from the directories, which also appear in the listing
# 11 files + 3 directory marker entries for the subdirectory structure
self.assertEqual(len(zip.namelist()), 14)
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
@override_settings(PASSPHRASE="test")
def test_export_zipped_with_delete(self) -> None:
"""
GIVEN:
@@ -553,6 +557,59 @@ class TestExportImport(
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
def test_export_zip_atomic_on_failure(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
WHEN:
- Export raises an exception mid-way
THEN:
- No .zip file is written at the final path
- The .tmp file is cleaned up
"""
args = ["document_exporter", self.target, "--zip"]
with mock.patch.object(
document_exporter.Command,
"dump",
side_effect=RuntimeError("simulated failure"),
):
with self.assertRaises(RuntimeError):
call_command(*args)
expected_zip = self.target / f"export-{timezone.localdate().isoformat()}.zip"
expected_tmp = (
self.target / f"export-{timezone.localdate().isoformat()}.zip.tmp"
)
self.assertIsNotFile(expected_zip)
self.assertIsNotFile(expected_tmp)
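The guarantee this test checks is the usual write-to-temp-then-rename pattern.
A minimal sketch under assumed names (not the repository's code):

import os
import zipfile
from pathlib import Path

def write_zip_atomically(final_path: Path, entries: dict[str, bytes]) -> None:
    tmp_path = final_path.with_name(final_path.name + ".tmp")
    try:
        with zipfile.ZipFile(tmp_path, "w") as zf:
            for arcname, payload in entries.items():
                zf.writestr(arcname, payload)
        os.replace(tmp_path, final_path)  # atomic rename on the same filesystem
    except BaseException:
        tmp_path.unlink(missing_ok=True)  # leave neither .zip nor .tmp behind
        raise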
def test_export_zip_no_scratch_dir(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
WHEN:
- Documents are exported
THEN:
- No files are written under SCRATCH_DIR during the export
(the old workaround used a temp dir there)
"""
from django.conf import settings
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
Path(self.dirs.media_dir) / "documents",
)
scratch_before = set(settings.SCRATCH_DIR.glob("paperless-export*"))
args = ["document_exporter", self.target, "--zip"]
call_command(*args)
scratch_after = set(settings.SCRATCH_DIR.glob("paperless-export*"))
self.assertEqual(scratch_before, scratch_after)
def test_export_target_not_exists(self) -> None:
"""
GIVEN:

View File

@@ -119,22 +119,15 @@ class TestCommandImport(
# No read permissions
original_path.chmod(0o222)
manifest_path = Path(temp_dir) / "manifest.json"
manifest_path.write_text(
json.dumps(
[
{
"model": "documents.document",
EXPORTER_FILE_NAME: "original.pdf",
EXPORTER_ARCHIVE_NAME: "archive.pdf",
},
],
),
)
cmd = Command()
cmd.source = Path(temp_dir)
cmd.manifest_paths = [manifest_path]
cmd.manifest = [
{
"model": "documents.document",
EXPORTER_FILE_NAME: "original.pdf",
EXPORTER_ARCHIVE_NAME: "archive.pdf",
},
]
cmd.data_only = False
with self.assertRaises(CommandError) as cm:
cmd.check_manifest_validity()
@@ -303,7 +296,7 @@ class TestCommandImport(
(self.dirs.scratch_dir / "manifest.json").touch()
# We're not building a manifest, so it fails, but this test doesn't care
with self.assertRaises(CommandError):
with self.assertRaises(json.decoder.JSONDecodeError):
call_command(
"document_importer",
"--no-progress-bar",
@@ -332,7 +325,7 @@ class TestCommandImport(
)
# We're not building a manifest, so it fails, but this test doesn't care
with self.assertRaises(CommandError):
with self.assertRaises(json.decoder.JSONDecodeError):
call_command(
"document_importer",
"--no-progress-bar",

View File

@@ -20,7 +20,6 @@ class TestManageSuperUser(DirectoriesMixin, TestCase):
"--no-color",
stdout=out,
stderr=StringIO(),
skip_checks=True,
)
return out.getvalue()

View File

@@ -48,52 +48,6 @@ class _TestMatchingBase(TestCase):
class TestMatching(_TestMatchingBase):
def test_matches_uses_latest_version_content_for_root_documents(self) -> None:
root = Document.objects.create(
title="root",
checksum="root",
mime_type="application/pdf",
content="root content without token",
)
Document.objects.create(
title="v1",
checksum="v1",
mime_type="application/pdf",
root_document=root,
content="latest version contains keyword",
)
tag = Tag.objects.create(
name="tag",
match="keyword",
matching_algorithm=Tag.MATCH_ANY,
)
self.assertTrue(matching.matches(tag, root))
def test_matches_does_not_fall_back_to_root_content_when_version_exists(
self,
) -> None:
root = Document.objects.create(
title="root",
checksum="root",
mime_type="application/pdf",
content="root contains keyword",
)
Document.objects.create(
title="v1",
checksum="v1",
mime_type="application/pdf",
root_document=root,
content="latest version without token",
)
tag = Tag.objects.create(
name="tag",
match="keyword",
matching_algorithm=Tag.MATCH_ANY,
)
self.assertFalse(matching.matches(tag, root))
def test_match_none(self) -> None:
self._test_matching(
"",

View File

@@ -6,8 +6,8 @@ SIDEBAR_VIEWS_VISIBLE_IDS_KEY = "sidebar_views_visible_ids"
class TestMigrateSavedViewVisibilityToUiSettings(TestMigrations):
migrate_from = "0013_alter_paperlesstask_task_name"
migrate_to = "0014_savedview_visibility_to_ui_settings"
migrate_from = "0013_document_root_document"
migrate_to = "0015_savedview_visibility_to_ui_settings"
def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")
@@ -132,8 +132,8 @@ class TestMigrateSavedViewVisibilityToUiSettings(TestMigrations):
class TestReverseMigrateSavedViewVisibilityFromUiSettings(TestMigrations):
migrate_from = "0014_savedview_visibility_to_ui_settings"
migrate_to = "0013_alter_paperlesstask_task_name"
migrate_from = "0015_savedview_visibility_to_ui_settings"
migrate_to = "0014_alter_paperlesstask_task_name"
def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")

View File

@@ -2,8 +2,8 @@ from documents.tests.utils import TestMigrations
class TestMigrateShareLinkBundlePermissions(TestMigrations):
migrate_from = "0006_document_content_length"
migrate_to = "0007_sharelinkbundle"
migrate_from = "0007_document_content_length"
migrate_to = "0008_sharelinkbundle"
def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")
@@ -24,8 +24,8 @@ class TestMigrateShareLinkBundlePermissions(TestMigrations):
class TestReverseMigrateShareLinkBundlePermissions(TestMigrations):
migrate_from = "0007_sharelinkbundle"
migrate_to = "0006_document_content_length"
migrate_from = "0008_sharelinkbundle"
migrate_to = "0007_document_content_length"
def setUpBeforeMigration(self, apps) -> None:
User = apps.get_model("auth", "User")

View File

@@ -176,20 +176,14 @@ from documents.serialisers import BulkEditObjectsSerializer
from documents.serialisers import BulkEditSerializer
from documents.serialisers import CorrespondentSerializer
from documents.serialisers import CustomFieldSerializer
from documents.serialisers import DeleteDocumentsSerializer
from documents.serialisers import DocumentListSerializer
from documents.serialisers import DocumentSerializer
from documents.serialisers import DocumentTypeSerializer
from documents.serialisers import DocumentVersionLabelSerializer
from documents.serialisers import DocumentVersionSerializer
from documents.serialisers import EditPdfDocumentsSerializer
from documents.serialisers import EmailSerializer
from documents.serialisers import MergeDocumentsSerializer
from documents.serialisers import NotesSerializer
from documents.serialisers import PostDocumentSerializer
from documents.serialisers import RemovePasswordDocumentsSerializer
from documents.serialisers import ReprocessDocumentsSerializer
from documents.serialisers import RotateDocumentsSerializer
from documents.serialisers import RunTaskViewSerializer
from documents.serialisers import SavedViewSerializer
from documents.serialisers import SearchResultSerializer
@@ -1528,17 +1522,13 @@ class DocumentViewSet(
return Response(sorted(entries, key=lambda x: x["timestamp"], reverse=True))
@extend_schema(
operation_id="documents_email_document",
deprecated=True,
)
@action(
methods=["post"],
detail=True,
url_path="email",
permission_classes=[IsAuthenticated, ViewDocumentsPermissions],
)
# TODO: deprecated, remove with drop of support for API v9
# TODO: deprecated as of 2.19, remove in future release
def email_document(self, request, pk=None):
request_data = request.data.copy()
request_data.setlist("documents", [pk])
@@ -2124,125 +2114,6 @@ class SavedViewViewSet(BulkPermissionMixin, PassUserMixin, ModelViewSet):
ordering_fields = ("name",)
class DocumentOperationPermissionMixin(PassUserMixin):
permission_classes = (IsAuthenticated,)
parser_classes = (parsers.JSONParser,)
METHOD_NAMES_REQUIRING_USER = {
"split",
"merge",
"rotate",
"delete_pages",
"edit_pdf",
"remove_password",
}
def _has_document_permissions(
self,
*,
user: User,
documents: list[int],
method,
parameters: dict[str, Any],
) -> bool:
if user.is_superuser:
return True
document_objs = Document.objects.select_related("owner").filter(
pk__in=documents,
)
user_is_owner_of_all_documents = all(
(doc.owner == user or doc.owner is None) for doc in document_objs
)
# check global and object permissions for all documents
has_perms = user.has_perm("documents.change_document") and all(
has_perms_owner_aware(user, "change_document", doc) for doc in document_objs
)
# check ownership for methods that change original document
if (
(
has_perms
and method
in [
bulk_edit.set_permissions,
bulk_edit.delete,
bulk_edit.rotate,
bulk_edit.delete_pages,
bulk_edit.edit_pdf,
bulk_edit.remove_password,
]
)
or (
method in [bulk_edit.merge, bulk_edit.split]
and parameters.get("delete_originals")
)
or (method == bulk_edit.edit_pdf and parameters.get("update_document"))
):
has_perms = user_is_owner_of_all_documents
# check global add permissions for methods that create documents
if (
has_perms
and (
method in [bulk_edit.split, bulk_edit.merge]
or (
method in [bulk_edit.edit_pdf, bulk_edit.remove_password]
and not parameters.get("update_document")
)
)
and not user.has_perm("documents.add_document")
):
has_perms = False
# check global delete permissions for methods that delete documents
if (
has_perms
and (
method == bulk_edit.delete
or (
method in [bulk_edit.merge, bulk_edit.split]
and parameters.get("delete_originals")
)
)
and not user.has_perm("documents.delete_document")
):
has_perms = False
return has_perms
def _execute_document_action(
self,
*,
method,
validated_data: dict[str, Any],
operation_label: str,
):
documents = validated_data["documents"]
parameters = {k: v for k, v in validated_data.items() if k != "documents"}
user = self.request.user
if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
parameters["user"] = user
if not self._has_document_permissions(
user=user,
documents=documents,
method=method,
parameters=parameters,
):
return HttpResponseForbidden("Insufficient permissions")
try:
result = method(documents, **parameters)
return Response({"result": result})
except Exception as e:
logger.warning(f"An error occurred performing {operation_label}: {e!s}")
return HttpResponseBadRequest(
f"Error performing {operation_label}, check logs for more detail.",
)
@extend_schema_view(
post=extend_schema(
operation_id="bulk_edit",
@@ -2261,7 +2132,7 @@ class DocumentOperationPermissionMixin(PassUserMixin):
},
),
)
class BulkEditView(DocumentOperationPermissionMixin):
class BulkEditView(PassUserMixin):
MODIFIED_FIELD_BY_METHOD = {
"set_correspondent": "correspondent",
"set_document_type": "document_type",
@@ -2283,24 +2154,11 @@ class BulkEditView(DocumentOperationPermissionMixin):
"remove_password": None,
}
permission_classes = (IsAuthenticated,)
serializer_class = BulkEditSerializer
parser_classes = (parsers.JSONParser,)
def post(self, request, *args, **kwargs):
request_method = request.data.get("method")
api_version = int(request.version or settings.REST_FRAMEWORK["DEFAULT_VERSION"])
# TODO: remove this and related backwards compatibility code when API v9 is dropped
if request_method in BulkEditSerializer.LEGACY_DOCUMENT_ACTION_METHODS:
endpoint = BulkEditSerializer.MOVED_DOCUMENT_ACTION_ENDPOINTS[
request_method
]
logger.warning(
"Deprecated bulk_edit method '%s' requested on API version %s. "
"Use '%s' instead.",
request_method,
api_version,
endpoint,
)
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
@@ -2308,15 +2166,82 @@ class BulkEditView(DocumentOperationPermissionMixin):
method = serializer.validated_data.get("method")
parameters = serializer.validated_data.get("parameters")
documents = serializer.validated_data.get("documents")
if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
if method in [
bulk_edit.split,
bulk_edit.merge,
bulk_edit.rotate,
bulk_edit.delete_pages,
bulk_edit.edit_pdf,
bulk_edit.remove_password,
]:
parameters["user"] = user
if not self._has_document_permissions(
user=user,
documents=documents,
method=method,
parameters=parameters,
):
return HttpResponseForbidden("Insufficient permissions")
if not user.is_superuser:
document_objs = Document.objects.select_related("owner").filter(
pk__in=documents,
)
user_is_owner_of_all_documents = all(
(doc.owner == user or doc.owner is None) for doc in document_objs
)
# check global and object permissions for all documents
has_perms = user.has_perm("documents.change_document") and all(
has_perms_owner_aware(user, "change_document", doc)
for doc in document_objs
)
# check ownership for methods that change original document
if (
(
has_perms
and method
in [
bulk_edit.set_permissions,
bulk_edit.delete,
bulk_edit.rotate,
bulk_edit.delete_pages,
bulk_edit.edit_pdf,
bulk_edit.remove_password,
]
)
or (
method in [bulk_edit.merge, bulk_edit.split]
and parameters["delete_originals"]
)
or (method == bulk_edit.edit_pdf and parameters["update_document"])
):
has_perms = user_is_owner_of_all_documents
# check global add permissions for methods that create documents
if (
has_perms
and (
method in [bulk_edit.split, bulk_edit.merge]
or (
method in [bulk_edit.edit_pdf, bulk_edit.remove_password]
and not parameters["update_document"]
)
)
and not user.has_perm("documents.add_document")
):
has_perms = False
# check global delete permissions for methods that delete documents
if (
has_perms
and (
method == bulk_edit.delete
or (
method in [bulk_edit.merge, bulk_edit.split]
and parameters["delete_originals"]
)
)
and not user.has_perm("documents.delete_document")
):
has_perms = False
if not has_perms:
return HttpResponseForbidden("Insufficient permissions")
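# Informal summary of the gates above (derived from this code, not a
# docstring from the diff):
#   1. Every method needs change_document, globally and per object.
#   2. Methods that modify originals in place (set_permissions, delete,
#      rotate, delete_pages, edit_pdf, remove_password), merge/split with
#      delete_originals, and edit_pdf with update_document additionally
#      require that the user owns (or nobody owns) every selected document.
#   3. Methods that create documents (split, merge, and edit_pdf or
#      remove_password without update_document) also need the global
#      add_document permission.
#   4. Methods that delete documents (delete, and merge/split with
#      delete_originals) also need the global delete_document permission.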
try:
modified_field = self.MODIFIED_FIELD_BY_METHOD.get(method.__name__, None)
@@ -2373,168 +2298,6 @@ class BulkEditView(DocumentOperationPermissionMixin):
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_rotate",
description="Rotate one or more documents",
responses={
200: inline_serializer(
name="RotateDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class RotateDocumentsView(DocumentOperationPermissionMixin):
serializer_class = RotateDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.rotate,
validated_data=serializer.validated_data,
operation_label="document rotate",
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_merge",
description="Merge selected documents into a new document",
responses={
200: inline_serializer(
name="MergeDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class MergeDocumentsView(DocumentOperationPermissionMixin):
serializer_class = MergeDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.merge,
validated_data=serializer.validated_data,
operation_label="document merge",
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_delete",
description="Move selected documents to trash",
responses={
200: inline_serializer(
name="DeleteDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class DeleteDocumentsView(DocumentOperationPermissionMixin):
serializer_class = DeleteDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.delete,
validated_data=serializer.validated_data,
operation_label="document delete",
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_reprocess",
description="Reprocess selected documents",
responses={
200: inline_serializer(
name="ReprocessDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class ReprocessDocumentsView(DocumentOperationPermissionMixin):
serializer_class = ReprocessDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.reprocess,
validated_data=serializer.validated_data,
operation_label="document reprocess",
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_edit_pdf",
description="Perform PDF edit operations on a selected document",
responses={
200: inline_serializer(
name="EditPdfDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class EditPdfDocumentsView(DocumentOperationPermissionMixin):
serializer_class = EditPdfDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.edit_pdf,
validated_data=serializer.validated_data,
operation_label="PDF edit",
)
@extend_schema_view(
post=extend_schema(
operation_id="documents_remove_password",
description="Remove password protection from selected PDFs",
responses={
200: inline_serializer(
name="RemovePasswordDocumentsResult",
fields={
"result": serializers.CharField(),
},
),
},
),
)
class RemovePasswordDocumentsView(DocumentOperationPermissionMixin):
serializer_class = RemovePasswordDocumentsSerializer
def post(self, request, *args, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._execute_document_action(
method=bulk_edit.remove_password,
validated_data=serializer.validated_data,
operation_label="password removal",
)
@extend_schema_view(
post=extend_schema(
description="Upload a document via the API",

File diff suppressed because it is too large

View File

@@ -7,7 +7,6 @@ from pathlib import Path
from django.conf import settings
from django.core.checks import Error
from django.core.checks import Tags
from django.core.checks import Warning
from django.core.checks import register
from django.db import connections
@@ -205,16 +204,15 @@ def audit_log_check(app_configs, **kwargs):
return result
@register(Tags.compatibility)
@register()
def check_v3_minimum_upgrade_version(
app_configs: object,
**kwargs: object,
) -> list[Error]:
"""
Enforce that upgrades to v3 must start from v2.20.10.
"""Enforce that upgrades to v3 must start from v2.20.9.
v3 squashes all prior migrations into 0001_squashed and 0002_squashed.
If a user skips v2.20.10, the data migration in 1075_workflowaction_order
If a user skips v2.20.9, the data migration in 1075_workflowaction_order
never runs and the squash may apply schema changes against an incomplete
database state.
"""
@@ -241,7 +239,7 @@ def check_v3_minimum_upgrade_version(
if {"0001_squashed", "0002_squashed"} & applied:
return []
# On v2.20.10 exactly — squash will pick up cleanly from here
# On v2.20.9 exactly — squash will pick up cleanly from here
if "1075_workflowaction_order" in applied:
return []
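How the `applied` set is populated falls outside this hunk; one plausible sketch
using Django's MigrationRecorder (an assumption, not code from this diff):

from django.db.migrations.recorder import MigrationRecorder

applied = {
    name
    for app_label, name in MigrationRecorder(connections["default"]).applied_migrations()
    if app_label == "documents"
}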
@@ -252,8 +250,8 @@ def check_v3_minimum_upgrade_version(
Error(
"Cannot upgrade to Paperless-ngx v3 from this version.",
hint=(
"Upgrading to v3 can only be performed from v2.20.10."
"Please upgrade to v2.20.10, run migrations, then upgrade to v3."
"Upgrading to v3 can only be performed from v2.20.9."
"Please upgrade to v2.20.9, run migrations, then upgrade to v3."
"See https://docs.paperless-ngx.com/setup/#upgrading for details."
),
id="paperless.E002",

View File

@@ -6,25 +6,18 @@ import math
import multiprocessing
import os
import tempfile
from os import PathLike
from pathlib import Path
from typing import Final
from urllib.parse import urlparse
from celery.schedules import crontab
from compression_middleware.middleware import CompressionMiddleware
from dateparser.languages.loader import LocaleDataLoader
from django.utils.translation import gettext_lazy as _
from dotenv import load_dotenv
from paperless.settings.custom import parse_beat_schedule
from paperless.settings.custom import parse_dateparser_languages
from paperless.settings.custom import parse_db_settings
from paperless.settings.custom import parse_hosting_settings
from paperless.settings.custom import parse_ignore_dates
from paperless.settings.custom import parse_redis_url
from paperless.settings.parsers import get_bool_from_env
from paperless.settings.parsers import get_float_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import get_list_from_env
from paperless.settings.parsers import get_path_from_env
logger = logging.getLogger("paperless.settings")
@@ -52,8 +45,239 @@ for path in [
os.environ["OMP_THREAD_LIMIT"] = "1"
def __get_boolean(key: str, default: str = "NO") -> bool:
"""
Return a boolean value based on whatever the user has supplied in the
environment, judging by whether the value "looks like" it's True or not.
"""
return bool(os.getenv(key, default).lower() in ("yes", "y", "1", "t", "true"))
def __get_int(key: str, default: int) -> int:
"""
Return an integer value based on the environment variable or a default
"""
return int(os.getenv(key, default))
def __get_optional_int(key: str) -> int | None:
"""
Returns None if the environment key is not present, otherwise an integer
"""
if key in os.environ:
return __get_int(key, -1) # pragma: no cover
return None
def __get_float(key: str, default: float) -> float:
"""
Return a float value based on the environment variable or a default
"""
return float(os.getenv(key, default))
def __get_path(
key: str,
default: PathLike | str,
) -> Path:
"""
Return a normalized, absolute path based on the environment variable or a default,
if provided
"""
if key in os.environ:
return Path(os.environ[key]).resolve()
return Path(default).resolve()
def __get_optional_path(key: str) -> Path | None:
"""
Returns None if the environment key is not present, otherwise a fully resolved Path
"""
if key in os.environ:
return __get_path(key, "")
return None
def __get_list(
key: str,
default: list[str] | None = None,
sep: str = ",",
) -> list[str]:
"""
Return a list of elements from the environment, as separated by the given
string, or the default if the key does not exist
"""
if key in os.environ:
return list(filter(None, os.environ[key].split(sep)))
elif default is not None:
return default
else:
return []
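Illustrative behavior of the helpers above (hypothetical environment values,
traced from the code):

# With PAPERLESS_DEBUG="t":
#   __get_boolean("PAPERLESS_DEBUG")              -> True  ("t" is in the truthy set)
# With PAPERLESS_APPS="a,,b":
#   __get_list("PAPERLESS_APPS")                  -> ["a", "b"]  (empty items filtered)
# With PAPERLESS_DATA_DIR unset:
#   __get_path("PAPERLESS_DATA_DIR", "/opt/data") -> Path("/opt/data").resolve()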
def _parse_redis_url(env_redis: str | None) -> tuple[str, str]:
"""
Gets the Redis information from the environment or a default and handles
converting from incompatible django_channels and celery formats.
Returns a tuple of (celery_url, channels_url)
"""
# Not set, return a compatible default
if env_redis is None:
return ("redis://localhost:6379", "redis://localhost:6379")
if "unix" in env_redis.lower():
# channels_redis socket format, looks like:
# "unix:///path/to/redis.sock"
_, path = env_redis.split(":", 1)
# Optionally setting a db number
if "?db=" in env_redis:
path, number = path.split("?db=")
return (f"redis+socket:{path}?virtual_host={number}", env_redis)
else:
return (f"redis+socket:{path}", env_redis)
elif "+socket" in env_redis.lower():
# celery socket style, looks like:
# "redis+socket:///path/to/redis.sock"
_, path = env_redis.split(":", 1)
if "?virtual_host=" in env_redis:
# Virtual host (aka db number)
path, number = path.split("?virtual_host=")
return (env_redis, f"unix:{path}?db={number}")
else:
return (env_redis, f"unix:{path}")
# Not a socket
return (env_redis, env_redis)
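Tracing the branches above, the conversions look like this (illustrative socket
paths and db numbers):

_parse_redis_url(None)
# -> ("redis://localhost:6379", "redis://localhost:6379")
_parse_redis_url("unix:///run/redis.sock?db=2")
# -> ("redis+socket:///run/redis.sock?virtual_host=2", "unix:///run/redis.sock?db=2")
_parse_redis_url("redis+socket:///run/redis.sock?virtual_host=2")
# -> ("redis+socket:///run/redis.sock?virtual_host=2", "unix:///run/redis.sock?db=2")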
def _parse_beat_schedule() -> dict:
"""
Configures the scheduled tasks according to defaults or environment
variables. Task expiration is configured so the task will expire (and
not run) shortly before the default schedule would put another instance
of the same task into the queue
https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html#beat-entries
https://docs.celeryq.dev/en/latest/userguide/calling.html#expiration
"""
schedule = {}
tasks = [
{
"name": "Check all e-mail accounts",
"env_key": "PAPERLESS_EMAIL_TASK_CRON",
# Default every ten minutes
"env_default": "*/10 * * * *",
"task": "paperless_mail.tasks.process_mail_accounts",
"options": {
# 1 minute before default schedule sends again
"expires": 9.0 * 60.0,
},
},
{
"name": "Train the classifier",
"env_key": "PAPERLESS_TRAIN_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.train_classifier",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
{
"name": "Optimize the index",
"env_key": "PAPERLESS_INDEX_TASK_CRON",
# Default daily at midnight
"env_default": "0 0 * * *",
"task": "documents.tasks.index_optimize",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Perform sanity check",
"env_key": "PAPERLESS_SANITY_TASK_CRON",
# Default Sunday at 00:30
"env_default": "30 0 * * sun",
"task": "documents.tasks.sanity_check",
"options": {
# 1 hour before default schedule sends again
"expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
},
},
{
"name": "Empty trash",
"env_key": "PAPERLESS_EMPTY_TRASH_TASK_CRON",
# Default daily at 01:00
"env_default": "0 1 * * *",
"task": "documents.tasks.empty_trash",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Check and run scheduled workflows",
"env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.check_scheduled_workflows",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
{
"name": "Rebuild LLM index",
"env_key": "PAPERLESS_LLM_INDEX_TASK_CRON",
# Default daily at 02:10
"env_default": "10 2 * * *",
"task": "documents.tasks.llmindex_index",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Cleanup expired share link bundles",
"env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
# Default daily at 02:00
"env_default": "0 2 * * *",
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
]
for task in tasks:
# Either get the environment setting or use the default
value = os.getenv(task["env_key"], task["env_default"])
# Don't add disabled tasks to the schedule
if value == "disable":
continue
# I find https://crontab.guru/ super helpful
# crontab(5) format
# - five time-and-date fields
# - separated by at least one blank
minute, hour, day_month, month, day_week = value.split(" ")
schedule[task["name"]] = {
"task": task["task"],
"schedule": crontab(minute, hour, day_week, day_month, month),
"options": task["options"],
}
return schedule
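Note the argument rearrangement in the loop above: crontab(5) field order is
minute, hour, day-of-month, month, day-of-week, while Celery's crontab() takes
minute, hour, day_of_week, day_of_month, month_of_year positionally. For example:

minute, hour, day_month, month, day_week = "5 */1 * * *".split(" ")
crontab(minute, hour, day_week, day_month, month)
# == crontab(minute="5", hour="*/1", day_of_week="*",
#            day_of_month="*", month_of_year="*")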
# NEVER RUN WITH DEBUG IN PRODUCTION.
DEBUG = get_bool_from_env("PAPERLESS_DEBUG", "NO")
DEBUG = __get_boolean("PAPERLESS_DEBUG", "NO")
###############################################################################
@@ -62,21 +286,21 @@ DEBUG = get_bool_from_env("PAPERLESS_DEBUG", "NO")
BASE_DIR: Path = Path(__file__).resolve().parent.parent.parent
STATIC_ROOT = get_path_from_env("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")
STATIC_ROOT = __get_path("PAPERLESS_STATICDIR", BASE_DIR.parent / "static")
MEDIA_ROOT = get_path_from_env("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
MEDIA_ROOT = __get_path("PAPERLESS_MEDIA_ROOT", BASE_DIR.parent / "media")
ORIGINALS_DIR = MEDIA_ROOT / "documents" / "originals"
ARCHIVE_DIR = MEDIA_ROOT / "documents" / "archive"
THUMBNAIL_DIR = MEDIA_ROOT / "documents" / "thumbnails"
SHARE_LINK_BUNDLE_DIR = MEDIA_ROOT / "documents" / "share_link_bundles"
DATA_DIR = get_path_from_env("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
DATA_DIR = __get_path("PAPERLESS_DATA_DIR", BASE_DIR.parent / "data")
NLTK_DIR = get_path_from_env("PAPERLESS_NLTK_DIR", "/usr/share/nltk_data")
NLTK_DIR = __get_path("PAPERLESS_NLTK_DIR", "/usr/share/nltk_data")
# Check deprecated setting first
EMPTY_TRASH_DIR = (
get_path_from_env("PAPERLESS_TRASH_DIR", os.getenv("PAPERLESS_EMPTY_TRASH_DIR"))
__get_path("PAPERLESS_TRASH_DIR", os.getenv("PAPERLESS_EMPTY_TRASH_DIR"))
if os.getenv("PAPERLESS_TRASH_DIR") or os.getenv("PAPERLESS_EMPTY_TRASH_DIR")
else None
)
@@ -85,21 +309,21 @@ EMPTY_TRASH_DIR = (
# threads.
MEDIA_LOCK = MEDIA_ROOT / "media.lock"
INDEX_DIR = DATA_DIR / "index"
MODEL_FILE = get_path_from_env(
MODEL_FILE = __get_path(
"PAPERLESS_MODEL_FILE",
DATA_DIR / "classification_model.pickle",
)
LLM_INDEX_DIR = DATA_DIR / "llm_index"
LOGGING_DIR = get_path_from_env("PAPERLESS_LOGGING_DIR", DATA_DIR / "log")
LOGGING_DIR = __get_path("PAPERLESS_LOGGING_DIR", DATA_DIR / "log")
CONSUMPTION_DIR = get_path_from_env(
CONSUMPTION_DIR = __get_path(
"PAPERLESS_CONSUMPTION_DIR",
BASE_DIR.parent / "consume",
)
# This will be created if it doesn't exist
SCRATCH_DIR = get_path_from_env(
SCRATCH_DIR = __get_path(
"PAPERLESS_SCRATCH_DIR",
Path(tempfile.gettempdir()) / "paperless",
)
@@ -108,7 +332,7 @@ SCRATCH_DIR = get_path_from_env(
# Application Definition #
###############################################################################
env_apps = get_list_from_env("PAPERLESS_APPS")
env_apps = __get_list("PAPERLESS_APPS")
INSTALLED_APPS = [
"whitenoise.runserver_nostatic",
@@ -155,7 +379,7 @@ REST_FRAMEWORK = {
"DEFAULT_VERSION": "10", # match src-ui/src/environments/environment.prod.ts
# Make sure these are ordered and that the most recent version appears
# last. See api.md#api-versioning when adding new versions.
"ALLOWED_VERSIONS": ["9", "10"],
"ALLOWED_VERSIONS": ["2", "3", "4", "5", "6", "7", "8", "9", "10"],
# DRF Spectacular default schema
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
}
@@ -181,7 +405,7 @@ MIDDLEWARE = [
]
# Optional to enable compression
if get_bool_from_env("PAPERLESS_ENABLE_COMPRESSION", "yes"): # pragma: no cover
if __get_boolean("PAPERLESS_ENABLE_COMPRESSION", "yes"): # pragma: no cover
MIDDLEWARE.insert(0, "compression_middleware.middleware.CompressionMiddleware")
# Workaround to not compress streaming responses (e.g. chat).
@@ -200,8 +424,20 @@ CompressionMiddleware.process_response = patched_process_response
ROOT_URLCONF = "paperless.urls"
def _parse_base_paths() -> tuple[str, str, str, str, str]:
script_name = os.getenv("PAPERLESS_FORCE_SCRIPT_NAME")
base_url = (script_name or "") + "/"
login_url = base_url + "accounts/login/"
login_redirect_url = base_url + "dashboard"
logout_redirect_url = os.getenv(
"PAPERLESS_LOGOUT_REDIRECT_URL",
login_url + "?loggedout=1",
)
return script_name, base_url, login_url, login_redirect_url, logout_redirect_url
FORCE_SCRIPT_NAME, BASE_URL, LOGIN_URL, LOGIN_REDIRECT_URL, LOGOUT_REDIRECT_URL = (
parse_hosting_settings()
_parse_base_paths()
)
# DRF Spectacular settings
@@ -235,7 +471,7 @@ STORAGES = {
"default": {"BACKEND": "django.core.files.storage.FileSystemStorage"},
}
_CELERY_REDIS_URL, _CHANNELS_REDIS_URL = parse_redis_url(
_CELERY_REDIS_URL, _CHANNELS_REDIS_URL = _parse_redis_url(
os.getenv("PAPERLESS_REDIS", None),
)
_REDIS_KEY_PREFIX = os.getenv("PAPERLESS_REDIS_PREFIX", "")
@@ -284,8 +520,8 @@ EMAIL_PORT: Final[int] = int(os.getenv("PAPERLESS_EMAIL_PORT", 25))
EMAIL_HOST_USER: Final[str] = os.getenv("PAPERLESS_EMAIL_HOST_USER", "")
EMAIL_HOST_PASSWORD: Final[str] = os.getenv("PAPERLESS_EMAIL_HOST_PASSWORD", "")
DEFAULT_FROM_EMAIL: Final[str] = os.getenv("PAPERLESS_EMAIL_FROM", EMAIL_HOST_USER)
EMAIL_USE_TLS: Final[bool] = get_bool_from_env("PAPERLESS_EMAIL_USE_TLS")
EMAIL_USE_SSL: Final[bool] = get_bool_from_env("PAPERLESS_EMAIL_USE_SSL")
EMAIL_USE_TLS: Final[bool] = __get_boolean("PAPERLESS_EMAIL_USE_TLS")
EMAIL_USE_SSL: Final[bool] = __get_boolean("PAPERLESS_EMAIL_USE_SSL")
EMAIL_SUBJECT_PREFIX: Final[str] = "[Paperless-ngx] "
EMAIL_TIMEOUT = 30.0
EMAIL_ENABLED = EMAIL_HOST != "localhost" or EMAIL_HOST_USER != ""
@@ -310,22 +546,20 @@ ACCOUNT_DEFAULT_HTTP_PROTOCOL = os.getenv(
)
ACCOUNT_ADAPTER = "paperless.adapter.CustomAccountAdapter"
ACCOUNT_ALLOW_SIGNUPS = get_bool_from_env("PAPERLESS_ACCOUNT_ALLOW_SIGNUPS")
ACCOUNT_DEFAULT_GROUPS = get_list_from_env("PAPERLESS_ACCOUNT_DEFAULT_GROUPS")
ACCOUNT_ALLOW_SIGNUPS = __get_boolean("PAPERLESS_ACCOUNT_ALLOW_SIGNUPS")
ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_ACCOUNT_DEFAULT_GROUPS")
SOCIALACCOUNT_ADAPTER = "paperless.adapter.CustomSocialAccountAdapter"
SOCIALACCOUNT_ALLOW_SIGNUPS = get_bool_from_env(
SOCIALACCOUNT_ALLOW_SIGNUPS = __get_boolean(
"PAPERLESS_SOCIALACCOUNT_ALLOW_SIGNUPS",
"yes",
)
SOCIALACCOUNT_AUTO_SIGNUP = get_bool_from_env("PAPERLESS_SOCIAL_AUTO_SIGNUP")
SOCIALACCOUNT_AUTO_SIGNUP = __get_boolean("PAPERLESS_SOCIAL_AUTO_SIGNUP")
SOCIALACCOUNT_PROVIDERS = json.loads(
os.getenv("PAPERLESS_SOCIALACCOUNT_PROVIDERS", "{}"),
)
SOCIAL_ACCOUNT_DEFAULT_GROUPS = get_list_from_env(
"PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS",
)
SOCIAL_ACCOUNT_SYNC_GROUPS = get_bool_from_env("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
SOCIAL_ACCOUNT_DEFAULT_GROUPS = __get_list("PAPERLESS_SOCIAL_ACCOUNT_DEFAULT_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS = __get_boolean("PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS")
SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM: Final[str] = os.getenv(
"PAPERLESS_SOCIAL_ACCOUNT_SYNC_GROUPS_CLAIM",
"groups",
@@ -337,8 +571,8 @@ MFA_TOTP_ISSUER = "Paperless-ngx"
ACCOUNT_EMAIL_SUBJECT_PREFIX = "[Paperless-ngx] "
DISABLE_REGULAR_LOGIN = get_bool_from_env("PAPERLESS_DISABLE_REGULAR_LOGIN")
REDIRECT_LOGIN_TO_SSO = get_bool_from_env("PAPERLESS_REDIRECT_LOGIN_TO_SSO")
DISABLE_REGULAR_LOGIN = __get_boolean("PAPERLESS_DISABLE_REGULAR_LOGIN")
REDIRECT_LOGIN_TO_SSO = __get_boolean("PAPERLESS_REDIRECT_LOGIN_TO_SSO")
AUTO_LOGIN_USERNAME = os.getenv("PAPERLESS_AUTO_LOGIN_USERNAME")
@@ -351,15 +585,12 @@ ACCOUNT_EMAIL_VERIFICATION = (
)
)
ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS = get_bool_from_env(
ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS = __get_boolean(
"PAPERLESS_ACCOUNT_EMAIL_UNKNOWN_ACCOUNTS",
"True",
)
ACCOUNT_SESSION_REMEMBER = get_bool_from_env(
"PAPERLESS_ACCOUNT_SESSION_REMEMBER",
"True",
)
ACCOUNT_SESSION_REMEMBER = __get_boolean("PAPERLESS_ACCOUNT_SESSION_REMEMBER", "True")
SESSION_EXPIRE_AT_BROWSER_CLOSE = not ACCOUNT_SESSION_REMEMBER
SESSION_COOKIE_AGE = int(
os.getenv("PAPERLESS_SESSION_COOKIE_AGE", 60 * 60 * 24 * 7 * 3),
@@ -376,8 +607,8 @@ if AUTO_LOGIN_USERNAME:
def _parse_remote_user_settings() -> str:
global MIDDLEWARE, AUTHENTICATION_BACKENDS, REST_FRAMEWORK
enable = get_bool_from_env("PAPERLESS_ENABLE_HTTP_REMOTE_USER")
enable_api = get_bool_from_env("PAPERLESS_ENABLE_HTTP_REMOTE_USER_API")
enable = __get_boolean("PAPERLESS_ENABLE_HTTP_REMOTE_USER")
enable_api = __get_boolean("PAPERLESS_ENABLE_HTTP_REMOTE_USER_API")
if enable or enable_api:
MIDDLEWARE.append("paperless.auth.HttpRemoteUserMiddleware")
AUTHENTICATION_BACKENDS.insert(
@@ -405,16 +636,16 @@ HTTP_REMOTE_USER_HEADER_NAME = _parse_remote_user_settings()
X_FRAME_OPTIONS = "SAMEORIGIN"
# The next 3 settings can also be set using just PAPERLESS_URL
CSRF_TRUSTED_ORIGINS = get_list_from_env("PAPERLESS_CSRF_TRUSTED_ORIGINS")
CSRF_TRUSTED_ORIGINS = __get_list("PAPERLESS_CSRF_TRUSTED_ORIGINS")
if DEBUG:
# Allow access from the angular development server during debugging
CSRF_TRUSTED_ORIGINS.append("http://localhost:4200")
# We allow CORS from localhost:8000
CORS_ALLOWED_ORIGINS = get_list_from_env(
CORS_ALLOWED_ORIGINS = __get_list(
"PAPERLESS_CORS_ALLOWED_HOSTS",
default=["http://localhost:8000"],
["http://localhost:8000"],
)
if DEBUG:
@@ -427,7 +658,7 @@ CORS_EXPOSE_HEADERS = [
"Content-Disposition",
]
ALLOWED_HOSTS = get_list_from_env("PAPERLESS_ALLOWED_HOSTS", default=["*"])
ALLOWED_HOSTS = __get_list("PAPERLESS_ALLOWED_HOSTS", ["*"])
if ALLOWED_HOSTS != ["*"]:
# always allow localhost. Necessary e.g. for healthcheck in docker.
ALLOWED_HOSTS.append("localhost")
@@ -447,10 +678,10 @@ def _parse_paperless_url():
PAPERLESS_URL = _parse_paperless_url()
# For use with trusted proxies
TRUSTED_PROXIES = get_list_from_env("PAPERLESS_TRUSTED_PROXIES")
TRUSTED_PROXIES = __get_list("PAPERLESS_TRUSTED_PROXIES")
USE_X_FORWARDED_HOST = get_bool_from_env("PAPERLESS_USE_X_FORWARD_HOST", "false")
USE_X_FORWARDED_PORT = get_bool_from_env("PAPERLESS_USE_X_FORWARD_PORT", "false")
USE_X_FORWARDED_HOST = __get_boolean("PAPERLESS_USE_X_FORWARD_HOST", "false")
USE_X_FORWARDED_PORT = __get_boolean("PAPERLESS_USE_X_FORWARD_PORT", "false")
SECURE_PROXY_SSL_HEADER = (
tuple(json.loads(os.environ["PAPERLESS_PROXY_SSL_HEADER"]))
if "PAPERLESS_PROXY_SSL_HEADER" in os.environ
@@ -493,7 +724,7 @@ CSRF_COOKIE_NAME = f"{COOKIE_PREFIX}csrftoken"
SESSION_COOKIE_NAME = f"{COOKIE_PREFIX}sessionid"
LANGUAGE_COOKIE_NAME = f"{COOKIE_PREFIX}django_language"
EMAIL_CERTIFICATE_FILE = get_path_from_env("PAPERLESS_EMAIL_CERTIFICATE_LOCATION")
EMAIL_CERTIFICATE_FILE = __get_optional_path("PAPERLESS_EMAIL_CERTIFICATE_LOCATION")
###############################################################################
@@ -644,7 +875,7 @@ CELERY_BROKER_URL = _CELERY_REDIS_URL
CELERY_TIMEZONE = TIME_ZONE
CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_CONCURRENCY: Final[int] = get_int_from_env("PAPERLESS_TASK_WORKERS", 1)
CELERY_WORKER_CONCURRENCY: Final[int] = __get_int("PAPERLESS_TASK_WORKERS", 1)
TASK_WORKERS = CELERY_WORKER_CONCURRENCY
CELERY_WORKER_MAX_TASKS_PER_CHILD = 1
CELERY_WORKER_SEND_TASK_EVENTS = True
@@ -657,7 +888,7 @@ CELERY_BROKER_TRANSPORT_OPTIONS = {
}
CELERY_TASK_TRACK_STARTED = True
CELERY_TASK_TIME_LIMIT: Final[int] = get_int_from_env("PAPERLESS_WORKER_TIMEOUT", 1800)
CELERY_TASK_TIME_LIMIT: Final[int] = __get_int("PAPERLESS_WORKER_TIMEOUT", 1800)
CELERY_RESULT_EXTENDED = True
CELERY_RESULT_BACKEND = "django-db"
@@ -669,7 +900,7 @@ CELERY_TASK_SERIALIZER = "pickle"
CELERY_ACCEPT_CONTENT = ["application/json", "application/x-python-serialize"]
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#beat-schedule
CELERY_BEAT_SCHEDULE = parse_beat_schedule()
CELERY_BEAT_SCHEDULE = _parse_beat_schedule()
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#beat-schedule-filename
CELERY_BEAT_SCHEDULE_FILENAME = str(DATA_DIR / "celerybeat-schedule.db")
@@ -677,14 +908,14 @@ CELERY_BEAT_SCHEDULE_FILENAME = str(DATA_DIR / "celerybeat-schedule.db")
# Cachalot: Database read cache.
def _parse_cachalot_settings():
ttl = get_int_from_env("PAPERLESS_READ_CACHE_TTL", 3600)
ttl = __get_int("PAPERLESS_READ_CACHE_TTL", 3600)
ttl = min(ttl, 31536000) if ttl > 0 else 3600
_, redis_url = parse_redis_url(
_, redis_url = _parse_redis_url(
os.getenv("PAPERLESS_READ_CACHE_REDIS_URL", _CHANNELS_REDIS_URL),
)
result = {
"CACHALOT_CACHE": "read-cache",
"CACHALOT_ENABLED": get_bool_from_env(
"CACHALOT_ENABLED": __get_boolean(
"PAPERLESS_DB_READ_CACHE_ENABLED",
default="no",
),
@@ -769,9 +1000,9 @@ CONSUMER_POLLING_INTERVAL = float(os.getenv("PAPERLESS_CONSUMER_POLLING_INTERVAL
CONSUMER_STABILITY_DELAY = float(os.getenv("PAPERLESS_CONSUMER_STABILITY_DELAY", 5))
CONSUMER_DELETE_DUPLICATES = get_bool_from_env("PAPERLESS_CONSUMER_DELETE_DUPLICATES")
CONSUMER_DELETE_DUPLICATES = __get_boolean("PAPERLESS_CONSUMER_DELETE_DUPLICATES")
CONSUMER_RECURSIVE = get_bool_from_env("PAPERLESS_CONSUMER_RECURSIVE")
CONSUMER_RECURSIVE = __get_boolean("PAPERLESS_CONSUMER_RECURSIVE")
# Ignore regex patterns, matched against filename only
CONSUMER_IGNORE_PATTERNS = list(
@@ -793,13 +1024,13 @@ CONSUMER_IGNORE_DIRS = list(
),
)
CONSUMER_SUBDIRS_AS_TAGS = get_bool_from_env("PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS")
CONSUMER_SUBDIRS_AS_TAGS = __get_boolean("PAPERLESS_CONSUMER_SUBDIRS_AS_TAGS")
CONSUMER_ENABLE_BARCODES: Final[bool] = get_bool_from_env(
CONSUMER_ENABLE_BARCODES: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_ENABLE_BARCODES",
)
CONSUMER_BARCODE_TIFF_SUPPORT: Final[bool] = get_bool_from_env(
CONSUMER_BARCODE_TIFF_SUPPORT: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT",
)
@@ -808,7 +1039,7 @@ CONSUMER_BARCODE_STRING: Final[str] = os.getenv(
"PATCHT",
)
CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = get_bool_from_env(
CONSUMER_ENABLE_ASN_BARCODE: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_ENABLE_ASN_BARCODE",
)
@@ -817,26 +1048,23 @@ CONSUMER_ASN_BARCODE_PREFIX: Final[str] = os.getenv(
"ASN",
)
CONSUMER_BARCODE_UPSCALE: Final[float] = get_float_from_env(
CONSUMER_BARCODE_UPSCALE: Final[float] = __get_float(
"PAPERLESS_CONSUMER_BARCODE_UPSCALE",
0.0,
)
CONSUMER_BARCODE_DPI: Final[int] = get_int_from_env(
"PAPERLESS_CONSUMER_BARCODE_DPI",
300,
)
CONSUMER_BARCODE_DPI: Final[int] = __get_int("PAPERLESS_CONSUMER_BARCODE_DPI", 300)
CONSUMER_BARCODE_MAX_PAGES: Final[int] = get_int_from_env(
CONSUMER_BARCODE_MAX_PAGES: Final[int] = __get_int(
"PAPERLESS_CONSUMER_BARCODE_MAX_PAGES",
0,
)
CONSUMER_BARCODE_RETAIN_SPLIT_PAGES = get_bool_from_env(
CONSUMER_BARCODE_RETAIN_SPLIT_PAGES = __get_boolean(
"PAPERLESS_CONSUMER_BARCODE_RETAIN_SPLIT_PAGES",
)
CONSUMER_ENABLE_TAG_BARCODE: Final[bool] = get_bool_from_env(
CONSUMER_ENABLE_TAG_BARCODE: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_ENABLE_TAG_BARCODE",
)
@@ -849,11 +1077,11 @@ CONSUMER_TAG_BARCODE_MAPPING = dict(
),
)
CONSUMER_TAG_BARCODE_SPLIT: Final[bool] = get_bool_from_env(
CONSUMER_TAG_BARCODE_SPLIT: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_TAG_BARCODE_SPLIT",
)
CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = get_bool_from_env(
CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_ENABLE_COLLATE_DOUBLE_SIDED",
)
@@ -862,13 +1090,13 @@ CONSUMER_COLLATE_DOUBLE_SIDED_SUBDIR_NAME: Final[str] = os.getenv(
"double-sided",
)
CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT: Final[bool] = get_bool_from_env(
CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT: Final[bool] = __get_boolean(
"PAPERLESS_CONSUMER_COLLATE_DOUBLE_SIDED_TIFF_SUPPORT",
)
CONSUMER_PDF_RECOVERABLE_MIME_TYPES = ("application/octet-stream",)
OCR_PAGES = get_int_from_env("PAPERLESS_OCR_PAGES")
OCR_PAGES = __get_optional_int("PAPERLESS_OCR_PAGES")
# The default language that tesseract will attempt to use when parsing
# documents. It should be a 3-letter language code consistent with ISO 639.
@@ -882,20 +1110,20 @@ OCR_MODE = os.getenv("PAPERLESS_OCR_MODE", "skip")
OCR_SKIP_ARCHIVE_FILE = os.getenv("PAPERLESS_OCR_SKIP_ARCHIVE_FILE", "never")
OCR_IMAGE_DPI = get_int_from_env("PAPERLESS_OCR_IMAGE_DPI")
OCR_IMAGE_DPI = __get_optional_int("PAPERLESS_OCR_IMAGE_DPI")
OCR_CLEAN = os.getenv("PAPERLESS_OCR_CLEAN", "clean")
OCR_DESKEW: Final[bool] = get_bool_from_env("PAPERLESS_OCR_DESKEW", "true")
OCR_DESKEW: Final[bool] = __get_boolean("PAPERLESS_OCR_DESKEW", "true")
OCR_ROTATE_PAGES: Final[bool] = get_bool_from_env("PAPERLESS_OCR_ROTATE_PAGES", "true")
OCR_ROTATE_PAGES: Final[bool] = __get_boolean("PAPERLESS_OCR_ROTATE_PAGES", "true")
OCR_ROTATE_PAGES_THRESHOLD: Final[float] = get_float_from_env(
OCR_ROTATE_PAGES_THRESHOLD: Final[float] = __get_float(
"PAPERLESS_OCR_ROTATE_PAGES_THRESHOLD",
12.0,
)
OCR_MAX_IMAGE_PIXELS: Final[int | None] = get_int_from_env(
OCR_MAX_IMAGE_PIXELS: Final[int | None] = __get_optional_int(
"PAPERLESS_OCR_MAX_IMAGE_PIXELS",
)
@@ -906,7 +1134,7 @@ OCR_COLOR_CONVERSION_STRATEGY = os.getenv(
OCR_USER_ARGS = os.getenv("PAPERLESS_OCR_USER_ARGS")
MAX_IMAGE_PIXELS: Final[int | None] = get_int_from_env(
MAX_IMAGE_PIXELS: Final[int | None] = __get_optional_int(
"PAPERLESS_MAX_IMAGE_PIXELS",
)
@@ -921,7 +1149,7 @@ CONVERT_MEMORY_LIMIT = os.getenv("PAPERLESS_CONVERT_MEMORY_LIMIT")
GS_BINARY = os.getenv("PAPERLESS_GS_BINARY", "gs")
# Fallback layout for .eml consumption
EMAIL_PARSE_DEFAULT_LAYOUT = get_int_from_env(
EMAIL_PARSE_DEFAULT_LAYOUT = __get_int(
"PAPERLESS_EMAIL_PARSE_DEFAULT_LAYOUT",
1, # MailRule.PdfLayout.TEXT_HTML but that can't be imported here
)
@@ -935,9 +1163,23 @@ DATE_ORDER = os.getenv("PAPERLESS_DATE_ORDER", "DMY")
FILENAME_DATE_ORDER = os.getenv("PAPERLESS_FILENAME_DATE_ORDER")
def _parse_dateparser_languages(languages: str | None) -> list[str]:
language_list = languages.split("+") if languages else []
# There is an unfixed issue in zh-Hant and zh-Hans locales in the dateparser lib.
# See: https://github.com/scrapinghub/dateparser/issues/875
for index, language in enumerate(language_list):
if language.startswith("zh-") and "zh" not in language_list:
logger.warning(
f'Chinese locale detected: {language}. dateparser might fail to parse some dates with this locale, so Chinese ("zh") will be used as a fallback.',
)
language_list.append("zh")
return list(LocaleDataLoader().get_locale_map(locales=language_list))
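The fallback behaviour is easiest to see with concrete inputs; the values below mirror the parametrized test cases further down in this diff (ordering of the returned locales may differ, which is why the tests compare sorted lists):
# "+"-separated locale strings; Chinese variants pull in "zh" as a fallback
_parse_dateparser_languages("fr+en")        # -> ["fr", "en"]
_parse_dateparser_languages("en+zh-Hans")   # -> ["en", "zh-Hans", "zh"]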
# If not set, we will infer it at runtime
DATE_PARSER_LANGUAGES = (
parse_dateparser_languages(
_parse_dateparser_languages(
os.getenv("PAPERLESS_DATE_PARSER_LANGUAGES"),
)
if os.getenv("PAPERLESS_DATE_PARSER_LANGUAGES")
@@ -948,7 +1190,7 @@ DATE_PARSER_LANGUAGES = (
# Maximum number of dates taken from document start to end to show as suggestions for
# `created` date in the frontend. Duplicates are removed, which can result in
# fewer dates shown.
NUMBER_OF_SUGGESTED_DATES = get_int_from_env("PAPERLESS_NUMBER_OF_SUGGESTED_DATES", 3)
NUMBER_OF_SUGGESTED_DATES = __get_int("PAPERLESS_NUMBER_OF_SUGGESTED_DATES", 3)
# Specify the filename format for out files
FILENAME_FORMAT = os.getenv("PAPERLESS_FILENAME_FORMAT")
@@ -956,7 +1198,7 @@ FILENAME_FORMAT = os.getenv("PAPERLESS_FILENAME_FORMAT")
# If this is enabled, variables in filename format will resolve to
# empty-string instead of 'none'.
# Directories with 'empty names' are omitted, too.
FILENAME_FORMAT_REMOVE_NONE = get_bool_from_env(
FILENAME_FORMAT_REMOVE_NONE = __get_boolean(
"PAPERLESS_FILENAME_FORMAT_REMOVE_NONE",
"NO",
)
@@ -967,7 +1209,7 @@ THUMBNAIL_FONT_NAME = os.getenv(
)
# Tika settings
TIKA_ENABLED = get_bool_from_env("PAPERLESS_TIKA_ENABLED", "NO")
TIKA_ENABLED = __get_boolean("PAPERLESS_TIKA_ENABLED", "NO")
TIKA_ENDPOINT = os.getenv("PAPERLESS_TIKA_ENDPOINT", "http://localhost:9998")
TIKA_GOTENBERG_ENDPOINT = os.getenv(
"PAPERLESS_TIKA_GOTENBERG_ENDPOINT",
@@ -977,21 +1219,52 @@ TIKA_GOTENBERG_ENDPOINT = os.getenv(
if TIKA_ENABLED:
INSTALLED_APPS.append("paperless_tika.apps.PaperlessTikaConfig")
AUDIT_LOG_ENABLED = get_bool_from_env("PAPERLESS_AUDIT_LOG_ENABLED", "true")
AUDIT_LOG_ENABLED = __get_boolean("PAPERLESS_AUDIT_LOG_ENABLED", "true")
if AUDIT_LOG_ENABLED:
INSTALLED_APPS.append("auditlog")
MIDDLEWARE.append("auditlog.middleware.AuditlogMiddleware")
def _parse_ignore_dates(
env_ignore: str,
date_order: str = DATE_ORDER,
) -> set[datetime.date]:
"""
If the PAPERLESS_IGNORE_DATES environment variable is set, parse the
user provided string(s) into dates
Args:
env_ignore (str): The value of the environment variable, comma separated dates
date_order (str, optional): The format of the date strings.
Defaults to DATE_ORDER.
Returns:
set[datetime.date]: The set of parsed date objects
"""
import dateparser
ignored_dates = set()
for s in env_ignore.split(","):
d = dateparser.parse(
s,
settings={
"DATE_ORDER": date_order,
},
)
if d:
ignored_dates.add(d.date())
return ignored_dates
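Concrete behaviour, matching the test cases for this helper elsewhere in this diff:
# Comma-separated dates, interpreted according to the date order
_parse_ignore_dates("1985-05-01,1991-12-05", "YMD")
# -> {datetime.date(1985, 5, 1), datetime.date(1991, 12, 5)}
_parse_ignore_dates("11.01.10", "DMY")
# -> {datetime.date(2010, 1, 11)}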
# List dates that should be ignored when trying to parse date from document text
IGNORE_DATES: set[datetime.date] = set()
if os.getenv("PAPERLESS_IGNORE_DATES") is not None:
IGNORE_DATES = parse_ignore_dates(os.getenv("PAPERLESS_IGNORE_DATES"), DATE_ORDER)
IGNORE_DATES = _parse_ignore_dates(os.getenv("PAPERLESS_IGNORE_DATES"))
ENABLE_UPDATE_CHECK = os.getenv("PAPERLESS_ENABLE_UPDATE_CHECK", "default")
if ENABLE_UPDATE_CHECK != "default":
ENABLE_UPDATE_CHECK = get_bool_from_env("PAPERLESS_ENABLE_UPDATE_CHECK")
ENABLE_UPDATE_CHECK = __get_boolean("PAPERLESS_ENABLE_UPDATE_CHECK")
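ENABLE_UPDATE_CHECK is deliberately tri-state: the string sentinel "default" survives unless the user set the variable, so downstream code can tell "unset" apart from an explicit yes or no. A sketch of the three cases:
# PAPERLESS_ENABLE_UPDATE_CHECK unset   -> "default" (string sentinel)
# PAPERLESS_ENABLE_UPDATE_CHECK="true"  -> True
# PAPERLESS_ENABLE_UPDATE_CHECK="false" -> False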
APP_TITLE = os.getenv("PAPERLESS_APP_TITLE", None)
APP_LOGO = os.getenv("PAPERLESS_APP_LOGO", None)
@@ -1036,7 +1309,7 @@ def _get_nltk_language_setting(ocr_lang: str) -> str | None:
return iso_code_to_nltk.get(ocr_lang)
NLTK_ENABLED: Final[bool] = get_bool_from_env("PAPERLESS_ENABLE_NLTK", "yes")
NLTK_ENABLED: Final[bool] = __get_boolean("PAPERLESS_ENABLE_NLTK", "yes")
NLTK_LANGUAGE: str | None = _get_nltk_language_setting(OCR_LANGUAGE)
@@ -1045,7 +1318,7 @@ NLTK_LANGUAGE: str | None = _get_nltk_language_setting(OCR_LANGUAGE)
###############################################################################
EMAIL_GNUPG_HOME: Final[str | None] = os.getenv("PAPERLESS_EMAIL_GNUPG_HOME")
EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = get_bool_from_env(
EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = __get_boolean(
"PAPERLESS_ENABLE_GPG_DECRYPTOR",
)
@@ -1053,7 +1326,7 @@ EMAIL_ENABLE_GPG_DECRYPTOR: Final[bool] = get_bool_from_env(
###############################################################################
# Soft Delete #
###############################################################################
EMPTY_TRASH_DELAY = max(get_int_from_env("PAPERLESS_EMPTY_TRASH_DELAY", 30), 1)
EMPTY_TRASH_DELAY = max(__get_int("PAPERLESS_EMPTY_TRASH_DELAY", 30), 1)
###############################################################################
@@ -1078,17 +1351,21 @@ OUTLOOK_OAUTH_ENABLED = bool(
###############################################################################
# Webhooks
###############################################################################
WEBHOOKS_ALLOWED_SCHEMES = {
WEBHOOKS_ALLOWED_SCHEMES = set(
s.lower()
for s in get_list_from_env(
for s in __get_list(
"PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES",
default=["http", "https"],
["http", "https"],
)
}
WEBHOOKS_ALLOWED_PORTS = {
int(p) for p in get_list_from_env("PAPERLESS_WEBHOOKS_ALLOWED_PORTS", default=[])
}
WEBHOOKS_ALLOW_INTERNAL_REQUESTS = get_bool_from_env(
)
WEBHOOKS_ALLOWED_PORTS = set(
int(p)
for p in __get_list(
"PAPERLESS_WEBHOOKS_ALLOWED_PORTS",
[],
)
)
WEBHOOKS_ALLOW_INTERNAL_REQUESTS = __get_boolean(
"PAPERLESS_WEBHOOKS_ALLOW_INTERNAL_REQUESTS",
"true",
)
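A sketch with hypothetical env values (the defaults come from the calls above):
# PAPERLESS_WEBHOOKS_ALLOWED_SCHEMES="HTTP,https" -> {"http", "https"}  (lower-cased)
# PAPERLESS_WEBHOOKS_ALLOWED_PORTS="80,443"       -> {80, 443}
# ports unset                                     -> set()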
@@ -1103,7 +1380,7 @@ REMOTE_OCR_ENDPOINT = os.getenv("PAPERLESS_REMOTE_OCR_ENDPOINT")
################################################################################
# AI Settings #
################################################################################
AI_ENABLED = get_bool_from_env("PAPERLESS_AI_ENABLED", "NO")
AI_ENABLED = __get_boolean("PAPERLESS_AI_ENABLED", "NO")
LLM_EMBEDDING_BACKEND = os.getenv(
"PAPERLESS_AI_LLM_EMBEDDING_BACKEND",
) # "huggingface" or "openai"

View File

@@ -1,191 +1,11 @@
import datetime
import logging
import os
from pathlib import Path
from typing import Any
from celery.schedules import crontab
from dateparser.languages.loader import LocaleDataLoader
from paperless.settings.parsers import get_choice_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import parse_dict_from_str
logger = logging.getLogger(__name__)
def parse_hosting_settings() -> tuple[str | None, str, str, str, str]:
script_name = os.getenv("PAPERLESS_FORCE_SCRIPT_NAME")
base_url = (script_name or "") + "/"
login_url = base_url + "accounts/login/"
login_redirect_url = base_url + "dashboard"
logout_redirect_url = os.getenv(
"PAPERLESS_LOGOUT_REDIRECT_URL",
login_url + "?loggedout=1",
)
return script_name, base_url, login_url, login_redirect_url, logout_redirect_url
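The derived URLs follow mechanically from the script name; these pairs match the parametrized tests further down in this diff:
parse_hosting_settings()
# PAPERLESS_FORCE_SCRIPT_NAME unset:
# -> (None, "/", "/accounts/login/", "/dashboard", "/accounts/login/?loggedout=1")
# PAPERLESS_FORCE_SCRIPT_NAME="/paperless":
# -> ("/paperless", "/paperless/", "/paperless/accounts/login/",
#     "/paperless/dashboard", "/paperless/accounts/login/?loggedout=1")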
def parse_redis_url(env_redis: str | None) -> tuple[str, str]:
"""
Gets the Redis information from the environment or a default and handles
converting from incompatible django_channels and celery formats.
Returns a tuple of (celery_url, channels_url)
"""
# Not set, return a compatible default
if env_redis is None:
return ("redis://localhost:6379", "redis://localhost:6379")
if "unix" in env_redis.lower():
# channels_redis socket format, looks like:
# "unix:///path/to/redis.sock"
_, path = env_redis.split(":", maxsplit=1)
# Optionally setting a db number
if "?db=" in env_redis:
path, number = path.split("?db=")
return (f"redis+socket:{path}?virtual_host={number}", env_redis)
else:
return (f"redis+socket:{path}", env_redis)
elif "+socket" in env_redis.lower():
# celery socket style, looks like:
# "redis+socket:///path/to/redis.sock"
_, path = env_redis.split(":", maxsplit=1)
if "?virtual_host=" in env_redis:
# Virtual host (aka db number)
path, number = path.split("?virtual_host=")
return (env_redis, f"unix:{path}?db={number}")
else:
return (env_redis, f"unix:{path}")
# Not a socket
return (env_redis, env_redis)
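The translation in both directions, using the same fixtures as the regression tests in this diff:
parse_redis_url(None)
# -> ("redis://localhost:6379", "redis://localhost:6379")
parse_redis_url("unix:///run/redis/redis.sock?db=10")
# -> ("redis+socket:///run/redis/redis.sock?virtual_host=10",
#     "unix:///run/redis/redis.sock?db=10")
parse_redis_url("redis+socket:///run/redis/redis.sock?virtual_host=5")
# -> ("redis+socket:///run/redis/redis.sock?virtual_host=5",
#     "unix:///run/redis/redis.sock?db=5")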
def parse_beat_schedule() -> dict:
"""
Configures the scheduled tasks from environment variables or their
defaults. Task expiration is configured so a task will expire (and not
run) shortly before the default schedule would put the next instance of
the same task into the queue
https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html#beat-entries
https://docs.celeryq.dev/en/latest/userguide/calling.html#expiration
"""
schedule = {}
tasks = [
{
"name": "Check all e-mail accounts",
"env_key": "PAPERLESS_EMAIL_TASK_CRON",
# Default every ten minutes
"env_default": "*/10 * * * *",
"task": "paperless_mail.tasks.process_mail_accounts",
"options": {
# 1 minute before default schedule sends again
"expires": 9.0 * 60.0,
},
},
{
"name": "Train the classifier",
"env_key": "PAPERLESS_TRAIN_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.train_classifier",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
{
"name": "Optimize the index",
"env_key": "PAPERLESS_INDEX_TASK_CRON",
# Default daily at midnight
"env_default": "0 0 * * *",
"task": "documents.tasks.index_optimize",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Perform sanity check",
"env_key": "PAPERLESS_SANITY_TASK_CRON",
# Default Sunday at 00:30
"env_default": "30 0 * * sun",
"task": "documents.tasks.sanity_check",
"options": {
# 1 hour before default schedule sends again
"expires": ((7.0 * 24.0) - 1.0) * 60.0 * 60.0,
},
},
{
"name": "Empty trash",
"env_key": "PAPERLESS_EMPTY_TRASH_TASK_CRON",
# Default daily at 01:00
"env_default": "0 1 * * *",
"task": "documents.tasks.empty_trash",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Check and run scheduled workflows",
"env_key": "PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON",
# Default hourly at 5 minutes past the hour
"env_default": "5 */1 * * *",
"task": "documents.tasks.check_scheduled_workflows",
"options": {
# 1 minute before default schedule sends again
"expires": 59.0 * 60.0,
},
},
{
"name": "Rebuild LLM index",
"env_key": "PAPERLESS_LLM_INDEX_TASK_CRON",
# Default daily at 02:10
"env_default": "10 2 * * *",
"task": "documents.tasks.llmindex_index",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
{
"name": "Cleanup expired share link bundles",
"env_key": "PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON",
# Default daily at 02:00
"env_default": "0 2 * * *",
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"options": {
# 1 hour before default schedule sends again
"expires": 23.0 * 60.0 * 60.0,
},
},
]
for task in tasks:
# Either get the environment setting or use the default
value = os.getenv(task["env_key"], task["env_default"])
# Don't add disabled tasks to the schedule
if value == "disable":
continue
# I find https://crontab.guru/ super helpful
# crontab(5) format
# - five time-and-date fields
# - separated by at least one blank
minute, hour, day_month, month, day_week = value.split(" ")
schedule[task["name"]] = {
"task": task["task"],
"schedule": crontab(minute, hour, day_week, day_month, month),
"options": task["options"],
}
return schedule
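Two things are easy to trip over here: the crontab(5) field order differs from the celery crontab() argument order, and the literal value "disable" removes a task entirely. For example:
# crontab(5) string:      minute hour day-of-month month day-of-week
# celery crontab() args:  crontab(minute, hour, day_of_week, day_of_month, month)
# PAPERLESS_EMAIL_TASK_CRON="*/50 * * * mon"
#   -> "Check all e-mail accounts" runs on crontab(minute="*/50", day_of_week="mon")
# PAPERLESS_INDEX_TASK_CRON="disable"
#   -> "Optimize the index" is omitted from the schedule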
def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
"""Parse database settings from environment variables.
@@ -209,6 +29,7 @@ def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
engine = get_choice_from_env(
"PAPERLESS_DBENGINE",
{"sqlite", "postgresql", "mariadb"},
default="sqlite",
)
except ValueError:
# MariaDB users already had to set PAPERLESS_DBENGINE, so it was picked up above
@@ -299,48 +120,3 @@ def parse_db_settings(data_dir: Path) -> dict[str, dict[str, Any]]:
)
return {"default": db_config}
def parse_dateparser_languages(languages: str | None) -> list[str]:
language_list = languages.split("+") if languages else []
# There is an unfixed issue in zh-Hant and zh-Hans locales in the dateparser lib.
# See: https://github.com/scrapinghub/dateparser/issues/875
for index, language in enumerate(language_list):
if language.startswith("zh-") and "zh" not in language_list:
logger.warning(
f"Chinese locale detected: {language}. dateparser might fail to parse"
f' some dates with this locale, so Chinese ("zh") will be used as a fallback.',
)
language_list.append("zh")
return list(LocaleDataLoader().get_locale_map(locales=language_list))
def parse_ignore_dates(
env_ignore: str,
date_order: str,
) -> set[datetime.date]:
"""
If the PAPERLESS_IGNORE_DATES environment variable is set, parse the
user provided string(s) into dates
Args:
env_ignore (str): The value of the environment variable, comma separated dates
date_order (str): The format of the date strings.
Returns:
set[datetime.date]: The set of parsed date objects
"""
import dateparser
ignored_dates = set()
for s in env_ignore.split(","):
d = dateparser.parse(
s,
settings={
"DATE_ORDER": date_order,
},
)
if d:
ignored_dates.add(d.date())
return ignored_dates

View File

@@ -156,108 +156,6 @@ def parse_dict_from_str(
return settings
def get_bool_from_env(key: str, default: str = "NO") -> bool:
"""
Return a boolean parsed from the environment: whatever the user supplied
(or the default) is interpreted by whether it "looks like" True or not.
"""
return str_to_bool(os.getenv(key, default))
@overload
def get_float_from_env(key: str) -> float | None: ...
@overload
def get_float_from_env(key: str, default: None) -> float | None: ...
@overload
def get_float_from_env(key: str, default: float) -> float: ...
def get_float_from_env(key: str, default: float | None = None) -> float | None:
"""
Return a float value based on the environment variable.
If default is provided, returns that value when key is missing.
If default is None, returns None when key is missing.
"""
if key not in os.environ:
return default
return float(os.environ[key])
@overload
def get_path_from_env(key: str) -> Path | None: ...
@overload
def get_path_from_env(key: str, default: None) -> Path | None: ...
@overload
def get_path_from_env(key: str, default: Path | str) -> Path: ...
def get_path_from_env(key: str, default: Path | str | None = None) -> Path | None:
"""
Return a Path object based on the environment variable.
If default is provided, returns that value when key is missing.
If default is None, returns None when key is missing.
"""
if key not in os.environ:
return default if default is None else Path(default).resolve()
return Path(os.environ[key]).resolve()
def get_list_from_env(
key: str,
separator: str = ",",
default: list[T] | None = None,
*,
strip_whitespace: bool = True,
remove_empty: bool = True,
required: bool = False,
) -> list[str] | list[T]:
"""
Get and parse a list from an environment variable or return a default.
Args:
key: Environment variable name
separator: Character(s) to split on (default: ',')
default: Default value to return if env var is not set or empty
strip_whitespace: Whether to strip whitespace from each element
remove_empty: Whether to remove empty strings from the result
required: If True, raise an error when the env var is missing and no default provided
Returns:
List of strings or list of type-cast values, or default if env var is empty/None
Raises:
ValueError: If required=True and env var is missing and there is no default
"""
# Get the environment variable value
env_value = os.environ.get(key)
# Handle required environment variables
if required and env_value is None and default is None:
raise ValueError(f"Required environment variable '{key}' is not set")
if env_value:
items = env_value.split(separator)
if strip_whitespace:
items = [item.strip() for item in items]
if remove_empty:
items = [item for item in items if item]
return items
elif default is not None:
return default
else:
return []
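Condensed from the tests that exercised this helper:
get_list_from_env("LIST_VAR")                 # "a, b , c" -> ["a", "b", "c"]
get_list_from_env("LIST_VAR", separator="|")  # "a|b|c"    -> ["a", "b", "c"]
get_list_from_env("MISSING", default=[1, 2])  # unset      -> [1, 2]
get_list_from_env("MISSING", required=True)   # unset, no default -> ValueError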
def get_choice_from_env(
env_key: str,
choices: set[str],

View File

@@ -1,279 +1,10 @@
import datetime
import os
from pathlib import Path
from typing import Any
import pytest
from celery.schedules import crontab
from pytest_mock import MockerFixture
from paperless.settings.custom import parse_beat_schedule
from paperless.settings.custom import parse_dateparser_languages
from paperless.settings.custom import parse_db_settings
from paperless.settings.custom import parse_hosting_settings
from paperless.settings.custom import parse_ignore_dates
from paperless.settings.custom import parse_redis_url
class TestRedisSocketConversion:
@pytest.mark.parametrize(
("input_url", "expected"),
[
pytest.param(
None,
("redis://localhost:6379", "redis://localhost:6379"),
id="none_uses_default",
),
pytest.param(
"redis+socket:///run/redis/redis.sock",
(
"redis+socket:///run/redis/redis.sock",
"unix:///run/redis/redis.sock",
),
id="celery_style_socket",
),
pytest.param(
"unix:///run/redis/redis.sock",
(
"redis+socket:///run/redis/redis.sock",
"unix:///run/redis/redis.sock",
),
id="redis_py_style_socket",
),
pytest.param(
"redis+socket:///run/redis/redis.sock?virtual_host=5",
(
"redis+socket:///run/redis/redis.sock?virtual_host=5",
"unix:///run/redis/redis.sock?db=5",
),
id="celery_style_socket_with_db",
),
pytest.param(
"unix:///run/redis/redis.sock?db=10",
(
"redis+socket:///run/redis/redis.sock?virtual_host=10",
"unix:///run/redis/redis.sock?db=10",
),
id="redis_py_style_socket_with_db",
),
pytest.param(
"redis://myredishost:6379",
("redis://myredishost:6379", "redis://myredishost:6379"),
id="host_with_port_unchanged",
),
# Credentials in unix:// URL contain multiple colons (user:password@)
# Regression test for https://github.com/paperless-ngx/paperless-ngx/pull/12239
pytest.param(
"unix://user:password@/run/redis/redis.sock",
(
"redis+socket://user:password@/run/redis/redis.sock",
"unix://user:password@/run/redis/redis.sock",
),
id="redis_py_style_socket_with_credentials",
),
pytest.param(
"redis+socket://user:password@/run/redis/redis.sock",
(
"redis+socket://user:password@/run/redis/redis.sock",
"unix://user:password@/run/redis/redis.sock",
),
id="celery_style_socket_with_credentials",
),
],
)
def test_redis_socket_parsing(
self,
input_url: str | None,
expected: tuple[str, str],
) -> None:
"""
GIVEN:
- Various Redis connection URI formats
WHEN:
- The URI is parsed
THEN:
- Socket based URIs are translated
- Non-socket URIs are unchanged
- None provided uses default
"""
result = parse_redis_url(input_url)
assert expected == result
class TestParseHostingSettings:
@pytest.mark.parametrize(
("env", "expected"),
[
pytest.param(
{},
(
None,
"/",
"/accounts/login/",
"/dashboard",
"/accounts/login/?loggedout=1",
),
id="no_env_vars",
),
pytest.param(
{"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"},
(
"/paperless",
"/paperless/",
"/paperless/accounts/login/",
"/paperless/dashboard",
"/paperless/accounts/login/?loggedout=1",
),
id="force_script_name_only",
),
pytest.param(
{
"PAPERLESS_FORCE_SCRIPT_NAME": "/docs",
"PAPERLESS_LOGOUT_REDIRECT_URL": "/custom/logout",
},
(
"/docs",
"/docs/",
"/docs/accounts/login/",
"/docs/dashboard",
"/custom/logout",
),
id="force_script_name_and_logout_redirect",
),
],
)
def test_parse_hosting_settings(
self,
mocker: MockerFixture,
env: dict[str, str],
expected: tuple[str | None, str, str, str, str],
) -> None:
"""Test parse_hosting_settings with various env configurations."""
mocker.patch.dict(os.environ, env, clear=True)
result = parse_hosting_settings()
assert result == expected
def make_expected_schedule(
overrides: dict[str, dict[str, Any]] | None = None,
disabled: set[str] | None = None,
) -> dict[str, Any]:
"""
Build the expected schedule with optional overrides and disabled tasks.
"""
mail_expire = 9.0 * 60.0
classifier_expire = 59.0 * 60.0
index_expire = 23.0 * 60.0 * 60.0
sanity_expire = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
empty_trash_expire = 23.0 * 60.0 * 60.0
workflow_expire = 59.0 * 60.0
llm_index_expire = 23.0 * 60.0 * 60.0
share_link_cleanup_expire = 23.0 * 60.0 * 60.0
schedule: dict[str, Any] = {
"Check all e-mail accounts": {
"task": "paperless_mail.tasks.process_mail_accounts",
"schedule": crontab(minute="*/10"),
"options": {"expires": mail_expire},
},
"Train the classifier": {
"task": "documents.tasks.train_classifier",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": classifier_expire},
},
"Optimize the index": {
"task": "documents.tasks.index_optimize",
"schedule": crontab(minute=0, hour=0),
"options": {"expires": index_expire},
},
"Perform sanity check": {
"task": "documents.tasks.sanity_check",
"schedule": crontab(minute=30, hour=0, day_of_week="sun"),
"options": {"expires": sanity_expire},
},
"Empty trash": {
"task": "documents.tasks.empty_trash",
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": empty_trash_expire},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": workflow_expire},
},
"Rebuild LLM index": {
"task": "documents.tasks.llmindex_index",
"schedule": crontab(minute="10", hour="2"),
"options": {"expires": llm_index_expire},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour="2"),
"options": {"expires": share_link_cleanup_expire},
},
}
overrides = overrides or {}
disabled = disabled or set()
for key, val in overrides.items():
schedule[key] = {**schedule.get(key, {}), **val}
for key in disabled:
schedule.pop(key, None)
return schedule
class TestParseBeatSchedule:
@pytest.mark.parametrize(
("env", "expected"),
[
pytest.param({}, make_expected_schedule(), id="defaults"),
pytest.param(
{"PAPERLESS_EMAIL_TASK_CRON": "*/50 * * * mon"},
make_expected_schedule(
overrides={
"Check all e-mail accounts": {
"schedule": crontab(minute="*/50", day_of_week="mon"),
},
},
),
id="email-changed",
),
pytest.param(
{"PAPERLESS_INDEX_TASK_CRON": "disable"},
make_expected_schedule(disabled={"Optimize the index"}),
id="index-disabled",
),
pytest.param(
{
"PAPERLESS_EMAIL_TASK_CRON": "disable",
"PAPERLESS_TRAIN_TASK_CRON": "disable",
"PAPERLESS_SANITY_TASK_CRON": "disable",
"PAPERLESS_INDEX_TASK_CRON": "disable",
"PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
"PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
"PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
"PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
},
{},
id="all-disabled",
),
],
)
def test_parse_beat_schedule(
self,
env: dict[str, str],
expected: dict[str, Any],
mocker: MockerFixture,
) -> None:
mocker.patch.dict(os.environ, env, clear=False)
schedule = parse_beat_schedule()
assert schedule == expected
class TestParseDbSettings:
@@ -533,85 +264,3 @@ class TestParseDbSettings:
settings = parse_db_settings(tmp_path)
assert settings == expected_database_settings
class TestParseIgnoreDates:
"""Tests the parsing of the PAPERLESS_IGNORE_DATES setting value."""
def test_no_ignore_dates_set(self) -> None:
"""
GIVEN:
- No ignore dates are set
THEN:
- No ignore dates are parsed
"""
assert parse_ignore_dates("", "YMD") == set()
@pytest.mark.parametrize(
("env_str", "date_format", "expected"),
[
pytest.param(
"1985-05-01",
"YMD",
{datetime.date(1985, 5, 1)},
id="single-ymd",
),
pytest.param(
"1985-05-01,1991-12-05",
"YMD",
{datetime.date(1985, 5, 1), datetime.date(1991, 12, 5)},
id="multiple-ymd",
),
pytest.param(
"2010-12-13",
"YMD",
{datetime.date(2010, 12, 13)},
id="single-ymd-2",
),
pytest.param(
"11.01.10",
"DMY",
{datetime.date(2010, 1, 11)},
id="single-dmy",
),
pytest.param(
"11.01.2001,15-06-1996",
"DMY",
{datetime.date(2001, 1, 11), datetime.date(1996, 6, 15)},
id="multiple-dmy",
),
],
)
def test_ignore_dates_parsed(
self,
env_str: str,
date_format: str,
expected: set[datetime.date],
) -> None:
"""
GIVEN:
- Ignore dates are set per certain inputs
THEN:
- All ignore dates are parsed
"""
assert parse_ignore_dates(env_str, date_format) == expected
@pytest.mark.parametrize(
("languages", "expected"),
[
("de", ["de"]),
("zh", ["zh"]),
("fr+en", ["fr", "en"]),
# Locales must be supported
("en-001+fr-CA", ["en-001", "fr-CA"]),
("en-001+fr", ["en-001", "fr"]),
# Special case for Chinese: variants seem to miss some dates,
# so we always add "zh" as a fallback.
("en+zh-Hans-HK", ["en", "zh-Hans-HK", "zh"]),
("en+zh-Hans", ["en", "zh-Hans", "zh"]),
("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
],
)
def test_parse_dateparser_languages(languages: str, expected: list[str]) -> None:
assert sorted(parse_dateparser_languages(languages)) == sorted(expected)

View File

@@ -4,12 +4,8 @@ from pathlib import Path
import pytest
from pytest_mock import MockerFixture
from paperless.settings.parsers import get_bool_from_env
from paperless.settings.parsers import get_choice_from_env
from paperless.settings.parsers import get_float_from_env
from paperless.settings.parsers import get_int_from_env
from paperless.settings.parsers import get_list_from_env
from paperless.settings.parsers import get_path_from_env
from paperless.settings.parsers import parse_dict_from_str
from paperless.settings.parsers import str_to_bool
@@ -209,29 +205,6 @@ class TestParseDictFromString:
assert isinstance(result["database"]["port"], int)
class TestGetBoolFromEnv:
def test_existing_env_var(self, mocker):
"""Test that an existing environment variable is read and converted."""
mocker.patch.dict(os.environ, {"TEST_VAR": "true"})
assert get_bool_from_env("TEST_VAR") is True
def test_missing_env_var_uses_default_no(self, mocker):
"""Test that a missing environment variable uses default 'NO' and returns False."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_bool_from_env("MISSING_VAR") is False
def test_missing_env_var_with_explicit_default(self, mocker):
"""Test that a missing environment variable uses the provided default."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_bool_from_env("MISSING_VAR", default="yes") is True
def test_invalid_value_raises_error(self, mocker):
"""Test that an invalid value raises ValueError (delegates to str_to_bool)."""
mocker.patch.dict(os.environ, {"INVALID_VAR": "maybe"})
with pytest.raises(ValueError):
get_bool_from_env("INVALID_VAR")
class TestGetIntFromEnv:
@pytest.mark.parametrize(
("env_value", "expected"),
@@ -286,199 +259,6 @@ class TestGetIntFromEnv:
get_int_from_env("INVALID_INT")
class TestGetFloatFromEnv:
@pytest.mark.parametrize(
("env_value", "expected"),
[
pytest.param("3.14", 3.14, id="pi"),
pytest.param("42", 42.0, id="int_as_float"),
pytest.param("-2.5", -2.5, id="negative"),
pytest.param("0.0", 0.0, id="zero_float"),
pytest.param("0", 0.0, id="zero_int"),
pytest.param("1.5e2", 150.0, id="sci_positive"),
pytest.param("1e-3", 0.001, id="sci_negative"),
pytest.param("-1.23e4", -12300.0, id="sci_large"),
],
)
def test_existing_env_var_valid_floats(self, mocker, env_value, expected):
"""Test that existing environment variables with valid floats return correct values."""
mocker.patch.dict(os.environ, {"FLOAT_VAR": env_value})
assert get_float_from_env("FLOAT_VAR") == expected
@pytest.mark.parametrize(
("default", "expected"),
[
pytest.param(3.14, 3.14, id="pi_default"),
pytest.param(0.0, 0.0, id="zero_default"),
pytest.param(-2.5, -2.5, id="negative_default"),
pytest.param(None, None, id="none_default"),
],
)
def test_missing_env_var_with_defaults(self, mocker, default, expected):
"""Test that missing environment variables return provided defaults."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_float_from_env("MISSING_VAR", default=default) == expected
def test_missing_env_var_no_default(self, mocker):
"""Test that missing environment variable with no default returns None."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_float_from_env("MISSING_VAR") is None
@pytest.mark.parametrize(
"invalid_value",
[
pytest.param("not_a_number", id="text"),
pytest.param("42.5.0", id="double_decimal"),
pytest.param("42a", id="alpha_suffix"),
pytest.param("", id="empty"),
pytest.param(" ", id="whitespace"),
pytest.param("true", id="boolean"),
pytest.param("1.2.3", id="triple_decimal"),
],
)
def test_invalid_float_values_raise_error(self, mocker, invalid_value):
"""Test that invalid float values raise ValueError."""
mocker.patch.dict(os.environ, {"INVALID_FLOAT": invalid_value})
with pytest.raises(ValueError):
get_float_from_env("INVALID_FLOAT")
class TestGetPathFromEnv:
@pytest.mark.parametrize(
"env_value",
[
pytest.param("/tmp/test", id="absolute"),
pytest.param("relative/path", id="relative"),
pytest.param("/path/with spaces/file.txt", id="spaces"),
pytest.param(".", id="current_dir"),
pytest.param("..", id="parent_dir"),
pytest.param("/", id="root"),
],
)
def test_existing_env_var_paths(self, mocker, env_value):
"""Test that existing environment variables with paths return resolved Path objects."""
mocker.patch.dict(os.environ, {"PATH_VAR": env_value})
result = get_path_from_env("PATH_VAR")
assert isinstance(result, Path)
assert result == Path(env_value).resolve()
def test_missing_env_var_no_default(self, mocker):
"""Test that missing environment variable with no default returns None."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_path_from_env("MISSING_VAR") is None
def test_missing_env_var_with_none_default(self, mocker):
"""Test that missing environment variable with None default returns None."""
mocker.patch.dict(os.environ, {}, clear=True)
assert get_path_from_env("MISSING_VAR", default=None) is None
@pytest.mark.parametrize(
"default_path_str",
[
pytest.param("/default/path", id="absolute_default"),
pytest.param("relative/default", id="relative_default"),
pytest.param(".", id="current_default"),
],
)
def test_missing_env_var_with_path_defaults(self, mocker, default_path_str):
"""Test that missing environment variables return resolved default Path objects."""
mocker.patch.dict(os.environ, {}, clear=True)
default_path = Path(default_path_str)
result = get_path_from_env("MISSING_VAR", default=default_path)
assert isinstance(result, Path)
assert result == default_path.resolve()
def test_relative_paths_are_resolved(self, mocker):
"""Test that relative paths are properly resolved to absolute paths."""
mocker.patch.dict(os.environ, {"REL_PATH": "relative/path"})
result = get_path_from_env("REL_PATH")
assert result is not None
assert result.is_absolute()
class TestGetListFromEnv:
@pytest.mark.parametrize(
("env_value", "expected"),
[
pytest.param("a,b,c", ["a", "b", "c"], id="basic_comma_separated"),
pytest.param("single", ["single"], id="single_element"),
pytest.param("", [], id="empty_string"),
pytest.param("a, b , c", ["a", "b", "c"], id="whitespace_trimmed"),
pytest.param("a,,b,c", ["a", "b", "c"], id="empty_elements_removed"),
],
)
def test_existing_env_var_basic_parsing(self, mocker, env_value, expected):
"""Test that existing environment variables are parsed correctly."""
mocker.patch.dict(os.environ, {"LIST_VAR": env_value})
result = get_list_from_env("LIST_VAR")
assert result == expected
@pytest.mark.parametrize(
("separator", "env_value", "expected"),
[
pytest.param("|", "a|b|c", ["a", "b", "c"], id="pipe_separator"),
pytest.param(":", "a:b:c", ["a", "b", "c"], id="colon_separator"),
pytest.param(";", "a;b;c", ["a", "b", "c"], id="semicolon_separator"),
],
)
def test_custom_separators(self, mocker, separator, env_value, expected):
"""Test that custom separators work correctly."""
mocker.patch.dict(os.environ, {"LIST_VAR": env_value})
result = get_list_from_env("LIST_VAR", separator=separator)
assert result == expected
@pytest.mark.parametrize(
("default", "expected"),
[
pytest.param(
["default1", "default2"],
["default1", "default2"],
id="string_list_default",
),
pytest.param([1, 2, 3], [1, 2, 3], id="int_list_default"),
pytest.param(None, [], id="none_default_returns_empty_list"),
],
)
def test_missing_env_var_with_defaults(self, mocker, default, expected):
"""Test that missing environment variables return provided defaults."""
mocker.patch.dict(os.environ, {}, clear=True)
result = get_list_from_env("MISSING_VAR", default=default)
assert result == expected
def test_missing_env_var_no_default(self, mocker):
"""Test that missing environment variable with no default returns empty list."""
mocker.patch.dict(os.environ, {}, clear=True)
result = get_list_from_env("MISSING_VAR")
assert result == []
def test_required_env_var_missing_raises_error(self, mocker):
"""Test that missing required environment variable raises ValueError."""
mocker.patch.dict(os.environ, {}, clear=True)
with pytest.raises(
ValueError,
match="Required environment variable 'REQUIRED_VAR' is not set",
):
get_list_from_env("REQUIRED_VAR", required=True)
def test_required_env_var_with_default_does_not_raise(self, mocker):
"""Test that required environment variable with default does not raise error."""
mocker.patch.dict(os.environ, {}, clear=True)
result = get_list_from_env("REQUIRED_VAR", default=["default"], required=True)
assert result == ["default"]
def test_strip_whitespace_false(self, mocker):
"""Test that whitespace is preserved when strip_whitespace=False."""
mocker.patch.dict(os.environ, {"LIST_VAR": " a , b , c "})
result = get_list_from_env("LIST_VAR", strip_whitespace=False)
assert result == [" a ", " b ", " c "]
def test_remove_empty_false(self, mocker):
"""Test that empty elements are preserved when remove_empty=False."""
mocker.patch.dict(os.environ, {"LIST_VAR": "a,,b,,c"})
result = get_list_from_env("LIST_VAR", remove_empty=False)
assert result == ["a", "", "b", "", "c"]
class TestGetEnvChoice:
@pytest.fixture
def valid_choices(self) -> set[str]:
@@ -614,3 +394,21 @@ class TestGetEnvChoice:
result = get_choice_from_env("TEST_ENV", large_choices)
assert result == "option_50"
def test_different_env_keys(
self,
mocker: MockerFixture,
valid_choices: set[str],
) -> None:
"""Test function works with different environment variable keys."""
test_cases = [
("DJANGO_ENV", "development"),
("DATABASE_BACKEND", "staging"),
("LOG_LEVEL", "production"),
("APP_MODE", "development"),
]
for env_key, env_value in test_cases:
mocker.patch.dict("os.environ", {env_key: env_value})
result = get_choice_from_env(env_key, valid_choices)
assert result == env_value

View File

@@ -1,56 +0,0 @@
import os
from unittest import TestCase
from unittest import mock
from paperless.settings import _parse_paperless_url
from paperless.settings import default_threads_per_worker
class TestThreadCalculation(TestCase):
def test_workers_threads(self) -> None:
"""
GIVEN:
- Certain CPU counts
WHEN:
- Threads per worker is calculated
THEN:
- Threads per worker less than or equal to CPU count
- At least 1 thread per worker
"""
default_workers = 1
for i in range(1, 64):
with mock.patch(
"paperless.settings.multiprocessing.cpu_count",
) as cpu_count:
cpu_count.return_value = i
default_threads = default_threads_per_worker(default_workers)
self.assertGreaterEqual(default_threads, 1)
self.assertLessEqual(default_workers * default_threads, i)
class TestPaperlessURLSettings(TestCase):
def test_paperless_url(self) -> None:
"""
GIVEN:
- PAPERLESS_URL is set
WHEN:
- The URL is parsed
THEN:
- The URL is returned and present in related settings
"""
with mock.patch.dict(
os.environ,
{
"PAPERLESS_URL": "https://example.com",
},
):
url = _parse_paperless_url()
self.assertEqual("https://example.com", url)
from django.conf import settings
self.assertIn(url, settings.CSRF_TRUSTED_ORIGINS)
self.assertIn(url, settings.CORS_ALLOWED_ORIGINS)

View File

@@ -581,11 +581,11 @@ class TestV3MinimumUpgradeVersionCheck:
) -> None:
"""
GIVEN:
- DB is on an old v2 version (pre-v2.20.10)
- DB is on an old v2 version (pre-v2.20.9)
WHEN:
- The v3 upgrade check runs
THEN:
- The error hint explicitly references v2.20.10 so users know what to do
- The error hint explicitly references v2.20.9 so users know what to do
"""
mocker.patch.dict(
"paperless.checks.connections",
@@ -593,7 +593,7 @@ class TestV3MinimumUpgradeVersionCheck:
)
result = check_v3_minimum_upgrade_version(None)
assert len(result) == 1
assert "v2.20.10" in result[0].hint
assert "v2.20.9" in result[0].hint
def test_db_error_is_swallowed(self, mocker: MockerFixture) -> None:
"""

View File

@@ -0,0 +1,482 @@
import datetime
import os
from unittest import TestCase
from unittest import mock
import pytest
from celery.schedules import crontab
from paperless.settings import _parse_base_paths
from paperless.settings import _parse_beat_schedule
from paperless.settings import _parse_dateparser_languages
from paperless.settings import _parse_ignore_dates
from paperless.settings import _parse_paperless_url
from paperless.settings import _parse_redis_url
from paperless.settings import default_threads_per_worker
class TestIgnoreDateParsing(TestCase):
"""
Tests the parsing of the PAPERLESS_IGNORE_DATES setting value
"""
def _parse_checker(self, test_cases) -> None:
"""
Helper function to check ignore date parsing
Args:
test_cases: list of (env_str, date_format, expected_date_set) tuples
"""
for env_str, date_format, expected_date_set in test_cases:
self.assertSetEqual(
_parse_ignore_dates(env_str, date_format),
expected_date_set,
)
def test_no_ignore_dates_set(self) -> None:
"""
GIVEN:
- No ignore dates are set
THEN:
- No ignore dates are parsed
"""
self.assertSetEqual(_parse_ignore_dates(""), set())
def test_single_ignore_dates_set(self) -> None:
"""
GIVEN:
- Ignore dates are set per certain inputs
THEN:
- All ignore dates are parsed
"""
test_cases = [
("1985-05-01", "YMD", {datetime.date(1985, 5, 1)}),
(
"1985-05-01,1991-12-05",
"YMD",
{datetime.date(1985, 5, 1), datetime.date(1991, 12, 5)},
),
("2010-12-13", "YMD", {datetime.date(2010, 12, 13)}),
("11.01.10", "DMY", {datetime.date(2010, 1, 11)}),
(
"11.01.2001,15-06-1996",
"DMY",
{datetime.date(2001, 1, 11), datetime.date(1996, 6, 15)},
),
]
self._parse_checker(test_cases)
class TestThreadCalculation(TestCase):
def test_workers_threads(self) -> None:
"""
GIVEN:
- Certain CPU counts
WHEN:
- Threads per worker is calculated
THEN:
- Threads per worker less than or equal to CPU count
- At least 1 thread per worker
"""
default_workers = 1
for i in range(1, 64):
with mock.patch(
"paperless.settings.multiprocessing.cpu_count",
) as cpu_count:
cpu_count.return_value = i
default_threads = default_threads_per_worker(default_workers)
self.assertGreaterEqual(default_threads, 1)
self.assertLessEqual(default_workers * default_threads, i)
class TestRedisSocketConversion(TestCase):
def test_redis_socket_parsing(self) -> None:
"""
GIVEN:
- Various Redis connection URI formats
WHEN:
- The URI is parsed
THEN:
- Socket based URIs are translated
- Non-socket URIs are unchanged
- None provided uses default
"""
for input, expected in [
# Nothing is set
(None, ("redis://localhost:6379", "redis://localhost:6379")),
# celery style
(
"redis+socket:///run/redis/redis.sock",
(
"redis+socket:///run/redis/redis.sock",
"unix:///run/redis/redis.sock",
),
),
# redis-py / channels-redis style
(
"unix:///run/redis/redis.sock",
(
"redis+socket:///run/redis/redis.sock",
"unix:///run/redis/redis.sock",
),
),
# celery style with db
(
"redis+socket:///run/redis/redis.sock?virtual_host=5",
(
"redis+socket:///run/redis/redis.sock?virtual_host=5",
"unix:///run/redis/redis.sock?db=5",
),
),
# redis-py / channels-redis style with db
(
"unix:///run/redis/redis.sock?db=10",
(
"redis+socket:///run/redis/redis.sock?virtual_host=10",
"unix:///run/redis/redis.sock?db=10",
),
),
# Just a host with a port
(
"redis://myredishost:6379",
("redis://myredishost:6379", "redis://myredishost:6379"),
),
]:
result = _parse_redis_url(input)
self.assertTupleEqual(expected, result)
class TestCeleryScheduleParsing(TestCase):
MAIL_EXPIRE_TIME = 9.0 * 60.0
CLASSIFIER_EXPIRE_TIME = 59.0 * 60.0
INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
SANITY_EXPIRE_TIME = ((7.0 * 24.0) - 1.0) * 60.0 * 60.0
EMPTY_TRASH_EXPIRE_TIME = 23.0 * 60.0 * 60.0
RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME = 59.0 * 60.0
LLM_INDEX_EXPIRE_TIME = 23.0 * 60.0 * 60.0
CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME = 23.0 * 60.0 * 60.0
def test_schedule_configuration_default(self) -> None:
"""
GIVEN:
- No configured task schedules
WHEN:
- The celery beat schedule is built
THEN:
- The default schedule is returned
"""
schedule = _parse_beat_schedule()
self.assertDictEqual(
{
"Check all e-mail accounts": {
"task": "paperless_mail.tasks.process_mail_accounts",
"schedule": crontab(minute="*/10"),
"options": {"expires": self.MAIL_EXPIRE_TIME},
},
"Train the classifier": {
"task": "documents.tasks.train_classifier",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
},
"Optimize the index": {
"task": "documents.tasks.index_optimize",
"schedule": crontab(minute=0, hour=0),
"options": {"expires": self.INDEX_EXPIRE_TIME},
},
"Perform sanity check": {
"task": "documents.tasks.sanity_check",
"schedule": crontab(minute=30, hour=0, day_of_week="sun"),
"options": {"expires": self.SANITY_EXPIRE_TIME},
},
"Empty trash": {
"task": "documents.tasks.empty_trash",
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Rebuild LLM index": {
"task": "documents.tasks.llmindex_index",
"schedule": crontab(minute=10, hour=2),
"options": {
"expires": self.LLM_INDEX_EXPIRE_TIME,
},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
def test_schedule_configuration_changed(self) -> None:
"""
GIVEN:
- Email task is configured non-default
WHEN:
- The celery beat schedule is built
THEN:
- The email task is configured per environment
- The default schedule is returned for other tasks
"""
with mock.patch.dict(
os.environ,
{"PAPERLESS_EMAIL_TASK_CRON": "*/50 * * * mon"},
):
schedule = _parse_beat_schedule()
self.assertDictEqual(
{
"Check all e-mail accounts": {
"task": "paperless_mail.tasks.process_mail_accounts",
"schedule": crontab(minute="*/50", day_of_week="mon"),
"options": {"expires": self.MAIL_EXPIRE_TIME},
},
"Train the classifier": {
"task": "documents.tasks.train_classifier",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
},
"Optimize the index": {
"task": "documents.tasks.index_optimize",
"schedule": crontab(minute=0, hour=0),
"options": {"expires": self.INDEX_EXPIRE_TIME},
},
"Perform sanity check": {
"task": "documents.tasks.sanity_check",
"schedule": crontab(minute=30, hour=0, day_of_week="sun"),
"options": {"expires": self.SANITY_EXPIRE_TIME},
},
"Empty trash": {
"task": "documents.tasks.empty_trash",
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Rebuild LLM index": {
"task": "documents.tasks.llmindex_index",
"schedule": crontab(minute=10, hour=2),
"options": {
"expires": self.LLM_INDEX_EXPIRE_TIME,
},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
def test_schedule_configuration_disabled(self) -> None:
"""
GIVEN:
- Search index task is disabled
WHEN:
- The celery beat schedule is built
THEN:
- The search index task is not present
- The default schedule is returned for other tasks
"""
with mock.patch.dict(os.environ, {"PAPERLESS_INDEX_TASK_CRON": "disable"}):
schedule = _parse_beat_schedule()
self.assertDictEqual(
{
"Check all e-mail accounts": {
"task": "paperless_mail.tasks.process_mail_accounts",
"schedule": crontab(minute="*/10"),
"options": {"expires": self.MAIL_EXPIRE_TIME},
},
"Train the classifier": {
"task": "documents.tasks.train_classifier",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.CLASSIFIER_EXPIRE_TIME},
},
"Perform sanity check": {
"task": "documents.tasks.sanity_check",
"schedule": crontab(minute=30, hour=0, day_of_week="sun"),
"options": {"expires": self.SANITY_EXPIRE_TIME},
},
"Empty trash": {
"task": "documents.tasks.empty_trash",
"schedule": crontab(minute=0, hour="1"),
"options": {"expires": self.EMPTY_TRASH_EXPIRE_TIME},
},
"Check and run scheduled workflows": {
"task": "documents.tasks.check_scheduled_workflows",
"schedule": crontab(minute="5", hour="*/1"),
"options": {"expires": self.RUN_SCHEDULED_WORKFLOWS_EXPIRE_TIME},
},
"Rebuild LLM index": {
"task": "documents.tasks.llmindex_index",
"schedule": crontab(minute=10, hour=2),
"options": {
"expires": self.LLM_INDEX_EXPIRE_TIME,
},
},
"Cleanup expired share link bundles": {
"task": "documents.tasks.cleanup_expired_share_link_bundles",
"schedule": crontab(minute=0, hour=2),
"options": {
"expires": self.CLEANUP_EXPIRED_SHARE_BUNDLES_EXPIRE_TIME,
},
},
},
schedule,
)
def test_schedule_configuration_disabled_all(self) -> None:
"""
GIVEN:
- All tasks are disabled
WHEN:
- The celery beat schedule is built
THEN:
- No tasks are scheduled
"""
with mock.patch.dict(
os.environ,
{
"PAPERLESS_EMAIL_TASK_CRON": "disable",
"PAPERLESS_TRAIN_TASK_CRON": "disable",
"PAPERLESS_SANITY_TASK_CRON": "disable",
"PAPERLESS_INDEX_TASK_CRON": "disable",
"PAPERLESS_EMPTY_TRASH_TASK_CRON": "disable",
"PAPERLESS_WORKFLOW_SCHEDULED_TASK_CRON": "disable",
"PAPERLESS_LLM_INDEX_TASK_CRON": "disable",
"PAPERLESS_SHARE_LINK_BUNDLE_CLEANUP_CRON": "disable",
},
):
schedule = _parse_beat_schedule()
self.assertDictEqual(
{},
schedule,
)
class TestPaperlessURLSettings(TestCase):
def test_paperless_url(self) -> None:
"""
GIVEN:
- PAPERLESS_URL is set
WHEN:
- The URL is parsed
THEN:
- The URL is returned and present in related settings
"""
with mock.patch.dict(
os.environ,
{
"PAPERLESS_URL": "https://example.com",
},
):
url = _parse_paperless_url()
self.assertEqual("https://example.com", url)
from django.conf import settings
self.assertIn(url, settings.CSRF_TRUSTED_ORIGINS)
self.assertIn(url, settings.CORS_ALLOWED_ORIGINS)
class TestPathSettings(TestCase):
def test_default_paths(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is not set
WHEN:
- Settings are parsed
THEN:
- Paths are as expected
"""
base_paths = _parse_base_paths()
self.assertEqual(None, base_paths[0]) # FORCE_SCRIPT_NAME
self.assertEqual("/", base_paths[1]) # BASE_URL
self.assertEqual("/accounts/login/", base_paths[2]) # LOGIN_URL
self.assertEqual("/dashboard", base_paths[3]) # LOGIN_REDIRECT_URL
self.assertEqual(
"/accounts/login/?loggedout=1",
base_paths[4],
) # LOGOUT_REDIRECT_URL
@mock.patch("os.environ", {"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless"})
def test_subpath(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is set
WHEN:
- Settings are parsed
THEN:
- The path is returned and present in related settings
"""
base_paths = _parse_base_paths()
self.assertEqual("/paperless", base_paths[0]) # FORCE_SCRIPT_NAME
self.assertEqual("/paperless/", base_paths[1]) # BASE_URL
self.assertEqual("/paperless/accounts/login/", base_paths[2]) # LOGIN_URL
self.assertEqual("/paperless/dashboard", base_paths[3]) # LOGIN_REDIRECT_URL
self.assertEqual(
"/paperless/accounts/login/?loggedout=1",
base_paths[4],
) # LOGOUT_REDIRECT_URL
@mock.patch(
"os.environ",
{
"PAPERLESS_FORCE_SCRIPT_NAME": "/paperless",
"PAPERLESS_LOGOUT_REDIRECT_URL": "/foobar/",
},
)
def test_subpath_with_explicit_logout_url(self) -> None:
"""
GIVEN:
- PAPERLESS_FORCE_SCRIPT_NAME is set and so is PAPERLESS_LOGOUT_REDIRECT_URL
WHEN:
- Settings are parsed
THEN:
- The correct logout redirect URL is returned
"""
base_paths = _parse_base_paths()
self.assertEqual("/paperless/", base_paths[1]) # BASE_URL
self.assertEqual("/foobar/", base_paths[4]) # LOGOUT_REDIRECT_URL
@pytest.mark.parametrize(
("languages", "expected"),
[
("de", ["de"]),
("zh", ["zh"]),
("fr+en", ["fr", "en"]),
# Locales must be supported
("en-001+fr-CA", ["en-001", "fr-CA"]),
("en-001+fr", ["en-001", "fr"]),
# Special case for Chinese: variants seem to miss some dates,
# so we always add "zh" as a fallback.
("en+zh-Hans-HK", ["en", "zh-Hans-HK", "zh"]),
("en+zh-Hans", ["en", "zh-Hans", "zh"]),
("en+zh-Hans+zh-Hant", ["en", "zh-Hans", "zh-Hant", "zh"]),
],
)
def test_parser_date_parser_languages(languages, expected) -> None:
assert sorted(_parse_dateparser_languages(languages)) == sorted(expected)

View File

@@ -21,18 +21,12 @@ from documents.views import BulkEditView
from documents.views import ChatStreamingView
from documents.views import CorrespondentViewSet
from documents.views import CustomFieldViewSet
from documents.views import DeleteDocumentsView
from documents.views import DocumentTypeViewSet
from documents.views import EditPdfDocumentsView
from documents.views import GlobalSearchView
from documents.views import IndexView
from documents.views import LogViewSet
from documents.views import MergeDocumentsView
from documents.views import PostDocumentView
from documents.views import RemoteVersionView
from documents.views import RemovePasswordDocumentsView
from documents.views import ReprocessDocumentsView
from documents.views import RotateDocumentsView
from documents.views import SavedViewViewSet
from documents.views import SearchAutoCompleteView
from documents.views import SelectionDataView
@@ -138,36 +132,6 @@ urlpatterns = [
BulkEditView.as_view(),
name="bulk_edit",
),
re_path(
"^delete/",
DeleteDocumentsView.as_view(),
name="delete_documents",
),
re_path(
"^reprocess/",
ReprocessDocumentsView.as_view(),
name="reprocess_documents",
),
re_path(
"^rotate/",
RotateDocumentsView.as_view(),
name="rotate_documents",
),
re_path(
"^merge/",
MergeDocumentsView.as_view(),
name="merge_documents",
),
re_path(
"^edit_pdf/",
EditPdfDocumentsView.as_view(),
name="edit_pdf_documents",
),
re_path(
"^remove_password/",
RemovePasswordDocumentsView.as_view(),
name="remove_password_documents",
),
re_path(
"^bulk_download/",
BulkDownloadView.as_view(),

View File

@@ -1665,7 +1665,7 @@ class TestManagementCommand(TestCase):
"paperless_mail.management.commands.mail_fetcher.tasks.process_mail_accounts",
)
def test_mail_fetcher(self, m) -> None:
call_command("mail_fetcher", skip_checks=True)
call_command("mail_fetcher")
m.assert_called_once()

uv.lock (generated, 399 changes)
View File

@@ -1172,14 +1172,14 @@ wheels = [
[[package]]
name = "drf-spectacular-sidecar"
version = "2026.3.1"
version = "2026.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/aa/42/2f8c1b2846399d47094ec414bc0d6a7cce7ba95fd6545a97285eee89f7f1/drf_spectacular_sidecar-2026.3.1.tar.gz", hash = "sha256:5b7fedad66e3851f2f442480792c08115d79217959d01645b93d3d2258938be1", size = 2461501, upload-time = "2026-03-01T11:31:19.708Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1e/81/c7b0e3ccbd5a039c4f4fcfecf88391a666ca1406a953886e2f39295b1c90/drf_spectacular_sidecar-2026.1.1.tar.gz", hash = "sha256:6f7c173a8ddbbbdafc7a27e028614b65f07a89ca90f996a432d57460463b56be", size = 2468060, upload-time = "2026-01-01T11:27:12.682Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/28/2d5e64d101ebc5180674fcaf7b5a35e398e2f8d9688b2e8d52b0e1394e7d/drf_spectacular_sidecar-2026.3.1-py3-none-any.whl", hash = "sha256:864edb83e022e13e3941c325c3cc0c954c843fa2e1d0bc95e81887664b2d3dad", size = 2481725, upload-time = "2026-03-01T11:31:18.469Z" },
{ url = "https://files.pythonhosted.org/packages/db/96/38725edda526f3e9e597f531beeec94b0ef433d9494f06a13b7636eecb6e/drf_spectacular_sidecar-2026.1.1-py3-none-any.whl", hash = "sha256:af8df62f1b594ec280351336d837eaf2402ab25a6bc2a1fad7aee9935821070f", size = 2489520, upload-time = "2026-01-01T11:27:11.056Z" },
]
[[package]]
@@ -1230,11 +1230,11 @@ wheels = [
[[package]]
name = "faker"
version = "40.8.0"
version = "40.5.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/70/03/14428edc541467c460d363f6e94bee9acc271f3e62470630fc9a647d0cf2/faker-40.8.0.tar.gz", hash = "sha256:936a3c9be6c004433f20aa4d99095df5dec82b8c7ad07459756041f8c1728875", size = 1956493, upload-time = "2026-03-04T16:18:48.161Z" }
sdist = { url = "https://files.pythonhosted.org/packages/03/2a/96fff3edcb10f6505143448a4b91535f77b74865cec45be52690ee280443/faker-40.5.1.tar.gz", hash = "sha256:70222361cd82aa10cb86066d1a4e8f47f2bcdc919615c412045a69c4e6da0cd3", size = 1952684, upload-time = "2026-02-23T21:34:38.362Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4c/3b/c6348f1e285e75b069085b18110a4e6325b763a5d35d5e204356fc7c20b3/faker-40.8.0-py3-none-any.whl", hash = "sha256:eb21bdba18f7a8375382eb94fb436fce07046893dc94cb20817d28deb0c3d579", size = 1989124, upload-time = "2026-03-04T16:18:46.45Z" },
{ url = "https://files.pythonhosted.org/packages/4d/a9/1eed4db92d0aec2f9bfdf1faae0ab0418b5e121dda5701f118a7a4f0cd6a/faker-40.5.1-py3-none-any.whl", hash = "sha256:c69640c1e13bad49b4bcebcbf1b52f9f1a872b6ea186c248ada34d798f1661bf", size = 1987053, upload-time = "2026-02-23T21:34:36.418Z" },
]
[[package]]
@@ -1251,11 +1251,11 @@ wheels = [
[[package]]
name = "filelock"
version = "3.25.2"
version = "3.24.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" }
sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" },
{ url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" },
]
[[package]]
@@ -1748,73 +1748,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "ijson"
version = "3.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/57/60d1a6a512f2f0508d0bc8b4f1cc5616fd3196619b66bd6a01f9155a1292/ijson-3.5.0.tar.gz", hash = "sha256:94688760720e3f5212731b3cb8d30267f9a045fb38fb3870254e7b9504246f31", size = 68658, upload-time = "2026-02-24T03:58:30.974Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/65/da/644343198abca5e0f6e2486063f8d8f3c443ca0ef5e5c890e51ef6032e33/ijson-3.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5616311404b858d32740b7ad8b9a799c62165f5ecb85d0a8ed16c21665a90533", size = 88964, upload-time = "2026-02-24T03:56:53.099Z" },
{ url = "https://files.pythonhosted.org/packages/5b/63/8621190aa2baf96156dfd4c632b6aa9f1464411e50b98750c09acc0505ea/ijson-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9733f94029dd41702d573ef64752e2556e72aea14623d6dbb7a44ca1ccf30fd", size = 60582, upload-time = "2026-02-24T03:56:54.261Z" },
{ url = "https://files.pythonhosted.org/packages/20/31/6a3f041fdd17dacff33b7d7d3ba3df6dca48740108340c6042f974b2ad20/ijson-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:db8398c6721b98412a4f618da8022550c8b9c5d9214040646071b5deb4d4a393", size = 60632, upload-time = "2026-02-24T03:56:55.159Z" },
{ url = "https://files.pythonhosted.org/packages/e4/68/474541998abbdecfd46a744536878335de89aceb9f085bff1aaf35575ceb/ijson-3.5.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c061314845c08163b1784b6076ea5f075372461a32e6916f4e5f211fd4130b64", size = 131988, upload-time = "2026-02-24T03:56:56.35Z" },
{ url = "https://files.pythonhosted.org/packages/cd/32/e05ff8b72a44fe9d192f41c5dcbc35cfa87efc280cdbfe539ffaf4a7535e/ijson-3.5.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1111a1c5ac79119c5d6e836f900c1a53844b50a18af38311baa6bb61e2645aca", size = 138669, upload-time = "2026-02-24T03:56:57.555Z" },
{ url = "https://files.pythonhosted.org/packages/49/b5/955a83b031102c7a602e2c06d03aff0a0e584212f09edb94ccc754d203ac/ijson-3.5.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e74aff8c681c24002b61b1822f9511d4c384f324f7dbc08c78538e01fdc9fcb", size = 135093, upload-time = "2026-02-24T03:56:59.267Z" },
{ url = "https://files.pythonhosted.org/packages/e8/f2/30250cfcb4d2766669b31f6732689aab2bb91de426a15a3ebe482df7ee48/ijson-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:739a7229b1b0cc5f7e2785a6e7a5fc915e850d3fed9588d0e89a09f88a417253", size = 138715, upload-time = "2026-02-24T03:57:00.491Z" },
{ url = "https://files.pythonhosted.org/packages/a2/05/785a145d7e75e04e04480d59b6323cd4b1d9013a6cd8643fa635fbc93490/ijson-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ef88712160360cab3ca6471a4e5418243f8b267cf1fe1620879d1b5558babc71", size = 133194, upload-time = "2026-02-24T03:57:01.759Z" },
{ url = "https://files.pythonhosted.org/packages/14/eb/80d6f8a748dead4034cea0939494a67d10ccf88d6413bf6e860393139676/ijson-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ca0d1b6b5f8166a6248f4309497585fb8553b04bc8179a0260fad636cfdb798", size = 135588, upload-time = "2026-02-24T03:57:03.131Z" },
{ url = "https://files.pythonhosted.org/packages/aa/17/9c63c7688025f3a8c47ea717b8306649c8c7244e49e20a2be4e3515dc75c/ijson-3.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1ebefbe149a6106cc848a3eaf536af51a9b5ccc9082de801389f152dba6ab755", size = 88536, upload-time = "2026-02-24T03:57:06.809Z" },
{ url = "https://files.pythonhosted.org/packages/6f/dd/e15c2400244c117b06585452ebc63ae254f5a6964f712306afd1422daae0/ijson-3.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:19e30d9f00f82e64de689c0b8651b9cfed879c184b139d7e1ea5030cec401c21", size = 60499, upload-time = "2026-02-24T03:57:09.155Z" },
{ url = "https://files.pythonhosted.org/packages/77/a9/bf4fe3538a0c965f16b406f180a06105b875da83f0743e36246be64ef550/ijson-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a04a33ee78a6f27b9b8528c1ca3c207b1df3b8b867a4cf2fcc4109986f35c227", size = 60330, upload-time = "2026-02-24T03:57:10.574Z" },
{ url = "https://files.pythonhosted.org/packages/31/76/6f91bdb019dd978fce1bc5ea1cd620cfc096d258126c91db2c03a20a7f34/ijson-3.5.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7d48dc2984af02eb3c56edfb3f13b3f62f2f3e4fe36f058c8cfc75d93adf4fed", size = 138977, upload-time = "2026-02-24T03:57:11.932Z" },
{ url = "https://files.pythonhosted.org/packages/11/be/bbc983059e48a54b0121ee60042979faed7674490bbe7b2c41560db3f436/ijson-3.5.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f1e73a44844d9adbca9cf2c4132cd875933e83f3d4b23881fcaf82be83644c7d", size = 149785, upload-time = "2026-02-24T03:57:13.255Z" },
{ url = "https://files.pythonhosted.org/packages/6d/81/2fee58f9024a3449aee83edfa7167fb5ccd7e1af2557300e28531bb68e16/ijson-3.5.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7389a56b8562a19948bdf1d7bae3a2edc8c7f86fb59834dcb1c4c722818e645a", size = 149729, upload-time = "2026-02-24T03:57:14.191Z" },
{ url = "https://files.pythonhosted.org/packages/c7/56/f1706761fcc096c9d414b3dcd000b1e6e5c24364c21cfba429837f98ee8d/ijson-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3176f23f8ebec83f374ed0c3b4e5a0c4db7ede54c005864efebbed46da123608", size = 150697, upload-time = "2026-02-24T03:57:15.855Z" },
{ url = "https://files.pythonhosted.org/packages/d9/6e/ee0d9c875a0193b632b3e9ccd1b22a50685fb510256ad57ba483b6529f77/ijson-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6babd88e508630c6ef86c9bebaaf13bb2fb8ec1d8f8868773a03c20253f599bc", size = 142873, upload-time = "2026-02-24T03:57:16.831Z" },
{ url = "https://files.pythonhosted.org/packages/d2/bf/f9d4399d0e6e3fd615035290a71e97c843f17f329b43638c0a01cf112d73/ijson-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dc1b3836b174b6db2fa8319f1926fb5445abd195dc963368092103f8579cb8ed", size = 151583, upload-time = "2026-02-24T03:57:17.757Z" },
{ url = "https://files.pythonhosted.org/packages/a2/71/d67e764a712c3590627480643a3b51efcc3afa4ef3cb54ee4c989073c97e/ijson-3.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e9cedc10e40dd6023c351ed8bfc7dcfce58204f15c321c3c1546b9c7b12562a4", size = 88544, upload-time = "2026-02-24T03:57:21.293Z" },
{ url = "https://files.pythonhosted.org/packages/1a/39/f1c299371686153fa3cf5c0736b96247a87a1bee1b7145e6d21f359c505a/ijson-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3647649f782ee06c97490b43680371186651f3f69bebe64c6083ee7615d185e5", size = 60495, upload-time = "2026-02-24T03:57:22.501Z" },
{ url = "https://files.pythonhosted.org/packages/16/94/b1438e204d75e01541bebe3e668fe3e68612d210e9931ae1611062dd0a56/ijson-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:90e74be1dce05fce73451c62d1118671f78f47c9f6be3991c82b91063bf01fc9", size = 60325, upload-time = "2026-02-24T03:57:23.332Z" },
{ url = "https://files.pythonhosted.org/packages/30/e2/4aa9c116fa86cc8b0f574f3c3a47409edc1cd4face05d0e589a5a176b05d/ijson-3.5.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:78e9ad73e7be2dd80627504bd5cbf512348c55ce2c06e362ed7683b5220e8568", size = 138774, upload-time = "2026-02-24T03:57:24.683Z" },
{ url = "https://files.pythonhosted.org/packages/d2/d2/738b88752a70c3be1505faa4dcd7110668c2712e582a6a36488ed1e295d4/ijson-3.5.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9577449313cc94be89a4fe4b3e716c65f09cc19636d5a6b2861c4e80dddebd58", size = 149820, upload-time = "2026-02-24T03:57:26.062Z" },
{ url = "https://files.pythonhosted.org/packages/ed/df/0b3ab9f393ca8f72ea03bc896ba9fdc987e90ae08cdb51c32a4ee0c14d5e/ijson-3.5.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e4c1178fb50aff5f5701a30a5152ead82a14e189ce0f6102fa1b5f10b2f54ff", size = 149747, upload-time = "2026-02-24T03:57:27.308Z" },
{ url = "https://files.pythonhosted.org/packages/cc/a3/b0037119f75131b78cb00acc2657b1a9d0435475f1f2c5f8f5a170b66b9c/ijson-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0eb402ab026ffb37a918d75af2b7260fe6cfbce13232cc83728a714dd30bd81d", size = 151027, upload-time = "2026-02-24T03:57:28.522Z" },
{ url = "https://files.pythonhosted.org/packages/22/a0/cb344de1862bf09d8f769c9d25c944078c87dd59a1b496feec5ad96309a4/ijson-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5b08ee08355f9f729612a8eb9bf69cc14f9310c3b2a487c6f1c3c65d85216ec4", size = 142996, upload-time = "2026-02-24T03:57:29.774Z" },
{ url = "https://files.pythonhosted.org/packages/ca/32/a8ffd67182e02ea61f70f62daf43ded4fa8a830a2520a851d2782460aba8/ijson-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bda62b6d48442903e7bf56152108afb7f0f1293c2b9bef2f2c369defea76ab18", size = 152068, upload-time = "2026-02-24T03:57:30.969Z" },
{ url = "https://files.pythonhosted.org/packages/42/65/13e2492d17e19a2084523e18716dc2809159f2287fd2700c735f311e76c4/ijson-3.5.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:4d4b0cd676b8c842f7648c1a783448fac5cd3b98289abd83711b3e275e143524", size = 93019, upload-time = "2026-02-24T03:57:33.976Z" },
{ url = "https://files.pythonhosted.org/packages/33/92/483fc97ece0c3f1cecabf48f6a7a36e89d19369eec462faaeaa34c788992/ijson-3.5.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:252dec3680a48bb82d475e36b4ae1b3a9d7eb690b951bb98a76c5fe519e30188", size = 62714, upload-time = "2026-02-24T03:57:34.819Z" },
{ url = "https://files.pythonhosted.org/packages/4b/88/793fe020a0fe9d9eed4c285cf4a5cfdb0a935708b3bde0d72f35c794b513/ijson-3.5.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:aa1b5dca97d323931fde2501172337384c958914d81a9dac7f00f0d4bfc76bc7", size = 62460, upload-time = "2026-02-24T03:57:35.874Z" },
{ url = "https://files.pythonhosted.org/packages/51/69/f1a2690aa8d4df1f4e262b385e65a933ffdc250b091531bac9a449c19e16/ijson-3.5.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7a5ec7fd86d606094bba6f6f8f87494897102fa4584ef653f3005c51a784c320", size = 199273, upload-time = "2026-02-24T03:57:37.07Z" },
{ url = "https://files.pythonhosted.org/packages/ea/a2/f1346d5299e79b988ab472dc773d5381ec2d57c23cb2f1af3ede4a810e62/ijson-3.5.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:009f41443e1521847701c6d87fa3923c0b1961be3c7e7de90947c8cb92ea7c44", size = 216884, upload-time = "2026-02-24T03:57:38.346Z" },
{ url = "https://files.pythonhosted.org/packages/28/3c/8b637e869be87799e6c2c3c275a30a546f086b1aed77e2b7f11512168c5a/ijson-3.5.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e4c3651d1f9fe2839a93fdf8fd1d5ca3a54975349894249f3b1b572bcc4bd577", size = 207306, upload-time = "2026-02-24T03:57:39.718Z" },
{ url = "https://files.pythonhosted.org/packages/7f/7c/18b1c1df6951ca056782d7580ec40cea4ff9a27a0947d92640d1cc8c4ae3/ijson-3.5.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:945b7abcfcfeae2cde17d8d900870f03536494245dda7ad4f8d056faa303256c", size = 211364, upload-time = "2026-02-24T03:57:40.953Z" },
{ url = "https://files.pythonhosted.org/packages/f3/55/e795812e82851574a9dba8a53fde045378f531ef14110c6fb55dbd23b443/ijson-3.5.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0574b0a841ff97495c13e9d7260fbf3d85358b061f540c52a123db9dbbaa2ed6", size = 200608, upload-time = "2026-02-24T03:57:42.272Z" },
{ url = "https://files.pythonhosted.org/packages/5c/cd/013c85b4749b57a4cb4c2670014d1b32b8db4ab1a7be92ea7aeb5d7fe7b5/ijson-3.5.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f969ffb2b89c5cdf686652d7fb66252bc72126fa54d416317411497276056a18", size = 205127, upload-time = "2026-02-24T03:57:43.286Z" },
{ url = "https://files.pythonhosted.org/packages/7a/93/0868efe753dc1df80cc405cf0c1f2527a6991643607c741bff8dcb899b3b/ijson-3.5.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:25a5a6b2045c90bb83061df27cfa43572afa43ba9408611d7bfe237c20a731a9", size = 89094, upload-time = "2026-02-24T03:57:46.115Z" },
{ url = "https://files.pythonhosted.org/packages/24/94/fd5a832a0df52ef5e4e740f14ac8640725d61034a1b0c561e8b5fb424706/ijson-3.5.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:8976c54c0b864bc82b951bae06567566ac77ef63b90a773a69cd73aab47f4f4f", size = 60715, upload-time = "2026-02-24T03:57:47.552Z" },
{ url = "https://files.pythonhosted.org/packages/70/79/1b9a90af5732491f9eec751ee211b86b11011e1158c555c06576d52c3919/ijson-3.5.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:859eb2038f7f1b0664df4241957694cc35e6295992d71c98659b22c69b3cbc10", size = 60638, upload-time = "2026-02-24T03:57:48.428Z" },
{ url = "https://files.pythonhosted.org/packages/23/6f/2c551ea980fe56f68710a8d5389cfbd015fc45aaafd17c3c52c346db6aa1/ijson-3.5.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c911aa02991c7c0d3639b6619b93a93210ff1e7f58bf7225d613abea10adc78e", size = 140667, upload-time = "2026-02-24T03:57:49.314Z" },
{ url = "https://files.pythonhosted.org/packages/25/0e/27b887879ba6a5bc29766e3c5af4942638c952220fd63e1e442674f7883a/ijson-3.5.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:903cbdc350173605220edc19796fbea9b2203c8b3951fb7335abfa8ed37afda8", size = 149850, upload-time = "2026-02-24T03:57:50.329Z" },
{ url = "https://files.pythonhosted.org/packages/da/1e/23e10e1bc04bf31193b21e2960dce14b17dbd5d0c62204e8401c59d62c08/ijson-3.5.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a4549d96ded5b8efa71639b2160235415f6bdb8c83367615e2dbabcb72755c33", size = 149206, upload-time = "2026-02-24T03:57:51.261Z" },
{ url = "https://files.pythonhosted.org/packages/8e/90/e552f6495063b235cf7fa2c592f6597c057077195e517b842a0374fd470c/ijson-3.5.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6b2dcf6349e6042d83f3f8c39ce84823cf7577eba25bac5aae5e39bbbbbe9c1c", size = 150438, upload-time = "2026-02-24T03:57:52.198Z" },
{ url = "https://files.pythonhosted.org/packages/5c/18/45bf8f297c41b42a1c231d261141097babd953d2c28a07be57ae4c3a1a02/ijson-3.5.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:e44af39e6f8a17e5627dcd89715d8279bf3474153ff99aae031a936e5c5572e5", size = 144369, upload-time = "2026-02-24T03:57:53.22Z" },
{ url = "https://files.pythonhosted.org/packages/9b/3a/deb9772bb2c0cead7ad64f00c3598eec9072bdf511818e70e2c512eeabbe/ijson-3.5.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9260332304b7e7828db56d43f08fc970a3ab741bf84ff10189361ea1b60c395b", size = 151352, upload-time = "2026-02-24T03:57:54.375Z" },
{ url = "https://files.pythonhosted.org/packages/9f/d9/86f7fac35e0835faa188085ae0579e813493d5261ce056484015ad533445/ijson-3.5.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:2ea4b676ec98e374c1df400a47929859e4fa1239274339024df4716e802aa7e4", size = 93069, upload-time = "2026-02-24T03:57:57.849Z" },
{ url = "https://files.pythonhosted.org/packages/33/d2/e7366ed9c6e60228d35baf4404bac01a126e7775ea8ce57f560125ed190a/ijson-3.5.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:014586eec043e23c80be9a923c56c3a0920a0f1f7d17478ce7bc20ba443968ef", size = 62767, upload-time = "2026-02-24T03:57:58.758Z" },
{ url = "https://files.pythonhosted.org/packages/35/8b/3e703e8cc4b3ada79f13b28070b51d9550c578f76d1968657905857b2ddd/ijson-3.5.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d5b8b886b0248652d437f66e7c5ac318bbdcb2c7137a7e5327a68ca00b286f5f", size = 62467, upload-time = "2026-02-24T03:58:00.261Z" },
{ url = "https://files.pythonhosted.org/packages/21/42/0c91af32c1ee8a957fdac2e051b5780756d05fd34e4b60d94a08d51bac1d/ijson-3.5.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:498fd46ae2349297e43acf97cdc421e711dbd7198418677259393d2acdc62d78", size = 200447, upload-time = "2026-02-24T03:58:01.591Z" },
{ url = "https://files.pythonhosted.org/packages/f9/80/796ea0e391b7e2d45c5b1b451734bba03f81c2984cf955ea5eaa6c4920ad/ijson-3.5.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:22a51b4f9b81f12793731cf226266d1de2112c3c04ba4a04117ad4e466897e05", size = 217820, upload-time = "2026-02-24T03:58:02.598Z" },
{ url = "https://files.pythonhosted.org/packages/38/14/52b6613fdda4078c62eb5b4fe3efc724ddc55a4ad524c93de51830107aa3/ijson-3.5.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9636c710dc4ac4a281baa266a64f323b4cc165cec26836af702c44328b59a515", size = 208310, upload-time = "2026-02-24T03:58:04.759Z" },
{ url = "https://files.pythonhosted.org/packages/6a/ad/8b3105a78774fd4a65e534a21d975ef3a77e189489fe3029ebcaeba5e243/ijson-3.5.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f7168a39e8211107666d71b25693fd1b2bac0b33735ef744114c403c6cac21e1", size = 211843, upload-time = "2026-02-24T03:58:05.836Z" },
{ url = "https://files.pythonhosted.org/packages/36/ab/a2739f6072d6e1160581bc3ed32da614c8cced023dcd519d9c5fa66e0425/ijson-3.5.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8696454245415bc617ab03b0dc3ae4c86987df5dc6a90bad378fe72c5409d89e", size = 200906, upload-time = "2026-02-24T03:58:07.788Z" },
{ url = "https://files.pythonhosted.org/packages/6d/5e/e06c2de3c3d4a9cfb655c1ad08a68fb72838d271072cdd3196576ac4431a/ijson-3.5.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c21bfb61f71f191565885bf1bc29e0a186292d866b4880637b833848360bdc1b", size = 205495, upload-time = "2026-02-24T03:58:09.163Z" },
{ url = "https://files.pythonhosted.org/packages/d9/3b/d31ecfa63a218978617446159f3d77aab2417a5bd2885c425b176353ff78/ijson-3.5.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d64c624da0e9d692d6eb0ff63a79656b59d76bf80773a17c5b0f835e4e8ef627", size = 57715, upload-time = "2026-02-24T03:58:24.545Z" },
{ url = "https://files.pythonhosted.org/packages/30/51/b170e646d378e8cccf9637c05edb5419b00c2c4df64b0258c3af5355608e/ijson-3.5.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:876f7df73b7e0d6474f9caa729b9cdbfc8e76de9075a4887dfd689e29e85c4ca", size = 57205, upload-time = "2026-02-24T03:58:25.681Z" },
{ url = "https://files.pythonhosted.org/packages/ef/83/44dbd0231b0a8c6c14d27473d10c4e27dfbce7d5d9a833c79e3e6c33eb40/ijson-3.5.0-pp311-pypy311_pp73-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:e7dbff2c8d9027809b0cde663df44f3210da10ea377121d42896fb6ee405dd31", size = 71229, upload-time = "2026-02-24T03:58:27.103Z" },
{ url = "https://files.pythonhosted.org/packages/c8/98/cf84048b7c6cec888826e696a31f45bee7ebcac15e532b6be1fc4c2c9608/ijson-3.5.0-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4217a1edc278660679e1197c83a1a2a2d367792bfbb2a3279577f4b59b93730d", size = 71217, upload-time = "2026-02-24T03:58:28.021Z" },
{ url = "https://files.pythonhosted.org/packages/3c/0a/e34c729a87ff67dc6540f6bcc896626158e691d433ab57db0086d73decd2/ijson-3.5.0-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:04f0fc740311388ee745ba55a12292b722d6f52000b11acbb913982ba5fbdf87", size = 68618, upload-time = "2026-02-24T03:58:28.918Z" },
]
[[package]]
name = "imagehash"
version = "4.3.2"
@@ -2139,15 +2072,15 @@ wheels = [
[[package]]
name = "llama-index-embeddings-openai"
version = "0.5.2"
version = "0.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "llama-index-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ea/a1/d238dfa453ba8ebc4f6261d6384b663f50b8dba6f4b22d8be800b305863d/llama_index_embeddings_openai-0.5.2.tar.gz", hash = "sha256:091bd0c3e9182748e8827de7d79713a219d5f5e0dc97d1bb7b271cf524520e4b", size = 7630, upload-time = "2026-03-03T11:27:38.127Z" }
sdist = { url = "https://files.pythonhosted.org/packages/10/36/90336d054a5061a3f5bc17ac2c18ef63d9d84c55c14d557de484e811ea4d/llama_index_embeddings_openai-0.5.1.tar.gz", hash = "sha256:1c89867a48b0d0daa3d2d44f5e76b394b2b2ef9935932daf921b9e77939ccda8", size = 7020, upload-time = "2025-09-08T20:17:44.681Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1f/5e/da156f9c77443d22287eeaea341fe35fdcc25e59a9250e4cb10d4d5a066a/llama_index_embeddings_openai-0.5.2-py3-none-any.whl", hash = "sha256:37e7967de05b05f16c9b171091110bb1c6e5a0720198ea306d57cd3920cb81b7", size = 7667, upload-time = "2026-03-03T11:27:37.394Z" },
{ url = "https://files.pythonhosted.org/packages/23/4a/8ab11026cf8deff8f555aa73919be0bac48332683111e5fc4290f352dc50/llama_index_embeddings_openai-0.5.1-py3-none-any.whl", hash = "sha256:a2fcda3398bbd987b5ce3f02367caee8e84a56b930fdf43cc1d059aa9fd20ca5", size = 7011, upload-time = "2025-09-08T20:17:44.015Z" },
]
[[package]]
@@ -2178,15 +2111,15 @@ wheels = [
[[package]]
name = "llama-index-llms-openai"
version = "0.6.26"
version = "0.6.21"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "llama-index-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4a/5e/a7a47d46dc2eb30953d83654112c8af6f61821ca78ef3ea22e30729aac3a/llama_index_llms_openai-0.6.26.tar.gz", hash = "sha256:3474602ecbc30c88a8b585cfd5737891d45da78251a5e067c4dbc2d3cc3d08db", size = 27262, upload-time = "2026-03-05T02:53:50.581Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d8/5b/775289b3064302966cc839bbccfdbe314f706eaf58ad4233b86e5d53343d/llama_index_llms_openai-0.6.21.tar.gz", hash = "sha256:0b92dcfb01cbc7752f5b8bdf6d93430643d295210cf9392b45291d6fdd81e0ee", size = 25961, upload-time = "2026-02-26T04:19:33.604Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2e/8a/f46f59279c078b001374813f69987b43b7c3bd9df01981af545cf2d954d7/llama_index_llms_openai-0.6.26-py3-none-any.whl", hash = "sha256:2062ef505676d0a1c7c116c138c2f890aa7653619fc3ca697e47df7bd2ef8b3f", size = 28330, upload-time = "2026-03-05T02:53:40.421Z" },
{ url = "https://files.pythonhosted.org/packages/e3/d7/5b513acbf0bfc2b6ef281b6bbca764062facc431e8f13763c16005fbd34b/llama_index_llms_openai-0.6.21-py3-none-any.whl", hash = "sha256:ef8c048849f844c7db9ff4208cca9878a799bc5fcdd72954197ea11e64b37c97", size = 26965, upload-time = "2026-02-26T04:19:34.561Z" },
]
[[package]]
@@ -2818,7 +2751,6 @@ dependencies = [
{ name = "flower", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "gotenberg-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "httpx-oauth", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "ijson", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "imap-tools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "jinja2", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "langdetect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2958,15 +2890,14 @@ requires-dist = [
{ name = "djangorestframework", specifier = "~=3.16" },
{ name = "djangorestframework-guardian", specifier = "~=0.4.0" },
{ name = "drf-spectacular", specifier = "~=0.28" },
{ name = "drf-spectacular-sidecar", specifier = "~=2026.3.1" },
{ name = "drf-spectacular-sidecar", specifier = "~=2026.1.1" },
{ name = "drf-writable-nested", specifier = "~=0.7.1" },
{ name = "faiss-cpu", specifier = ">=1.10" },
{ name = "filelock", specifier = "~=3.25.2" },
{ name = "filelock", specifier = "~=3.24.3" },
{ name = "flower", specifier = "~=2.0.1" },
{ name = "gotenberg-client", specifier = "~=0.13.1" },
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.7.0" },
{ name = "httpx-oauth", specifier = "~=0.16" },
{ name = "ijson", specifier = ">=3.2" },
{ name = "imap-tools", specifier = "~=1.11.0" },
{ name = "jinja2", specifier = "~=3.1.5" },
{ name = "langdetect", specifier = "~=1.0.9" },
@@ -2995,7 +2926,7 @@ requires-dist = [
{ name = "rapidfuzz", specifier = "~=3.14.0" },
{ name = "redis", extras = ["hiredis"], specifier = "~=5.2.1" },
{ name = "regex", specifier = ">=2025.9.18" },
{ name = "scikit-learn", specifier = "~=1.8.0" },
{ name = "scikit-learn", specifier = "~=1.7.0" },
{ name = "sentence-transformers", specifier = ">=4.1" },
{ name = "setproctitle", specifier = "~=1.3.4" },
{ name = "tika-client", specifier = "~=0.10.0" },
@@ -3011,7 +2942,7 @@ provides-extras = ["mariadb", "postgres", "webserver"]
dev = [
{ name = "daphne" },
{ name = "factory-boy", specifier = "~=3.3.1" },
{ name = "faker", specifier = "~=40.8.0" },
{ name = "faker", specifier = "~=40.5.1" },
{ name = "imagehash" },
{ name = "prek", specifier = "~=0.3.0" },
{ name = "pytest", specifier = "~=9.0.0" },
@@ -3034,7 +2965,7 @@ lint = [
testing = [
{ name = "daphne" },
{ name = "factory-boy", specifier = "~=3.3.1" },
{ name = "faker", specifier = "~=40.8.0" },
{ name = "faker", specifier = "~=40.5.1" },
{ name = "imagehash" },
{ name = "pytest", specifier = "~=9.0.0" },
{ name = "pytest-cov", specifier = "~=7.0.0" },
@@ -3285,23 +3216,23 @@ wheels = [
[[package]]
name = "prek"
version = "0.3.5"
version = "0.3.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/46/d6/277e002e56eeab3a9d48f1ca4cc067d249d6326fc1783b770d70ad5ae2be/prek-0.3.5.tar.gz", hash = "sha256:ca40b6685a4192256bc807f32237af94bf9b8799c0d708b98735738250685642", size = 374806, upload-time = "2026-03-09T10:35:18.842Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/f1/7613dc8347a33e40fc5b79eec6bc7d458d8bbc339782333d8433b665f86f/prek-0.3.3.tar.gz", hash = "sha256:117bd46ebeb39def24298ce021ccc73edcf697b81856fcff36d762dd56093f6f", size = 343697, upload-time = "2026-02-15T13:33:28.723Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8f/a9/16dd8d3a50362ebccffe58518af1f1f571c96f0695d7fcd8bbd386585f58/prek-0.3.5-py3-none-linux_armv6l.whl", hash = "sha256:44b3e12791805804f286d103682b42a84e0f98a2687faa37045e9d3375d3d73d", size = 5105604, upload-time = "2026-03-09T10:35:00.332Z" },
{ url = "https://files.pythonhosted.org/packages/e4/74/bc6036f5bf03860cda66ab040b32737e54802b71a81ec381839deb25df9e/prek-0.3.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e3cb451cc51ac068974557491beb4c7d2d41dfde29ed559c1694c8ce23bf53e8", size = 5506155, upload-time = "2026-03-09T10:35:17.64Z" },
{ url = "https://files.pythonhosted.org/packages/02/d9/a3745c2a10509c63b6a118ada766614dd705efefd08f275804d5c807aa4a/prek-0.3.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ad8f5f0d8da53dc94d00b76979af312b3dacccc9dcbc6417756c5dca3633c052", size = 5100383, upload-time = "2026-03-09T10:35:13.302Z" },
{ url = "https://files.pythonhosted.org/packages/43/8e/de965fc515d39309a332789cd3778161f7bc80cde15070bedf17f9f8cb93/prek-0.3.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:4511e15d34072851ac88e4b2006868fbe13655059ad941d7a0ff9ee17138fd9f", size = 5334913, upload-time = "2026-03-09T10:35:14.813Z" },
{ url = "https://files.pythonhosted.org/packages/3f/8c/44f07e8940256059cfd82520e3cbe0764ab06ddb4aa43148465db00b39ad/prek-0.3.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc0b63b8337e2046f51267facaac63ba755bc14aad53991840a5eccba3e5c28", size = 5033825, upload-time = "2026-03-09T10:35:06.976Z" },
{ url = "https://files.pythonhosted.org/packages/94/85/3ff0f96881ff2360c212d310ff23c3cf5a15b223d34fcfa8cdcef203be69/prek-0.3.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5fc0d78c3896a674aeb8247a83bbda7efec85274dbdfbc978ceff8d37e4ed20", size = 5438586, upload-time = "2026-03-09T10:34:58.779Z" },
{ url = "https://files.pythonhosted.org/packages/79/a5/c6d08d31293400fcb5d427f8e7e6bacfc959988e868ad3a9d97b4d87c4b7/prek-0.3.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64cad21cb9072d985179495b77b312f6b81e7b45357d0c68dc1de66e0408eabc", size = 6359714, upload-time = "2026-03-09T10:34:57.454Z" },
{ url = "https://files.pythonhosted.org/packages/ba/18/321dcff9ece8065d42c8c1c7a53a23b45d2b4330aa70993be75dc5f2822f/prek-0.3.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45ee84199bb48e013bdfde0c84352c17a44cc42d5792681b86d94e9474aab6f8", size = 5717632, upload-time = "2026-03-09T10:35:08.634Z" },
{ url = "https://files.pythonhosted.org/packages/a3/7f/1288226aa381d0cea403157f4e6b64b356e1a745f2441c31dd9d8a1d63da/prek-0.3.5-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:f43275e5d564e18e52133129ebeb5cb071af7ce4a547766c7f025aa0955dfbb6", size = 5339040, upload-time = "2026-03-09T10:35:03.665Z" },
{ url = "https://files.pythonhosted.org/packages/22/94/cfec83df9c2b8e7ed1608087bcf9538a6a77b4c2e7365123e9e0a3162cd1/prek-0.3.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:abcee520d31522bcbad9311f21326b447694cd5edba33618c25fd023fc9865ec", size = 5162586, upload-time = "2026-03-09T10:35:11.564Z" },
{ url = "https://files.pythonhosted.org/packages/13/b7/741d62132f37a5f7cc0fad1168bd31f20dea9628f482f077f569547e0436/prek-0.3.5-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:499c56a94a155790c75a973d351a33f8065579d9094c93f6d451ada5d1e469be", size = 5002933, upload-time = "2026-03-09T10:35:16.347Z" },
{ url = "https://files.pythonhosted.org/packages/6f/83/630a5671df6550fcfa67c54955e8a8174eb9b4d97ac38fb05a362029245b/prek-0.3.5-py3-none-musllinux_1_1_i686.whl", hash = "sha256:de1065b59f194624adc9dea269d4ff6b50e98a1b5bb662374a9adaa496b3c1eb", size = 5304934, upload-time = "2026-03-09T10:35:09.975Z" },
{ url = "https://files.pythonhosted.org/packages/de/79/67a7afd0c0b6c436630b7dba6e586a42d21d5d6e5778fbd9eba7bbd3dd26/prek-0.3.5-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:a1c4869e45ee341735d07179da3a79fa2afb5959cef8b3c8a71906eb52dc6933", size = 5829914, upload-time = "2026-03-09T10:35:05.39Z" },
{ url = "https://files.pythonhosted.org/packages/2d/8b/dce13d2a3065fd1e8ffce593a0e51c4a79c3cde9c9a15dc0acc8d9d1573d/prek-0.3.3-py3-none-linux_armv6l.whl", hash = "sha256:e8629cac4bdb131be8dc6e5a337f0f76073ad34a8305f3fe2bc1ab6201ede0a4", size = 4644636, upload-time = "2026-02-15T13:33:43.609Z" },
{ url = "https://files.pythonhosted.org/packages/01/30/06ab4dbe7ce02a8ce833e92deb1d9a8e85ae9d40e33d1959a2070b7494c6/prek-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4b9e819b9e4118e1e785047b1c8bd9aec7e4d836ed034cb58b7db5bcaaf49437", size = 4651410, upload-time = "2026-02-15T13:33:34.277Z" },
{ url = "https://files.pythonhosted.org/packages/d4/fc/da3bc5cb38471e7192eda06b7a26b7c24ef83e82da2c1dbc145f2bf33640/prek-0.3.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bf29db3b5657c083eb8444c25aadeeec5167dc492e9019e188f87932f01ea50a", size = 4273163, upload-time = "2026-02-15T13:33:42.106Z" },
{ url = "https://files.pythonhosted.org/packages/b4/74/47839395091e2937beced81a5dd2f8ea9c8239c853da8611aaf78ee21a8b/prek-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:ae09736149815b26e64a9d350ca05692bab32c2afdf2939114d3211aaad68a3e", size = 4631808, upload-time = "2026-02-15T13:33:20.076Z" },
{ url = "https://files.pythonhosted.org/packages/e2/89/3f5ef6f7c928c017cb63b029349d6bc03598ab7f6979d4a770ce02575f82/prek-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:856c2b55c51703c366bb4ce81c6a91102b70573a9fc8637db2ac61c66e4565f9", size = 4548959, upload-time = "2026-02-15T13:33:36.325Z" },
{ url = "https://files.pythonhosted.org/packages/b2/18/80002c4c4475f90ca025f27739a016927a0e5d905c60612fc95da1c56ab7/prek-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3acdf13a018f685beaff0a71d4b0d2ccbab4eaa1aced6d08fd471c1a654183eb", size = 4862256, upload-time = "2026-02-15T13:33:37.754Z" },
{ url = "https://files.pythonhosted.org/packages/c5/25/648bf084c2468fa7cfcdbbe9e59956bbb31b81f36e113bc9107d80af26a7/prek-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f035667a8bd0a77b2bfa2b2e125da8cb1793949e9eeef0d8daab7f8ac8b57fe", size = 5404486, upload-time = "2026-02-15T13:33:39.239Z" },
{ url = "https://files.pythonhosted.org/packages/8b/43/261fb60a11712a327da345912bd8b338dc5a050199de800faafa278a6133/prek-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d09b2ad14332eede441d977de08eb57fb3f61226ed5fd2ceb7aadf5afcdb6794", size = 4887513, upload-time = "2026-02-15T13:33:40.702Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2c/581e757ee57ec6046b32e0ee25660fc734bc2622c319f57119c49c0cab58/prek-0.3.3-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:c0c3ffac16e37a9daba43a7e8316778f5809b70254be138761a8b5b9ef0df28e", size = 4632336, upload-time = "2026-02-15T13:33:25.867Z" },
{ url = "https://files.pythonhosted.org/packages/d5/d8/aa276ce5d11b77882da4102ca0cb7161095831105043ae7979bbfdcc3dc4/prek-0.3.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a3dc7720b580c07c0386e17af2486a5b4bc2f6cc57034a288a614dcbc4abe555", size = 4679370, upload-time = "2026-02-15T13:33:22.247Z" },
{ url = "https://files.pythonhosted.org/packages/70/19/9d4fa7bde428e58d9f48a74290c08736d42aeb5690dcdccc7a713e34a449/prek-0.3.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:60e0fa15da5020a03df2ee40268145ec5b88267ec2141a205317ad4df8c992d6", size = 4540316, upload-time = "2026-02-15T13:33:24.088Z" },
{ url = "https://files.pythonhosted.org/packages/25/b5/973cce29257e0b47b16cc9b4c162772ea01dbb7c080791ea0c068e106e05/prek-0.3.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:553515da9586d9624dc42db32b744fdb91cf62b053753037a0cadb3c2d8d82a2", size = 4724566, upload-time = "2026-02-15T13:33:29.832Z" },
{ url = "https://files.pythonhosted.org/packages/d6/8b/ad8b2658895a8ed2b0bc630bf38686fe38b7ff2c619c58953a80e4de3048/prek-0.3.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9512cf370e0d1496503463a4a65621480efb41b487841a9e9ff1661edf14b238", size = 4995072, upload-time = "2026-02-15T13:33:27.417Z" },
]
[[package]]
@@ -3657,15 +3588,15 @@ wheels = [
[[package]]
name = "pyrefly"
version = "0.55.0"
version = "0.54.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/c4/76e0797215e62d007f81f86c9c4fb5d6202685a3f5e70810f3fd94294f92/pyrefly-0.55.0.tar.gz", hash = "sha256:434c3282532dd4525c4840f2040ed0eb79b0ec8224fe18d957956b15471f2441", size = 5135682, upload-time = "2026-03-03T00:46:38.122Z" }
sdist = { url = "https://files.pythonhosted.org/packages/81/44/c10b16a302fda90d0af1328f880b232761b510eab546616a7be2fdf35a57/pyrefly-0.54.0.tar.gz", hash = "sha256:c6663be64d492f0d2f2a411ada9f28a6792163d34133639378b7f3dd9a8dca94", size = 5098893, upload-time = "2026-02-23T15:44:35.111Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/39/b0/16e50cf716784513648e23e726a24f71f9544aa4f86103032dcaa5ff71a2/pyrefly-0.55.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:49aafcefe5e2dd4256147db93e5b0ada42bff7d9a60db70e03d1f7055338eec9", size = 12210073, upload-time = "2026-03-03T00:46:15.51Z" },
{ url = "https://files.pythonhosted.org/packages/3a/ad/89500c01bac3083383011600370289fbc67700c5be46e781787392628a3a/pyrefly-0.55.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2827426e6b28397c13badb93c0ede0fb0f48046a7a89e3d774cda04e8e2067cd", size = 11767474, upload-time = "2026-03-03T00:46:18.003Z" },
{ url = "https://files.pythonhosted.org/packages/78/68/4c66b260f817f304ead11176ff13985625f7c269e653304b4bdb546551af/pyrefly-0.55.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7346b2d64dc575bd61aa3bca854fbf8b5a19a471cbdb45e0ca1e09861b63488c", size = 33260395, upload-time = "2026-03-03T00:46:20.509Z" },
{ url = "https://files.pythonhosted.org/packages/47/09/10bd48c9f860064f29f412954126a827d60f6451512224912c265e26bbe6/pyrefly-0.55.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:233b861b4cff008b1aff62f4f941577ed752e4d0060834229eb9b6826e6973c9", size = 35848269, upload-time = "2026-03-03T00:46:23.418Z" },
{ url = "https://files.pythonhosted.org/packages/a9/39/bc65cdd5243eb2dfea25dd1321f9a5a93e8d9c3a308501c4c6c05d011585/pyrefly-0.55.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5aa85657d76da1d25d081a49f0e33c8fc3ec91c1a0f185a8ed393a5a3d9e178", size = 38449820, upload-time = "2026-03-03T00:46:26.309Z" },
{ url = "https://files.pythonhosted.org/packages/5f/99/8fdcdb4e55f0227fdd9f6abce36b619bab1ecb0662b83b66adc8cba3c788/pyrefly-0.54.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:58a3f092b6dc25ef79b2dc6c69a40f36784ca157c312bfc0baea463926a9db6d", size = 12223973, upload-time = "2026-02-23T15:44:14.278Z" },
{ url = "https://files.pythonhosted.org/packages/90/35/c2aaf87a76003ad27b286594d2e5178f811eaa15bfe3d98dba2b47d56dd1/pyrefly-0.54.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:615081414106dd95873bc39c3a4bed68754c6cc24a8177ac51d22f88f88d3eb3", size = 11785585, upload-time = "2026-02-23T15:44:17.468Z" },
{ url = "https://files.pythonhosted.org/packages/c4/4a/ced02691ed67e5a897714979196f08ad279ec7ec7f63c45e00a75a7f3c0e/pyrefly-0.54.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbcaf20f5fe585079079a95205c1f3cd4542d17228cdf1df560288880623b70", size = 33381977, upload-time = "2026-02-23T15:44:19.736Z" },
{ url = "https://files.pythonhosted.org/packages/0b/ce/72a117ed437c8f6950862181014b41e36f3c3997580e29b772b71e78d587/pyrefly-0.54.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d5da116c0d34acfbd66663addd3ca8aa78a636f6692a66e078126d3620a883", size = 35962821, upload-time = "2026-02-23T15:44:22.357Z" },
{ url = "https://files.pythonhosted.org/packages/85/de/89013f5ae0a35d2b6b01274a92a35ee91431ea001050edf0a16748d39875/pyrefly-0.54.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef3ac27f1a4baaf67aead64287d3163350844794aca6315ad1a9650b16ec26a", size = 38496689, upload-time = "2026-02-23T15:44:25.236Z" },
]
[[package]]
@@ -3800,11 +3731,11 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.2.2"
version = "1.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
{ url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
]
[[package]]
@@ -4035,88 +3966,88 @@ wheels = [
[[package]]
name = "regex"
version = "2026.2.28"
version = "2026.2.19"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/8b/71/41455aa99a5a5ac1eaf311f5d8efd9ce6433c03ac1e0962de163350d0d97/regex-2026.2.28.tar.gz", hash = "sha256:a729e47d418ea11d03469f321aaf67cdee8954cde3ff2cf8403ab87951ad10f2", size = 415184, upload-time = "2026-02-28T02:19:42.792Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ff/c0/d8079d4f6342e4cec5c3e7d7415b5cd3e633d5f4124f7a4626908dbe84c7/regex-2026.2.19.tar.gz", hash = "sha256:6fb8cb09b10e38f3ae17cc6dc04a1df77762bd0351b6ba9041438e7cc85ec310", size = 414973, upload-time = "2026-02-19T19:03:47.899Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/04/db/8cbfd0ba3f302f2d09dd0019a9fcab74b63fee77a76c937d0e33161fb8c1/regex-2026.2.28-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e621fb7c8dc147419b28e1702f58a0177ff8308a76fa295c71f3e7827849f5d9", size = 488462, upload-time = "2026-02-28T02:16:22.616Z" },
{ url = "https://files.pythonhosted.org/packages/5d/10/ccc22c52802223f2368731964ddd117799e1390ffc39dbb31634a83022ee/regex-2026.2.28-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0d5bef2031cbf38757a0b0bc4298bb4824b6332d28edc16b39247228fbdbad97", size = 290774, upload-time = "2026-02-28T02:16:23.993Z" },
{ url = "https://files.pythonhosted.org/packages/62/b9/6796b3bf3101e64117201aaa3a5a030ec677ecf34b3cd6141b5d5c6c67d5/regex-2026.2.28-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bcb399ed84eabf4282587ba151f2732ad8168e66f1d3f85b1d038868fe547703", size = 288724, upload-time = "2026-02-28T02:16:25.403Z" },
{ url = "https://files.pythonhosted.org/packages/9c/02/291c0ae3f3a10cea941d0f5366da1843d8d1fa8a25b0671e20a0e454bb38/regex-2026.2.28-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7c1b34dfa72f826f535b20712afa9bb3ba580020e834f3c69866c5bddbf10098", size = 791924, upload-time = "2026-02-28T02:16:26.863Z" },
{ url = "https://files.pythonhosted.org/packages/0f/57/f0235cc520d9672742196c5c15098f8f703f2758d48d5a7465a56333e496/regex-2026.2.28-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:851fa70df44325e1e4cdb79c5e676e91a78147b1b543db2aec8734d2add30ec2", size = 860095, upload-time = "2026-02-28T02:16:28.772Z" },
{ url = "https://files.pythonhosted.org/packages/b3/7c/393c94cbedda79a0f5f2435ebd01644aba0b338d327eb24b4aa5b8d6c07f/regex-2026.2.28-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:516604edd17b1c2c3e579cf4e9b25a53bf8fa6e7cedddf1127804d3e0140ca64", size = 906583, upload-time = "2026-02-28T02:16:30.977Z" },
{ url = "https://files.pythonhosted.org/packages/2c/73/a72820f47ca5abf2b5d911d0407ba5178fc52cf9780191ed3a54f5f419a2/regex-2026.2.28-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e7ce83654d1ab701cb619285a18a8e5a889c1216d746ddc710c914ca5fd71022", size = 800234, upload-time = "2026-02-28T02:16:32.55Z" },
{ url = "https://files.pythonhosted.org/packages/34/b3/6e6a4b7b31fa998c4cf159a12cbeaf356386fbd1a8be743b1e80a3da51e4/regex-2026.2.28-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2791948f7c70bb9335a9102df45e93d428f4b8128020d85920223925d73b9e1", size = 772803, upload-time = "2026-02-28T02:16:34.029Z" },
{ url = "https://files.pythonhosted.org/packages/10/e7/5da0280c765d5a92af5e1cd324b3fe8464303189cbaa449de9a71910e273/regex-2026.2.28-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:03a83cc26aa2acda6b8b9dfe748cf9e84cbd390c424a1de34fdcef58961a297a", size = 781117, upload-time = "2026-02-28T02:16:36.253Z" },
{ url = "https://files.pythonhosted.org/packages/76/39/0b8d7efb256ae34e1b8157acc1afd8758048a1cf0196e1aec2e71fd99f4b/regex-2026.2.28-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ec6f5674c5dc836994f50f1186dd1fafde4be0666aae201ae2fcc3d29d8adf27", size = 854224, upload-time = "2026-02-28T02:16:38.119Z" },
{ url = "https://files.pythonhosted.org/packages/21/ff/a96d483ebe8fe6d1c67907729202313895d8de8495569ec319c6f29d0438/regex-2026.2.28-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:50c2fc924749543e0eacc93ada6aeeb3ea5f6715825624baa0dccaec771668ae", size = 761898, upload-time = "2026-02-28T02:16:40.333Z" },
{ url = "https://files.pythonhosted.org/packages/89/bd/d4f2e75cb4a54b484e796017e37c0d09d8a0a837de43d17e238adf163f4e/regex-2026.2.28-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:ba55c50f408fb5c346a3a02d2ce0ebc839784e24f7c9684fde328ff063c3cdea", size = 844832, upload-time = "2026-02-28T02:16:41.875Z" },
{ url = "https://files.pythonhosted.org/packages/8a/a7/428a135cf5e15e4e11d1e696eb2bf968362f8ea8a5f237122e96bc2ae950/regex-2026.2.28-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:edb1b1b3a5576c56f08ac46f108c40333f222ebfd5cf63afdfa3aab0791ebe5b", size = 788347, upload-time = "2026-02-28T02:16:43.472Z" },
{ url = "https://files.pythonhosted.org/packages/07/42/9061b03cf0fc4b5fa2c3984cbbaed54324377e440a5c5a29d29a72518d62/regex-2026.2.28-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fcf26c3c6d0da98fada8ae4ef0aa1c3405a431c0a77eb17306d38a89b02adcd7", size = 489574, upload-time = "2026-02-28T02:16:50.455Z" },
{ url = "https://files.pythonhosted.org/packages/77/83/0c8a5623a233015595e3da499c5a1c13720ac63c107897a6037bb97af248/regex-2026.2.28-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02473c954af35dd2defeb07e44182f5705b30ea3f351a7cbffa9177beb14da5d", size = 291426, upload-time = "2026-02-28T02:16:52.52Z" },
{ url = "https://files.pythonhosted.org/packages/9e/06/3ef1ac6910dc3295ebd71b1f9bfa737e82cfead211a18b319d45f85ddd09/regex-2026.2.28-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9b65d33a17101569f86d9c5966a8b1d7fbf8afdda5a8aa219301b0a80f58cf7d", size = 289200, upload-time = "2026-02-28T02:16:54.08Z" },
{ url = "https://files.pythonhosted.org/packages/dd/c9/8cc8d850b35ab5650ff6756a1cb85286e2000b66c97520b29c1587455344/regex-2026.2.28-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e71dcecaa113eebcc96622c17692672c2d104b1d71ddf7adeda90da7ddeb26fc", size = 796765, upload-time = "2026-02-28T02:16:55.905Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5d/57702597627fc23278ebf36fbb497ac91c0ce7fec89ac6c81e420ca3e38c/regex-2026.2.28-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:481df4623fa4969c8b11f3433ed7d5e3dc9cec0f008356c3212b3933fb77e3d8", size = 863093, upload-time = "2026-02-28T02:16:58.094Z" },
{ url = "https://files.pythonhosted.org/packages/02/6d/f3ecad537ca2811b4d26b54ca848cf70e04fcfc138667c146a9f3157779c/regex-2026.2.28-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:64e7c6ad614573e0640f271e811a408d79a9e1fe62a46adb602f598df42a818d", size = 909455, upload-time = "2026-02-28T02:17:00.918Z" },
{ url = "https://files.pythonhosted.org/packages/9e/40/bb226f203caa22c1043c1ca79b36340156eca0f6a6742b46c3bb222a3a57/regex-2026.2.28-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6b08a06976ff4fb0d83077022fde3eca06c55432bb997d8c0495b9a4e9872f4", size = 802037, upload-time = "2026-02-28T02:17:02.842Z" },
{ url = "https://files.pythonhosted.org/packages/44/7c/c6d91d8911ac6803b45ca968e8e500c46934e58c0903cbc6d760ee817a0a/regex-2026.2.28-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:864cdd1a2ef5716b0ab468af40139e62ede1b3a53386b375ec0786bb6783fc05", size = 775113, upload-time = "2026-02-28T02:17:04.506Z" },
{ url = "https://files.pythonhosted.org/packages/dc/8d/4a9368d168d47abd4158580b8c848709667b1cd293ff0c0c277279543bd0/regex-2026.2.28-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:511f7419f7afab475fd4d639d4aedfc54205bcb0800066753ef68a59f0f330b5", size = 784194, upload-time = "2026-02-28T02:17:06.888Z" },
{ url = "https://files.pythonhosted.org/packages/cc/bf/2c72ab5d8b7be462cb1651b5cc333da1d0068740342f350fcca3bca31947/regex-2026.2.28-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b42f7466e32bf15a961cf09f35fa6323cc72e64d3d2c990b10de1274a5da0a59", size = 856846, upload-time = "2026-02-28T02:17:09.11Z" },
{ url = "https://files.pythonhosted.org/packages/7c/f4/6b65c979bb6d09f51bb2d2a7bc85de73c01ec73335d7ddd202dcb8cd1c8f/regex-2026.2.28-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8710d61737b0c0ce6836b1da7109f20d495e49b3809f30e27e9560be67a257bf", size = 763516, upload-time = "2026-02-28T02:17:11.004Z" },
{ url = "https://files.pythonhosted.org/packages/8e/32/29ea5e27400ee86d2cc2b4e80aa059df04eaf78b4f0c18576ae077aeff68/regex-2026.2.28-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:4390c365fd2d45278f45afd4673cb90f7285f5701607e3ad4274df08e36140ae", size = 849278, upload-time = "2026-02-28T02:17:12.693Z" },
{ url = "https://files.pythonhosted.org/packages/1d/91/3233d03b5f865111cd517e1c95ee8b43e8b428d61fa73764a80c9bb6f537/regex-2026.2.28-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cb3b1db8ff6c7b8bf838ab05583ea15230cb2f678e569ab0e3a24d1e8320940b", size = 790068, upload-time = "2026-02-28T02:17:14.9Z" },
{ url = "https://files.pythonhosted.org/packages/87/f6/dc9ef48c61b79c8201585bf37fa70cd781977da86e466cd94e8e95d2443b/regex-2026.2.28-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6d63a07e5ec8ce7184452cb00c41c37b49e67dc4f73b2955b5b8e782ea970784", size = 489311, upload-time = "2026-02-28T02:17:22.591Z" },
{ url = "https://files.pythonhosted.org/packages/95/c8/c20390f2232d3f7956f420f4ef1852608ad57aa26c3dd78516cb9f3dc913/regex-2026.2.28-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e59bc8f30414d283ae8ee1617b13d8112e7135cb92830f0ec3688cb29152585a", size = 291285, upload-time = "2026-02-28T02:17:24.355Z" },
{ url = "https://files.pythonhosted.org/packages/d2/a6/ba1068a631ebd71a230e7d8013fcd284b7c89c35f46f34a7da02082141b1/regex-2026.2.28-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:de0cf053139f96219ccfabb4a8dd2d217c8c82cb206c91d9f109f3f552d6b43d", size = 289051, upload-time = "2026-02-28T02:17:26.722Z" },
{ url = "https://files.pythonhosted.org/packages/1d/1b/7cc3b7af4c244c204b7a80924bd3d85aecd9ba5bc82b485c5806ee8cda9e/regex-2026.2.28-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb4db2f17e6484904f986c5a657cec85574c76b5c5e61c7aae9ffa1bc6224f95", size = 796842, upload-time = "2026-02-28T02:17:29.064Z" },
{ url = "https://files.pythonhosted.org/packages/24/87/26bd03efc60e0d772ac1e7b60a2e6325af98d974e2358f659c507d3c76db/regex-2026.2.28-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:52b017b35ac2214d0db5f4f90e303634dc44e4aba4bd6235a27f97ecbe5b0472", size = 863083, upload-time = "2026-02-28T02:17:31.363Z" },
{ url = "https://files.pythonhosted.org/packages/ae/54/aeaf4afb1aa0a65e40de52a61dc2ac5b00a83c6cb081c8a1d0dda74f3010/regex-2026.2.28-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:69fc560ccbf08a09dc9b52ab69cacfae51e0ed80dc5693078bdc97db2f91ae96", size = 909412, upload-time = "2026-02-28T02:17:33.248Z" },
{ url = "https://files.pythonhosted.org/packages/12/2f/049901def913954e640d199bbc6a7ca2902b6aeda0e5da9d17f114100ec2/regex-2026.2.28-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e61eea47230eba62a31f3e8a0e3164d0f37ef9f40529fb2c79361bc6b53d2a92", size = 802101, upload-time = "2026-02-28T02:17:35.053Z" },
{ url = "https://files.pythonhosted.org/packages/7d/a5/512fb9ff7f5b15ea204bb1967ebb649059446decacccb201381f9fa6aad4/regex-2026.2.28-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4f5c0b182ad4269e7381b7c27fdb0408399881f7a92a4624fd5487f2971dfc11", size = 775260, upload-time = "2026-02-28T02:17:37.692Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a8/9a92935878aba19bd72706b9db5646a6f993d99b3f6ed42c02ec8beb1d61/regex-2026.2.28-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:96f6269a2882fbb0ee76967116b83679dc628e68eaea44e90884b8d53d833881", size = 784311, upload-time = "2026-02-28T02:17:39.855Z" },
{ url = "https://files.pythonhosted.org/packages/09/d3/fc51a8a738a49a6b6499626580554c9466d3ea561f2b72cfdc72e4149773/regex-2026.2.28-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b5acd4b6a95f37c3c3828e5d053a7d4edaedb85de551db0153754924cb7c83e3", size = 856876, upload-time = "2026-02-28T02:17:42.317Z" },
{ url = "https://files.pythonhosted.org/packages/08/b7/2e641f3d084b120ca4c52e8c762a78da0b32bf03ef546330db3e2635dc5f/regex-2026.2.28-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2234059cfe33d9813a3677ef7667999caea9eeaa83fef98eb6ce15c6cf9e0215", size = 763632, upload-time = "2026-02-28T02:17:45.073Z" },
{ url = "https://files.pythonhosted.org/packages/fe/6d/0009021d97e79ee99f3d8641f0a8d001eed23479ade4c3125a5480bf3e2d/regex-2026.2.28-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c15af43c72a7fb0c97cbc66fa36a43546eddc5c06a662b64a0cbf30d6ac40944", size = 849320, upload-time = "2026-02-28T02:17:47.192Z" },
{ url = "https://files.pythonhosted.org/packages/05/7a/51cfbad5758f8edae430cb21961a9c8d04bce1dae4d2d18d4186eec7cfa1/regex-2026.2.28-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9185cc63359862a6e80fe97f696e04b0ad9a11c4ac0a4a927f979f611bfe3768", size = 790152, upload-time = "2026-02-28T02:17:49.067Z" },
{ url = "https://files.pythonhosted.org/packages/24/07/6c7e4cec1e585959e96cbc24299d97e4437a81173217af54f1804994e911/regex-2026.2.28-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:97054c55db06ab020342cc0d35d6f62a465fa7662871190175f1ad6c655c028f", size = 492541, upload-time = "2026-02-28T02:17:56.813Z" },
{ url = "https://files.pythonhosted.org/packages/7c/13/55eb22ada7f43d4f4bb3815b6132183ebc331c81bd496e2d1f3b8d862e0d/regex-2026.2.28-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d25a10811de831c2baa6aef3c0be91622f44dd8d31dd12e69f6398efb15e48b", size = 292984, upload-time = "2026-02-28T02:17:58.538Z" },
{ url = "https://files.pythonhosted.org/packages/5b/11/c301f8cb29ce9644a5ef85104c59244e6e7e90994a0f458da4d39baa8e17/regex-2026.2.28-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d6cfe798d8da41bb1862ed6e0cba14003d387c3c0c4a5d45591076ae9f0ce2f8", size = 291509, upload-time = "2026-02-28T02:18:00.208Z" },
{ url = "https://files.pythonhosted.org/packages/b5/43/aabe384ec1994b91796e903582427bc2ffaed9c4103819ed3c16d8e749f3/regex-2026.2.28-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fd0ce43e71d825b7c0661f9c54d4d74bd97c56c3fd102a8985bcfea48236bacb", size = 809429, upload-time = "2026-02-28T02:18:02.328Z" },
{ url = "https://files.pythonhosted.org/packages/04/b8/8d2d987a816720c4f3109cee7c06a4b24ad0e02d4fc74919ab619e543737/regex-2026.2.28-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00945d007fd74a9084d2ab79b695b595c6b7ba3698972fadd43e23230c6979c1", size = 869422, upload-time = "2026-02-28T02:18:04.23Z" },
{ url = "https://files.pythonhosted.org/packages/fc/ad/2c004509e763c0c3719f97c03eca26473bffb3868d54c5f280b8cd4f9e3d/regex-2026.2.28-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bec23c11cbbf09a4df32fe50d57cbdd777bc442269b6e39a1775654f1c95dee2", size = 915175, upload-time = "2026-02-28T02:18:06.791Z" },
{ url = "https://files.pythonhosted.org/packages/55/c2/fd429066da487ef555a9da73bf214894aec77fc8c66a261ee355a69871a8/regex-2026.2.28-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5cdcc17d935c8f9d3f4db5c2ebe2640c332e3822ad5d23c2f8e0228e6947943a", size = 812044, upload-time = "2026-02-28T02:18:08.736Z" },
{ url = "https://files.pythonhosted.org/packages/5b/ca/feedb7055c62a3f7f659971bf45f0e0a87544b6b0cf462884761453f97c5/regex-2026.2.28-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a448af01e3d8031c89c5d902040b124a5e921a25c4e5e07a861ca591ce429341", size = 782056, upload-time = "2026-02-28T02:18:10.777Z" },
{ url = "https://files.pythonhosted.org/packages/95/30/1aa959ed0d25c1dd7dd5047ea8ba482ceaef38ce363c401fd32a6b923e60/regex-2026.2.28-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:10d28e19bd4888e4abf43bd3925f3c134c52fdf7259219003588a42e24c2aa25", size = 798743, upload-time = "2026-02-28T02:18:13.025Z" },
{ url = "https://files.pythonhosted.org/packages/3b/1f/dadb9cf359004784051c897dcf4d5d79895f73a1bbb7b827abaa4814ae80/regex-2026.2.28-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:99985a2c277dcb9ccb63f937451af5d65177af1efdeb8173ac55b61095a0a05c", size = 864633, upload-time = "2026-02-28T02:18:16.84Z" },
{ url = "https://files.pythonhosted.org/packages/a7/f1/b9a25eb24e1cf79890f09e6ec971ee5b511519f1851de3453bc04f6c902b/regex-2026.2.28-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:e1e7b24cb3ae9953a560c563045d1ba56ee4749fbd05cf21ba571069bd7be81b", size = 770862, upload-time = "2026-02-28T02:18:18.892Z" },
{ url = "https://files.pythonhosted.org/packages/02/9a/c5cb10b7aa6f182f9247a30cc9527e326601f46f4df864ac6db588d11fcd/regex-2026.2.28-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d8511a01d0e4ee1992eb3ba19e09bc1866fe03f05129c3aec3fdc4cbc77aad3f", size = 854788, upload-time = "2026-02-28T02:18:21.475Z" },
{ url = "https://files.pythonhosted.org/packages/0a/50/414ba0731c4bd40b011fa4703b2cc86879ec060c64f2a906e65a56452589/regex-2026.2.28-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:aaffaecffcd2479ce87aa1e74076c221700b7c804e48e98e62500ee748f0f550", size = 800184, upload-time = "2026-02-28T02:18:23.492Z" },
{ url = "https://files.pythonhosted.org/packages/cf/03/691015f7a7cb1ed6dacb2ea5de5682e4858e05a4c5506b2839cd533bbcd6/regex-2026.2.28-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:78454178c7df31372ea737996fb7f36b3c2c92cccc641d251e072478afb4babc", size = 489497, upload-time = "2026-02-28T02:18:30.889Z" },
{ url = "https://files.pythonhosted.org/packages/c6/ba/8db8fd19afcbfa0e1036eaa70c05f20ca8405817d4ad7a38a6b4c2f031ac/regex-2026.2.28-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:5d10303dd18cedfd4d095543998404df656088240bcfd3cd20a8f95b861f74bd", size = 291295, upload-time = "2026-02-28T02:18:33.426Z" },
{ url = "https://files.pythonhosted.org/packages/5a/79/9aa0caf089e8defef9b857b52fc53801f62ff868e19e5c83d4a96612eba1/regex-2026.2.28-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:19a9c9e0a8f24f39d575a6a854d516b48ffe4cbdcb9de55cb0570a032556ecff", size = 289275, upload-time = "2026-02-28T02:18:35.247Z" },
{ url = "https://files.pythonhosted.org/packages/eb/26/ee53117066a30ef9c883bf1127eece08308ccf8ccd45c45a966e7a665385/regex-2026.2.28-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:09500be324f49b470d907b3ef8af9afe857f5cca486f853853f7945ddbf75911", size = 797176, upload-time = "2026-02-28T02:18:37.15Z" },
{ url = "https://files.pythonhosted.org/packages/05/1b/67fb0495a97259925f343ae78b5d24d4a6624356ae138b57f18bd43006e4/regex-2026.2.28-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fb1c4ff62277d87a7335f2c1ea4e0387b8f2b3ad88a64efd9943906aafad4f33", size = 863813, upload-time = "2026-02-28T02:18:39.478Z" },
{ url = "https://files.pythonhosted.org/packages/a0/1d/93ac9bbafc53618091c685c7ed40239a90bf9f2a82c983f0baa97cb7ae07/regex-2026.2.28-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b8b3f1be1738feadc69f62daa250c933e85c6f34fa378f54a7ff43807c1b9117", size = 908678, upload-time = "2026-02-28T02:18:41.619Z" },
{ url = "https://files.pythonhosted.org/packages/c7/7a/a8f5e0561702b25239846a16349feece59712ae20598ebb205580332a471/regex-2026.2.28-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc8ed8c3f41c27acb83f7b6a9eb727a73fc6663441890c5cb3426a5f6a91ce7d", size = 801528, upload-time = "2026-02-28T02:18:43.624Z" },
{ url = "https://files.pythonhosted.org/packages/96/5d/ed6d4cbde80309854b1b9f42d9062fee38ade15f7eb4909f6ef2440403b5/regex-2026.2.28-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fa539be029844c0ce1114762d2952ab6cfdd7c7c9bd72e0db26b94c3c36dcc5a", size = 775373, upload-time = "2026-02-28T02:18:46.102Z" },
{ url = "https://files.pythonhosted.org/packages/6a/e9/6e53c34e8068b9deec3e87210086ecb5b9efebdefca6b0d3fa43d66dcecb/regex-2026.2.28-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7900157786428a79615a8264dac1f12c9b02957c473c8110c6b1f972dcecaddf", size = 784859, upload-time = "2026-02-28T02:18:48.269Z" },
{ url = "https://files.pythonhosted.org/packages/48/3c/736e1c7ca7f0dcd2ae33819888fdc69058a349b7e5e84bc3e2f296bbf794/regex-2026.2.28-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0b1d2b07614d95fa2bf8a63fd1e98bd8fa2b4848dc91b1efbc8ba219fdd73952", size = 857813, upload-time = "2026-02-28T02:18:50.576Z" },
{ url = "https://files.pythonhosted.org/packages/6e/7c/48c4659ad9da61f58e79dbe8c05223e0006696b603c16eb6b5cbfbb52c27/regex-2026.2.28-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:b389c61aa28a79c2e0527ac36da579869c2e235a5b208a12c5b5318cda2501d8", size = 763705, upload-time = "2026-02-28T02:18:52.59Z" },
{ url = "https://files.pythonhosted.org/packages/cf/a1/bc1c261789283128165f71b71b4b221dd1b79c77023752a6074c102f18d8/regex-2026.2.28-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f467cb602f03fbd1ab1908f68b53c649ce393fde056628dc8c7e634dab6bfc07", size = 848734, upload-time = "2026-02-28T02:18:54.595Z" },
{ url = "https://files.pythonhosted.org/packages/10/d8/979407faf1397036e25a5ae778157366a911c0f382c62501009f4957cf86/regex-2026.2.28-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e8c8cb2deba42f5ec1ede46374e990f8adc5e6456a57ac1a261b19be6f28e4e6", size = 789871, upload-time = "2026-02-28T02:18:57.34Z" },
{ url = "https://files.pythonhosted.org/packages/d3/eb/8389f9e940ac89bcf58d185e230a677b4fd07c5f9b917603ad5c0f8fa8fe/regex-2026.2.28-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:a5dac14d0872eeb35260a8e30bac07ddf22adc1e3a0635b52b02e180d17c9c7e", size = 492546, upload-time = "2026-02-28T02:19:05.378Z" },
{ url = "https://files.pythonhosted.org/packages/7b/c7/09441d27ce2a6fa6a61ea3150ea4639c1dcda9b31b2ea07b80d6937b24dd/regex-2026.2.28-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:ec0c608b7a7465ffadb344ed7c987ff2f11ee03f6a130b569aa74d8a70e8333c", size = 292986, upload-time = "2026-02-28T02:19:07.24Z" },
{ url = "https://files.pythonhosted.org/packages/fb/69/4144b60ed7760a6bd235e4087041f487aa4aa62b45618ce018b0c14833ea/regex-2026.2.28-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7815afb0ca45456613fdaf60ea9c993715511c8d53a83bc468305cbc0ee23c7", size = 291518, upload-time = "2026-02-28T02:19:09.698Z" },
{ url = "https://files.pythonhosted.org/packages/2d/be/77e5426cf5948c82f98c53582009ca9e94938c71f73a8918474f2e2990bb/regex-2026.2.28-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b059e71ec363968671693a78c5053bd9cb2fe410f9b8e4657e88377ebd603a2e", size = 809464, upload-time = "2026-02-28T02:19:12.494Z" },
{ url = "https://files.pythonhosted.org/packages/45/99/2c8c5ac90dc7d05c6e7d8e72c6a3599dc08cd577ac476898e91ca787d7f1/regex-2026.2.28-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8cf76f1a29f0e99dcfd7aef1551a9827588aae5a737fe31442021165f1920dc", size = 869553, upload-time = "2026-02-28T02:19:15.151Z" },
{ url = "https://files.pythonhosted.org/packages/53/34/daa66a342f0271e7737003abf6c3097aa0498d58c668dbd88362ef94eb5d/regex-2026.2.28-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:180e08a435a0319e6a4821c3468da18dc7001987e1c17ae1335488dfe7518dd8", size = 915289, upload-time = "2026-02-28T02:19:17.331Z" },
{ url = "https://files.pythonhosted.org/packages/c5/c7/e22c2aaf0a12e7e22ab19b004bb78d32ca1ecc7ef245949935463c5567de/regex-2026.2.28-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e496956106fd59ba6322a8ea17141a27c5040e5ee8f9433ae92d4e5204462a0", size = 812156, upload-time = "2026-02-28T02:19:20.011Z" },
{ url = "https://files.pythonhosted.org/packages/7f/bb/2dc18c1efd9051cf389cd0d7a3a4d90f6804b9fff3a51b5dc3c85b935f71/regex-2026.2.28-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bba2b18d70eeb7b79950f12f633beeecd923f7c9ad6f6bae28e59b4cb3ab046b", size = 782215, upload-time = "2026-02-28T02:19:22.047Z" },
{ url = "https://files.pythonhosted.org/packages/17/1e/9e4ec9b9013931faa32226ec4aa3c71fe664a6d8a2b91ac56442128b332f/regex-2026.2.28-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6db7bfae0f8a2793ff1f7021468ea55e2699d0790eb58ee6ab36ae43aa00bc5b", size = 798925, upload-time = "2026-02-28T02:19:24.173Z" },
{ url = "https://files.pythonhosted.org/packages/71/57/a505927e449a9ccb41e2cc8d735e2abe3444b0213d1cf9cb364a8c1f2524/regex-2026.2.28-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d0b02e8b7e5874b48ae0f077ecca61c1a6a9f9895e9c6dfb191b55b242862033", size = 864701, upload-time = "2026-02-28T02:19:26.376Z" },
{ url = "https://files.pythonhosted.org/packages/a6/ad/c62cb60cdd93e13eac5b3d9d6bd5d284225ed0e3329426f94d2552dd7cca/regex-2026.2.28-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:25b6eb660c5cf4b8c3407a1ed462abba26a926cc9965e164268a3267bcc06a43", size = 770899, upload-time = "2026-02-28T02:19:29.38Z" },
{ url = "https://files.pythonhosted.org/packages/3c/5a/874f861f5c3d5ab99633e8030dee1bc113db8e0be299d1f4b07f5b5ec349/regex-2026.2.28-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:5a932ea8ad5d0430351ff9c76c8db34db0d9f53c1d78f06022a21f4e290c5c18", size = 854727, upload-time = "2026-02-28T02:19:31.494Z" },
{ url = "https://files.pythonhosted.org/packages/6b/ca/d2c03b0efde47e13db895b975b2be6a73ed90b8ba963677927283d43bf74/regex-2026.2.28-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:1c2c95e1a2b0f89d01e821ff4de1be4b5d73d1f4b0bf679fa27c1ad8d2327f1a", size = 800366, upload-time = "2026-02-28T02:19:34.248Z" },
{ url = "https://files.pythonhosted.org/packages/6f/93/43f405a98f54cc59c786efb4fc0b644615ed2392fc89d57d30da11f35b5b/regex-2026.2.19-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:93b16a18cadb938f0f2306267161d57eb33081a861cee9ffcd71e60941eb5dfc", size = 488365, upload-time = "2026-02-19T19:00:17.857Z" },
{ url = "https://files.pythonhosted.org/packages/66/46/da0efce22cd8f5ae28eeb25ac69703f49edcad3331ac22440776f4ea0867/regex-2026.2.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78af1e499cab704131f6f4e2f155b7f54ce396ca2acb6ef21a49507e4752e0be", size = 290737, upload-time = "2026-02-19T19:00:19.869Z" },
{ url = "https://files.pythonhosted.org/packages/fb/19/f735078448132c1c974974d30d5306337bc297fe6b6f126164bff72c1019/regex-2026.2.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eb20c11aa4c3793c9ad04c19a972078cdadb261b8429380364be28e867a843f2", size = 288654, upload-time = "2026-02-19T19:00:21.307Z" },
{ url = "https://files.pythonhosted.org/packages/e2/3e/6d7c24a2f423c03ad03e3fbddefa431057186ac1c4cb4fa98b03c7f39808/regex-2026.2.19-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db5fd91eec71e7b08de10011a2223d0faa20448d4e1380b9daa179fa7bf58906", size = 793785, upload-time = "2026-02-19T19:00:22.926Z" },
{ url = "https://files.pythonhosted.org/packages/67/32/fdb8107504b3122a79bde6705ac1f9d495ed1fe35b87d7cfc1864471999a/regex-2026.2.19-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fdbade8acba71bb45057c2b72f477f0b527c4895f9c83e6cfc30d4a006c21726", size = 860731, upload-time = "2026-02-19T19:00:25.196Z" },
{ url = "https://files.pythonhosted.org/packages/9a/fd/cc8c6f05868defd840be6e75919b1c3f462357969ac2c2a0958363b4dc23/regex-2026.2.19-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:31a5f561eb111d6aae14202e7043fb0b406d3c8dddbbb9e60851725c9b38ab1d", size = 907350, upload-time = "2026-02-19T19:00:27.093Z" },
{ url = "https://files.pythonhosted.org/packages/b5/1b/4590db9caa8db3d5a3fe31197c4e42c15aab3643b549ef6a454525fa3a61/regex-2026.2.19-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4584a3ee5f257b71e4b693cc9be3a5104249399f4116fe518c3f79b0c6fc7083", size = 800628, upload-time = "2026-02-19T19:00:29.392Z" },
{ url = "https://files.pythonhosted.org/packages/76/05/513eaa5b96fa579fd0b813e19ec047baaaf573d7374ff010fa139b384bf7/regex-2026.2.19-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:196553ba2a2f47904e5dc272d948a746352e2644005627467e055be19d73b39e", size = 773711, upload-time = "2026-02-19T19:00:30.996Z" },
{ url = "https://files.pythonhosted.org/packages/95/65/5aed06d8c54563d37fea496cf888be504879a3981a7c8e12c24b2c92c209/regex-2026.2.19-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0c10869d18abb759a3317c757746cc913d6324ce128b8bcec99350df10419f18", size = 783186, upload-time = "2026-02-19T19:00:34.598Z" },
{ url = "https://files.pythonhosted.org/packages/2c/57/79a633ad90f2371b4ef9cd72ba3a69a1a67d0cfaab4fe6fa8586d46044ef/regex-2026.2.19-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e689fed279cbe797a6b570bd18ff535b284d057202692c73420cb93cca41aa32", size = 854854, upload-time = "2026-02-19T19:00:37.306Z" },
{ url = "https://files.pythonhosted.org/packages/eb/2d/0f113d477d9e91ec4545ec36c82e58be25038d06788229c91ad52da2b7f5/regex-2026.2.19-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0782bd983f19ac7594039c9277cd6f75c89598c1d72f417e4d30d874105eb0c7", size = 762279, upload-time = "2026-02-19T19:00:39.793Z" },
{ url = "https://files.pythonhosted.org/packages/39/cb/237e9fa4f61469fd4f037164dbe8e675a376c88cf73aaaa0aedfd305601c/regex-2026.2.19-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:dbb240c81cfed5d4a67cb86d7676d9f7ec9c3f186310bec37d8a1415210e111e", size = 846172, upload-time = "2026-02-19T19:00:42.134Z" },
{ url = "https://files.pythonhosted.org/packages/ac/7c/104779c5915cc4eb557a33590f8a3f68089269c64287dd769afd76c7ce61/regex-2026.2.19-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80d31c3f1fe7e4c6cd1831cd4478a0609903044dfcdc4660abfe6fb307add7f0", size = 789078, upload-time = "2026-02-19T19:00:43.908Z" },
{ url = "https://files.pythonhosted.org/packages/b3/73/13b39c7c9356f333e564ab4790b6cb0df125b8e64e8d6474e73da49b1955/regex-2026.2.19-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c1665138776e4ac1aa75146669236f7a8a696433ec4e525abf092ca9189247cc", size = 489541, upload-time = "2026-02-19T19:00:52.728Z" },
{ url = "https://files.pythonhosted.org/packages/15/77/fcc7bd9a67000d07fbcc11ed226077287a40d5c84544e62171d29d3ef59c/regex-2026.2.19-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d792b84709021945597e05656aac059526df4e0c9ef60a0eaebb306f8fafcaa8", size = 291414, upload-time = "2026-02-19T19:00:54.51Z" },
{ url = "https://files.pythonhosted.org/packages/f9/87/3997fc72dc59233426ef2e18dfdd105bb123812fff740ee9cc348f1a3243/regex-2026.2.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db970bcce4d63b37b3f9eb8c893f0db980bbf1d404a1d8d2b17aa8189de92c53", size = 289140, upload-time = "2026-02-19T19:00:56.841Z" },
{ url = "https://files.pythonhosted.org/packages/f3/d0/b7dd3883ed1cff8ee0c0c9462d828aaf12be63bf5dc55453cbf423523b13/regex-2026.2.19-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03d706fbe7dfec503c8c3cb76f9352b3e3b53b623672aa49f18a251a6c71b8e6", size = 798767, upload-time = "2026-02-19T19:00:59.014Z" },
{ url = "https://files.pythonhosted.org/packages/4a/7e/8e2d09103832891b2b735a2515abf377db21144c6dd5ede1fb03c619bf09/regex-2026.2.19-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dbff048c042beef60aa1848961384572c5afb9e8b290b0f1203a5c42cf5af65", size = 864436, upload-time = "2026-02-19T19:01:00.772Z" },
{ url = "https://files.pythonhosted.org/packages/8a/2e/afea8d23a6db1f67f45e3a0da3057104ce32e154f57dd0c8997274d45fcd/regex-2026.2.19-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccaaf9b907ea6b4223d5cbf5fa5dff5f33dc66f4907a25b967b8a81339a6e332", size = 912391, upload-time = "2026-02-19T19:01:02.865Z" },
{ url = "https://files.pythonhosted.org/packages/59/3c/ea5a4687adaba5e125b9bd6190153d0037325a0ba3757cc1537cc2c8dd90/regex-2026.2.19-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75472631eee7898e16a8a20998d15106cb31cfde21cdf96ab40b432a7082af06", size = 803702, upload-time = "2026-02-19T19:01:05.298Z" },
{ url = "https://files.pythonhosted.org/packages/dc/c5/624a0705e8473a26488ec1a3a4e0b8763ecfc682a185c302dfec71daea35/regex-2026.2.19-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d89f85a5ccc0cec125c24be75610d433d65295827ebaf0d884cbe56df82d4774", size = 775980, upload-time = "2026-02-19T19:01:07.047Z" },
{ url = "https://files.pythonhosted.org/packages/4d/4b/ed776642533232b5599b7c1f9d817fe11faf597e8a92b7a44b841daaae76/regex-2026.2.19-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0d9f81806abdca3234c3dd582b8a97492e93de3602c8772013cb4affa12d1668", size = 788122, upload-time = "2026-02-19T19:01:08.744Z" },
{ url = "https://files.pythonhosted.org/packages/8c/58/e93e093921d13b9784b4f69896b6e2a9e09580a265c59d9eb95e87d288f2/regex-2026.2.19-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9dadc10d1c2bbb1326e572a226d2ec56474ab8aab26fdb8cf19419b372c349a9", size = 858910, upload-time = "2026-02-19T19:01:10.488Z" },
{ url = "https://files.pythonhosted.org/packages/85/77/ff1d25a0c56cd546e0455cbc93235beb33474899690e6a361fa6b52d265b/regex-2026.2.19-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6bc25d7e15f80c9dc7853cbb490b91c1ec7310808b09d56bd278fe03d776f4f6", size = 764153, upload-time = "2026-02-19T19:01:12.156Z" },
{ url = "https://files.pythonhosted.org/packages/cd/ef/8ec58df26d52d04443b1dc56f9be4b409f43ed5ae6c0248a287f52311fc4/regex-2026.2.19-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:965d59792f5037d9138da6fed50ba943162160443b43d4895b182551805aff9c", size = 850348, upload-time = "2026-02-19T19:01:14.147Z" },
{ url = "https://files.pythonhosted.org/packages/f5/b3/c42fd5ed91639ce5a4225b9df909180fc95586db071f2bf7c68d2ccbfbe6/regex-2026.2.19-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:38d88c6ed4a09ed61403dbdf515d969ccba34669af3961ceb7311ecd0cef504a", size = 789977, upload-time = "2026-02-19T19:01:15.838Z" },
{ url = "https://files.pythonhosted.org/packages/d2/2d/a849835e76ac88fcf9e8784e642d3ea635d183c4112150ca91499d6703af/regex-2026.2.19-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8df08decd339e8b3f6a2eb5c05c687fe9d963ae91f352bc57beb05f5b2ac6879", size = 489329, upload-time = "2026-02-19T19:01:23.841Z" },
{ url = "https://files.pythonhosted.org/packages/da/aa/78ff4666d3855490bae87845a5983485e765e1f970da20adffa2937b241d/regex-2026.2.19-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3aa0944f1dc6e92f91f3b306ba7f851e1009398c84bfd370633182ee4fc26a64", size = 291308, upload-time = "2026-02-19T19:01:25.605Z" },
{ url = "https://files.pythonhosted.org/packages/cd/58/714384efcc07ae6beba528a541f6e99188c5cc1bc0295337f4e8a868296d/regex-2026.2.19-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c13228fbecb03eadbfd8f521732c5fda09ef761af02e920a3148e18ad0e09968", size = 289033, upload-time = "2026-02-19T19:01:27.243Z" },
{ url = "https://files.pythonhosted.org/packages/75/ec/6438a9344d2869cf5265236a06af1ca6d885e5848b6561e10629bc8e5a11/regex-2026.2.19-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d0e72703c60d68b18b27cde7cdb65ed2570ae29fb37231aa3076bfb6b1d1c13", size = 798798, upload-time = "2026-02-19T19:01:28.877Z" },
{ url = "https://files.pythonhosted.org/packages/c2/be/b1ce2d395e3fd2ce5f2fde2522f76cade4297cfe84cd61990ff48308749c/regex-2026.2.19-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:46e69a4bf552e30e74a8aa73f473c87efcb7f6e8c8ece60d9fd7bf13d5c86f02", size = 864444, upload-time = "2026-02-19T19:01:30.933Z" },
{ url = "https://files.pythonhosted.org/packages/d5/97/a3406460c504f7136f140d9461960c25f058b0240e4424d6fb73c7a067ab/regex-2026.2.19-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8edda06079bd770f7f0cf7f3bba1a0b447b96b4a543c91fe0c142d034c166161", size = 912633, upload-time = "2026-02-19T19:01:32.744Z" },
{ url = "https://files.pythonhosted.org/packages/8b/d9/e5dbef95008d84e9af1dc0faabbc34a7fbc8daa05bc5807c5cf86c2bec49/regex-2026.2.19-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cbc69eae834afbf634f7c902fc72ff3e993f1c699156dd1af1adab5d06b7fe7", size = 803718, upload-time = "2026-02-19T19:01:34.61Z" },
{ url = "https://files.pythonhosted.org/packages/2f/e5/61d80132690a1ef8dc48e0f44248036877aebf94235d43f63a20d1598888/regex-2026.2.19-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bcf57d30659996ee5c7937999874504c11b5a068edc9515e6a59221cc2744dd1", size = 775975, upload-time = "2026-02-19T19:01:36.525Z" },
{ url = "https://files.pythonhosted.org/packages/05/32/ae828b3b312c972cf228b634447de27237d593d61505e6ad84723f8eabba/regex-2026.2.19-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8e6e77cd92216eb489e21e5652a11b186afe9bdefca8a2db739fd6b205a9e0a4", size = 788129, upload-time = "2026-02-19T19:01:38.498Z" },
{ url = "https://files.pythonhosted.org/packages/cb/25/d74f34676f22bec401eddf0e5e457296941e10cbb2a49a571ca7a2c16e5a/regex-2026.2.19-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b9ab8dec42afefa6314ea9b31b188259ffdd93f433d77cad454cd0b8d235ce1c", size = 858818, upload-time = "2026-02-19T19:01:40.409Z" },
{ url = "https://files.pythonhosted.org/packages/1e/eb/0bc2b01a6b0b264e1406e5ef11cae3f634c3bd1a6e61206fd3227ce8e89c/regex-2026.2.19-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:294c0fb2e87c6bcc5f577c8f609210f5700b993151913352ed6c6af42f30f95f", size = 764186, upload-time = "2026-02-19T19:01:43.009Z" },
{ url = "https://files.pythonhosted.org/packages/eb/37/5fe5a630d0d99ecf0c3570f8905dafbc160443a2d80181607770086c9812/regex-2026.2.19-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c0924c64b082d4512b923ac016d6e1dcf647a3560b8a4c7e55cbbd13656cb4ed", size = 850363, upload-time = "2026-02-19T19:01:45.015Z" },
{ url = "https://files.pythonhosted.org/packages/c3/45/ef68d805294b01ec030cfd388724ba76a5a21a67f32af05b17924520cb0b/regex-2026.2.19-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790dbf87b0361606cb0d79b393c3e8f4436a14ee56568a7463014565d97da02a", size = 790026, upload-time = "2026-02-19T19:01:47.51Z" },
{ url = "https://files.pythonhosted.org/packages/a9/a2/e0b4575b93bc84db3b1fab24183e008691cd2db5c0ef14ed52681fbd94dd/regex-2026.2.19-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:93d881cab5afdc41a005dba1524a40947d6f7a525057aa64aaf16065cf62faa9", size = 492202, upload-time = "2026-02-19T19:01:54.816Z" },
{ url = "https://files.pythonhosted.org/packages/24/b5/b84fec8cbb5f92a7eed2b6b5353a6a9eed9670fee31817c2da9eb85dc797/regex-2026.2.19-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:80caaa1ddcc942ec7be18427354f9d58a79cee82dea2a6b3d4fd83302e1240d7", size = 292884, upload-time = "2026-02-19T19:01:58.254Z" },
{ url = "https://files.pythonhosted.org/packages/70/0c/fe89966dfae43da46f475362401f03e4d7dc3a3c955b54f632abc52669e0/regex-2026.2.19-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d793c5b4d2b4c668524cd1651404cfc798d40694c759aec997e196fe9729ec60", size = 291236, upload-time = "2026-02-19T19:01:59.966Z" },
{ url = "https://files.pythonhosted.org/packages/f2/f7/bda2695134f3e63eb5cccbbf608c2a12aab93d261ff4e2fe49b47fabc948/regex-2026.2.19-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5100acb20648d9efd3f4e7e91f51187f95f22a741dcd719548a6cf4e1b34b3f", size = 807660, upload-time = "2026-02-19T19:02:01.632Z" },
{ url = "https://files.pythonhosted.org/packages/11/56/6e3a4bf5e60d17326b7003d91bbde8938e439256dec211d835597a44972d/regex-2026.2.19-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5e3a31e94d10e52a896adaa3adf3621bd526ad2b45b8c2d23d1bbe74c7423007", size = 873585, upload-time = "2026-02-19T19:02:03.522Z" },
{ url = "https://files.pythonhosted.org/packages/35/5e/c90c6aa4d1317cc11839359479cfdd2662608f339e84e81ba751c8a4e461/regex-2026.2.19-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8497421099b981f67c99eba4154cf0dfd8e47159431427a11cfb6487f7791d9e", size = 915243, upload-time = "2026-02-19T19:02:05.608Z" },
{ url = "https://files.pythonhosted.org/packages/90/7c/981ea0694116793001496aaf9524e5c99e122ec3952d9e7f1878af3a6bf1/regex-2026.2.19-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e7a08622f7d51d7a068f7e4052a38739c412a3e74f55817073d2e2418149619", size = 812922, upload-time = "2026-02-19T19:02:08.115Z" },
{ url = "https://files.pythonhosted.org/packages/2d/be/9eda82afa425370ffdb3fa9f3ea42450b9ae4da3ff0a4ec20466f69e371b/regex-2026.2.19-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8abe671cf0f15c26b1ad389bf4043b068ce7d3b1c5d9313e12895f57d6738555", size = 781318, upload-time = "2026-02-19T19:02:10.072Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d5/50f0bbe56a8199f60a7b6c714e06e54b76b33d31806a69d0703b23ce2a9e/regex-2026.2.19-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5a8f28dd32a4ce9c41758d43b5b9115c1c497b4b1f50c457602c1d571fa98ce1", size = 795649, upload-time = "2026-02-19T19:02:11.96Z" },
{ url = "https://files.pythonhosted.org/packages/c5/09/d039f081e44a8b0134d0bb2dd805b0ddf390b69d0b58297ae098847c572f/regex-2026.2.19-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:654dc41a5ba9b8cc8432b3f1aa8906d8b45f3e9502442a07c2f27f6c63f85db5", size = 868844, upload-time = "2026-02-19T19:02:14.043Z" },
{ url = "https://files.pythonhosted.org/packages/ef/53/e2903b79a19ec8557fe7cd21cd093956ff2dbc2e0e33969e3adbe5b184dd/regex-2026.2.19-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4a02faea614e7fdd6ba8b3bec6c8e79529d356b100381cec76e638f45d12ca04", size = 770113, upload-time = "2026-02-19T19:02:16.161Z" },
{ url = "https://files.pythonhosted.org/packages/8f/e2/784667767b55714ebb4e59bf106362327476b882c0b2f93c25e84cc99b1a/regex-2026.2.19-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d96162140bb819814428800934c7b71b7bffe81fb6da2d6abc1dcca31741eca3", size = 854922, upload-time = "2026-02-19T19:02:18.155Z" },
{ url = "https://files.pythonhosted.org/packages/59/78/9ef4356bd4aed752775bd18071034979b85f035fec51f3a4f9dea497a254/regex-2026.2.19-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c227f2922153ee42bbeb355fd6d009f8c81d9d7bdd666e2276ce41f53ed9a743", size = 799636, upload-time = "2026-02-19T19:02:20.04Z" },
{ url = "https://files.pythonhosted.org/packages/2d/e2/7ad4e76a6dddefc0d64dbe12a4d3ca3947a19ddc501f864a5df2a8222ddd/regex-2026.2.19-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:03d191a9bcf94d31af56d2575210cb0d0c6a054dbcad2ea9e00aa4c42903b919", size = 489306, upload-time = "2026-02-19T19:02:29.058Z" },
{ url = "https://files.pythonhosted.org/packages/14/95/ee1736135733afbcf1846c58671046f99c4d5170102a150ebb3dd8d701d9/regex-2026.2.19-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:516ee067c6c721d0d0bfb80a2004edbd060fffd07e456d4e1669e38fe82f922e", size = 291218, upload-time = "2026-02-19T19:02:31.083Z" },
{ url = "https://files.pythonhosted.org/packages/ef/08/180d1826c3d7065200a5168c6b993a44947395c7bb6e04b2c2a219c34225/regex-2026.2.19-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:997862c619994c4a356cb7c3592502cbd50c2ab98da5f61c5c871f10f22de7e5", size = 289097, upload-time = "2026-02-19T19:02:33.485Z" },
{ url = "https://files.pythonhosted.org/packages/28/93/0651924c390c5740f5f896723f8ddd946a6c63083a7d8647231c343912ff/regex-2026.2.19-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b9e1b8a7ebe2807cd7bbdf662510c8e43053a23262b9f46ad4fc2dfc9d204e", size = 799147, upload-time = "2026-02-19T19:02:35.669Z" },
{ url = "https://files.pythonhosted.org/packages/a7/00/2078bd8bcd37d58a756989adbfd9f1d0151b7ca4085a9c2a07e917fbac61/regex-2026.2.19-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6c8fb3b19652e425ff24169dad3ee07f99afa7996caa9dfbb3a9106cd726f49a", size = 865239, upload-time = "2026-02-19T19:02:38.012Z" },
{ url = "https://files.pythonhosted.org/packages/2a/13/75195161ec16936b35a365fa8c1dd2ab29fd910dd2587765062b174d8cfc/regex-2026.2.19-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50f1ee9488dd7a9fda850ec7c68cad7a32fa49fd19733f5403a3f92b451dcf73", size = 911904, upload-time = "2026-02-19T19:02:40.737Z" },
{ url = "https://files.pythonhosted.org/packages/96/72/ac42f6012179343d1c4bd0ffee8c948d841cb32ea188d37e96d80527fcc9/regex-2026.2.19-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ab780092b1424d13200aa5a62996e95f65ee3db8509be366437439cdc0af1a9f", size = 803518, upload-time = "2026-02-19T19:02:42.923Z" },
{ url = "https://files.pythonhosted.org/packages/bc/d1/75a08e2269b007b9783f0f86aa64488e023141219cb5f14dc1e69cda56c6/regex-2026.2.19-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:17648e1a88e72d88641b12635e70e6c71c5136ba14edba29bf8fc6834005a265", size = 775866, upload-time = "2026-02-19T19:02:45.189Z" },
{ url = "https://files.pythonhosted.org/packages/92/41/70e7d05faf6994c2ca7a9fcaa536da8f8e4031d45b0ec04b57040ede201f/regex-2026.2.19-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f914ae8c804c8a8a562fe216100bc156bfb51338c1f8d55fe32cf407774359a", size = 788224, upload-time = "2026-02-19T19:02:47.804Z" },
{ url = "https://files.pythonhosted.org/packages/c8/83/34a2dd601f9deb13c20545c674a55f4a05c90869ab73d985b74d639bac43/regex-2026.2.19-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c7e121a918bbee3f12ac300ce0a0d2f2c979cf208fb071ed8df5a6323281915c", size = 859682, upload-time = "2026-02-19T19:02:50.583Z" },
{ url = "https://files.pythonhosted.org/packages/8e/30/136db9a09a7f222d6e48b806f3730e7af6499a8cad9c72ac0d49d52c746e/regex-2026.2.19-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2fedd459c791da24914ecc474feecd94cf7845efb262ac3134fe27cbd7eda799", size = 764223, upload-time = "2026-02-19T19:02:52.777Z" },
{ url = "https://files.pythonhosted.org/packages/9e/ea/bb947743c78a16df481fa0635c50aa1a439bb80b0e6dc24cd4e49c716679/regex-2026.2.19-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:ea8dfc99689240e61fb21b5fc2828f68b90abf7777d057b62d3166b7c1543c4c", size = 850101, upload-time = "2026-02-19T19:02:55.87Z" },
{ url = "https://files.pythonhosted.org/packages/25/27/e3bfe6e97a99f7393665926be02fef772da7f8aa59e50bc3134e4262a032/regex-2026.2.19-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fff45852160960f29e184ec8a5be5ab4063cfd0b168d439d1fc4ac3744bf29e", size = 789904, upload-time = "2026-02-19T19:02:58.523Z" },
{ url = "https://files.pythonhosted.org/packages/49/0b/f901cfeb4efd83e4f5c3e9f91a6de77e8e5ceb18555698aca3a27e215ed3/regex-2026.2.19-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:5ec1d7c080832fdd4e150c6f5621fe674c70c63b3ae5a4454cebd7796263b175", size = 492196, upload-time = "2026-02-19T19:03:08.188Z" },
{ url = "https://files.pythonhosted.org/packages/94/0a/349b959e3da874e15eda853755567b4cde7e5309dbb1e07bfe910cfde452/regex-2026.2.19-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8457c1bc10ee9b29cdfd897ccda41dce6bde0e9abd514bcfef7bcd05e254d411", size = 292878, upload-time = "2026-02-19T19:03:10.272Z" },
{ url = "https://files.pythonhosted.org/packages/98/b0/9d81b3c2c5ddff428f8c506713737278979a2c476f6e3675a9c51da0c389/regex-2026.2.19-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cce8027010d1ffa3eb89a0b19621cdc78ae548ea2b49fea1f7bfb3ea77064c2b", size = 291235, upload-time = "2026-02-19T19:03:12.5Z" },
{ url = "https://files.pythonhosted.org/packages/04/e7/be7818df8691dbe9508c381ea2cc4c1153e4fdb1c4b06388abeaa93bd712/regex-2026.2.19-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11c138febb40546ff9e026dbbc41dc9fb8b29e61013fa5848ccfe045f5b23b83", size = 807893, upload-time = "2026-02-19T19:03:15.064Z" },
{ url = "https://files.pythonhosted.org/packages/0c/b6/b898a8b983190cfa0276031c17beb73cfd1db07c03c8c37f606d80b655e2/regex-2026.2.19-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:74ff212aa61532246bb3036b3dfea62233414b0154b8bc3676975da78383cac3", size = 873696, upload-time = "2026-02-19T19:03:17.848Z" },
{ url = "https://files.pythonhosted.org/packages/1a/98/126ba671d54f19080ec87cad228fb4f3cc387fff8c4a01cb4e93f4ff9d94/regex-2026.2.19-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d00c95a2b6bfeb3ea1cb68d1751b1dfce2b05adc2a72c488d77a780db06ab867", size = 915493, upload-time = "2026-02-19T19:03:20.343Z" },
{ url = "https://files.pythonhosted.org/packages/b2/10/550c84a1a1a7371867fe8be2bea7df55e797cbca4709974811410e195c5d/regex-2026.2.19-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:311fcccb76af31be4c588d5a17f8f1a059ae8f4b097192896ebffc95612f223a", size = 813094, upload-time = "2026-02-19T19:03:23.287Z" },
{ url = "https://files.pythonhosted.org/packages/29/fb/ba221d2fc76a27b6b7d7a60f73a7a6a7bac21c6ba95616a08be2bcb434b0/regex-2026.2.19-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:77cfd6b5e7c4e8bf7a39d243ea05882acf5e3c7002b0ef4756de6606893b0ecd", size = 781583, upload-time = "2026-02-19T19:03:26.872Z" },
{ url = "https://files.pythonhosted.org/packages/26/f1/af79231301297c9e962679efc04a31361b58dc62dec1fc0cb4b8dd95956a/regex-2026.2.19-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6380f29ff212ec922b6efb56100c089251940e0526a0d05aa7c2d9b571ddf2fe", size = 795875, upload-time = "2026-02-19T19:03:29.223Z" },
{ url = "https://files.pythonhosted.org/packages/a0/90/1e1d76cb0a2d0a4f38a039993e1c5cd971ae50435d751c5bae4f10e1c302/regex-2026.2.19-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:655f553a1fa3ab8a7fd570eca793408b8d26a80bfd89ed24d116baaf13a38969", size = 868916, upload-time = "2026-02-19T19:03:31.415Z" },
{ url = "https://files.pythonhosted.org/packages/9a/67/a1c01da76dbcfed690855a284c665cc0a370e7d02d1bd635cf9ff7dd74b8/regex-2026.2.19-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:015088b8558502f1f0bccd58754835aa154a7a5b0bd9d4c9b7b96ff4ae9ba876", size = 770386, upload-time = "2026-02-19T19:03:33.972Z" },
{ url = "https://files.pythonhosted.org/packages/49/6f/94842bf294f432ff3836bfd91032e2ecabea6d284227f12d1f935318c9c4/regex-2026.2.19-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9e6693b8567a59459b5dda19104c4a4dbbd4a1c78833eacc758796f2cfef1854", size = 855007, upload-time = "2026-02-19T19:03:36.238Z" },
{ url = "https://files.pythonhosted.org/packages/ff/93/393cd203ca0d1d368f05ce12d2c7e91a324bc93c240db2e6d5ada05835f4/regex-2026.2.19-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4071209fd4376ab5ceec72ad3507e9d3517c59e38a889079b98916477a871868", size = 799863, upload-time = "2026-02-19T19:03:38.497Z" },
]
[[package]]
@@ -4241,24 +4172,24 @@ wheels = [
[[package]]
name = "ruff"
version = "0.15.5"
version = "0.15.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/77/9b/840e0039e65fcf12758adf684d2289024d6140cde9268cc59887dc55189c/ruff-0.15.5.tar.gz", hash = "sha256:7c3601d3b6d76dce18c5c824fc8d06f4eef33d6df0c21ec7799510cde0f159a2", size = 4574214, upload-time = "2026-03-05T20:06:34.946Z" }
sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/20/5369c3ce21588c708bcbe517a8fbe1a8dfdb5dfd5137e14790b1da71612c/ruff-0.15.5-py3-none-linux_armv6l.whl", hash = "sha256:4ae44c42281f42e3b06b988e442d344a5b9b72450ff3c892e30d11b29a96a57c", size = 10478185, upload-time = "2026-03-05T20:06:29.093Z" },
{ url = "https://files.pythonhosted.org/packages/44/ed/e81dd668547da281e5dce710cf0bc60193f8d3d43833e8241d006720e42b/ruff-0.15.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6edd3792d408ebcf61adabc01822da687579a1a023f297618ac27a5b51ef0080", size = 10859201, upload-time = "2026-03-05T20:06:32.632Z" },
{ url = "https://files.pythonhosted.org/packages/c4/8f/533075f00aaf19b07c5cd6aa6e5d89424b06b3b3f4583bfa9c640a079059/ruff-0.15.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:89f463f7c8205a9f8dea9d658d59eff49db05f88f89cc3047fb1a02d9f344010", size = 10184752, upload-time = "2026-03-05T20:06:40.312Z" },
{ url = "https://files.pythonhosted.org/packages/66/0e/ba49e2c3fa0395b3152bad634c7432f7edfc509c133b8f4529053ff024fb/ruff-0.15.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba786a8295c6574c1116704cf0b9e6563de3432ac888d8f83685654fe528fd65", size = 10534857, upload-time = "2026-03-05T20:06:19.581Z" },
{ url = "https://files.pythonhosted.org/packages/59/71/39234440f27a226475a0659561adb0d784b4d247dfe7f43ffc12dd02e288/ruff-0.15.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd4b801e57955fe9f02b31d20375ab3a5c4415f2e5105b79fb94cf2642c91440", size = 10309120, upload-time = "2026-03-05T20:06:00.435Z" },
{ url = "https://files.pythonhosted.org/packages/f5/87/4140aa86a93df032156982b726f4952aaec4a883bb98cb6ef73c347da253/ruff-0.15.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391f7c73388f3d8c11b794dbbc2959a5b5afe66642c142a6effa90b45f6f5204", size = 11047428, upload-time = "2026-03-05T20:05:51.867Z" },
{ url = "https://files.pythonhosted.org/packages/5a/f7/4953e7e3287676f78fbe85e3a0ca414c5ca81237b7575bdadc00229ac240/ruff-0.15.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dc18f30302e379fe1e998548b0f5e9f4dff907f52f73ad6da419ea9c19d66c8", size = 11914251, upload-time = "2026-03-05T20:06:22.887Z" },
{ url = "https://files.pythonhosted.org/packages/77/46/0f7c865c10cf896ccf5a939c3e84e1cfaeed608ff5249584799a74d33835/ruff-0.15.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1cc6e7f90087e2d27f98dc34ed1b3ab7c8f0d273cc5431415454e22c0bd2a681", size = 11333801, upload-time = "2026-03-05T20:05:57.168Z" },
{ url = "https://files.pythonhosted.org/packages/d3/01/a10fe54b653061585e655f5286c2662ebddb68831ed3eaebfb0eb08c0a16/ruff-0.15.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cb7169f53c1ddb06e71a9aebd7e98fc0fea936b39afb36d8e86d36ecc2636a", size = 11206821, upload-time = "2026-03-05T20:06:03.441Z" },
{ url = "https://files.pythonhosted.org/packages/7a/0d/2132ceaf20c5e8699aa83da2706ecb5c5dcdf78b453f77edca7fb70f8a93/ruff-0.15.5-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9b037924500a31ee17389b5c8c4d88874cc6ea8e42f12e9c61a3d754ff72f1ca", size = 11133326, upload-time = "2026-03-05T20:06:25.655Z" },
{ url = "https://files.pythonhosted.org/packages/72/cb/2e5259a7eb2a0f87c08c0fe5bf5825a1e4b90883a52685524596bfc93072/ruff-0.15.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:65bb414e5b4eadd95a8c1e4804f6772bbe8995889f203a01f77ddf2d790929dd", size = 10510820, upload-time = "2026-03-05T20:06:37.79Z" },
{ url = "https://files.pythonhosted.org/packages/ff/20/b67ce78f9e6c59ffbdb5b4503d0090e749b5f2d31b599b554698a80d861c/ruff-0.15.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d20aa469ae3b57033519c559e9bc9cd9e782842e39be05b50e852c7c981fa01d", size = 10302395, upload-time = "2026-03-05T20:05:54.504Z" },
{ url = "https://files.pythonhosted.org/packages/5f/e5/719f1acccd31b720d477751558ed74e9c88134adcc377e5e886af89d3072/ruff-0.15.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:15388dd28c9161cdb8eda68993533acc870aa4e646a0a277aa166de9ad5a8752", size = 10754069, upload-time = "2026-03-05T20:06:06.422Z" },
{ url = "https://files.pythonhosted.org/packages/c3/9c/d1db14469e32d98f3ca27079dbd30b7b44dbb5317d06ab36718dee3baf03/ruff-0.15.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b30da330cbd03bed0c21420b6b953158f60c74c54c5f4c1dabbdf3a57bf355d2", size = 11304315, upload-time = "2026-03-05T20:06:10.867Z" },
{ url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = "2026-02-26T20:04:20.093Z" },
{ url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" },
{ url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" },
{ url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" },
{ url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" },
{ url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" },
{ url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" },
{ url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" },
{ url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" },
{ url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" },
{ url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" },
{ url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" },
{ url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" },
{ url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" },
]
[[package]]
@@ -4283,7 +4214,7 @@ wheels = [
[[package]]
name = "scikit-learn"
version = "1.8.0"
version = "1.7.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "joblib", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -4291,32 +4222,28 @@ dependencies = [
{ name = "scipy", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "threadpoolctl", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0e/d4/40988bf3b8e34feec1d0e6a051446b1f66225f8529b9309becaeef62b6c4/scikit_learn-1.8.0.tar.gz", hash = "sha256:9bccbb3b40e3de10351f8f5068e105d0f4083b1a65fa07b6634fbc401a6287fd", size = 7335585, upload-time = "2025-12-10T07:08:53.618Z" }
sdist = { url = "https://files.pythonhosted.org/packages/98/c2/a7855e41c9d285dfe86dc50b250978105dce513d6e459ea66a6aeb0e1e0c/scikit_learn-1.7.2.tar.gz", hash = "sha256:20e9e49ecd130598f1ca38a1d85090e1a600147b9c02fa6f15d69cb53d968fda", size = 7193136, upload-time = "2025-09-09T08:21:29.075Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/92/53ea2181da8ac6bf27170191028aee7251f8f841f8d3edbfdcaf2008fde9/scikit_learn-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:146b4d36f800c013d267b29168813f7a03a43ecd2895d04861f1240b564421da", size = 8595835, upload-time = "2025-12-10T07:07:39.385Z" },
{ url = "https://files.pythonhosted.org/packages/01/18/d154dc1638803adf987910cdd07097d9c526663a55666a97c124d09fb96a/scikit_learn-1.8.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f984ca4b14914e6b4094c5d52a32ea16b49832c03bd17a110f004db3c223e8e1", size = 8080381, upload-time = "2025-12-10T07:07:41.93Z" },
{ url = "https://files.pythonhosted.org/packages/8a/44/226142fcb7b7101e64fdee5f49dbe6288d4c7af8abf593237b70fca080a4/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5e30adb87f0cc81c7690a84f7932dd66be5bac57cfe16b91cb9151683a4a2d3b", size = 8799632, upload-time = "2025-12-10T07:07:43.899Z" },
{ url = "https://files.pythonhosted.org/packages/36/4d/4a67f30778a45d542bbea5db2dbfa1e9e100bf9ba64aefe34215ba9f11f6/scikit_learn-1.8.0-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ada8121bcb4dac28d930febc791a69f7cb1673c8495e5eee274190b73a4559c1", size = 9103788, upload-time = "2025-12-10T07:07:45.982Z" },
{ url = "https://files.pythonhosted.org/packages/90/74/e6a7cc4b820e95cc38cf36cd74d5aa2b42e8ffc2d21fe5a9a9c45c1c7630/scikit_learn-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5fb63362b5a7ddab88e52b6dbb47dac3fd7dafeee740dc6c8d8a446ddedade8e", size = 8548242, upload-time = "2025-12-10T07:07:51.568Z" },
{ url = "https://files.pythonhosted.org/packages/49/d8/9be608c6024d021041c7f0b3928d4749a706f4e2c3832bbede4fb4f58c95/scikit_learn-1.8.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:5025ce924beccb28298246e589c691fe1b8c1c96507e6d27d12c5fadd85bfd76", size = 8079075, upload-time = "2025-12-10T07:07:53.697Z" },
{ url = "https://files.pythonhosted.org/packages/dd/47/f187b4636ff80cc63f21cd40b7b2d177134acaa10f6bb73746130ee8c2e5/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4496bb2cf7a43ce1a2d7524a79e40bc5da45cf598dbf9545b7e8316ccba47bb4", size = 8660492, upload-time = "2025-12-10T07:07:55.574Z" },
{ url = "https://files.pythonhosted.org/packages/97/74/b7a304feb2b49df9fafa9382d4d09061a96ee9a9449a7cbea7988dda0828/scikit_learn-1.8.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a0bcfe4d0d14aec44921545fd2af2338c7471de9cb701f1da4c9d85906ab847a", size = 8931904, upload-time = "2025-12-10T07:07:57.666Z" },
{ url = "https://files.pythonhosted.org/packages/03/aa/e22e0768512ce9255eba34775be2e85c2048da73da1193e841707f8f039c/scikit_learn-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d6ae97234d5d7079dc0040990a6f7aeb97cb7fa7e8945f1999a429b23569e0a", size = 8513770, upload-time = "2025-12-10T07:08:03.251Z" },
{ url = "https://files.pythonhosted.org/packages/58/37/31b83b2594105f61a381fc74ca19e8780ee923be2d496fcd8d2e1147bd99/scikit_learn-1.8.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:edec98c5e7c128328124a029bceb09eda2d526997780fef8d65e9a69eead963e", size = 8044458, upload-time = "2025-12-10T07:08:05.336Z" },
{ url = "https://files.pythonhosted.org/packages/2d/5a/3f1caed8765f33eabb723596666da4ebbf43d11e96550fb18bdec42b467b/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:74b66d8689d52ed04c271e1329f0c61635bcaf5b926db9b12d58914cdc01fe57", size = 8610341, upload-time = "2025-12-10T07:08:07.732Z" },
{ url = "https://files.pythonhosted.org/packages/38/cf/06896db3f71c75902a8e9943b444a56e727418f6b4b4a90c98c934f51ed4/scikit_learn-1.8.0-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fdf95767f989b0cfedb85f7ed8ca215d4be728031f56ff5a519ee1e3276dc2e", size = 8900022, upload-time = "2025-12-10T07:08:09.862Z" },
{ url = "https://files.pythonhosted.org/packages/d2/7d/a630359fc9dcc95496588c8d8e3245cc8fd81980251079bc09c70d41d951/scikit_learn-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7cc267b6108f0a1499a734167282c00c4ebf61328566b55ef262d48e9849c735", size = 8826045, upload-time = "2025-12-10T07:08:15.215Z" },
{ url = "https://files.pythonhosted.org/packages/cc/56/a0c86f6930cfcd1c7054a2bc417e26960bb88d32444fe7f71d5c2cfae891/scikit_learn-1.8.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:fe1c011a640a9f0791146011dfd3c7d9669785f9fed2b2a5f9e207536cf5c2fd", size = 8420324, upload-time = "2025-12-10T07:08:17.561Z" },
{ url = "https://files.pythonhosted.org/packages/46/1e/05962ea1cebc1cf3876667ecb14c283ef755bf409993c5946ade3b77e303/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:72358cce49465d140cc4e7792015bb1f0296a9742d5622c67e31399b75468b9e", size = 8680651, upload-time = "2025-12-10T07:08:19.952Z" },
{ url = "https://files.pythonhosted.org/packages/fe/56/a85473cd75f200c9759e3a5f0bcab2d116c92a8a02ee08ccd73b870f8bb4/scikit_learn-1.8.0-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80832434a6cc114f5219211eec13dcbc16c2bac0e31ef64c6d346cde3cf054cb", size = 8925045, upload-time = "2025-12-10T07:08:22.11Z" },
{ url = "https://files.pythonhosted.org/packages/24/05/1af2c186174cc92dcab2233f327336058c077d38f6fe2aceb08e6ab4d509/scikit_learn-1.8.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:c22a2da7a198c28dd1a6e1136f19c830beab7fdca5b3e5c8bba8394f8a5c45b3", size = 8528667, upload-time = "2025-12-10T07:08:27.541Z" },
{ url = "https://files.pythonhosted.org/packages/a8/25/01c0af38fe969473fb292bba9dc2b8f9b451f3112ff242c647fee3d0dfe7/scikit_learn-1.8.0-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:6b595b07a03069a2b1740dc08c2299993850ea81cce4fe19b2421e0c970de6b7", size = 8066524, upload-time = "2025-12-10T07:08:29.822Z" },
{ url = "https://files.pythonhosted.org/packages/be/ce/a0623350aa0b68647333940ee46fe45086c6060ec604874e38e9ab7d8e6c/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:29ffc74089f3d5e87dfca4c2c8450f88bdc61b0fc6ed5d267f3988f19a1309f6", size = 8657133, upload-time = "2025-12-10T07:08:31.865Z" },
{ url = "https://files.pythonhosted.org/packages/b8/cb/861b41341d6f1245e6ca80b1c1a8c4dfce43255b03df034429089ca2a2c5/scikit_learn-1.8.0-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fb65db5d7531bccf3a4f6bec3462223bea71384e2cda41da0f10b7c292b9e7c4", size = 8923223, upload-time = "2025-12-10T07:08:34.166Z" },
{ url = "https://files.pythonhosted.org/packages/2d/d1/ef294ca754826daa043b2a104e59960abfab4cf653891037d19dd5b6f3cf/scikit_learn-1.8.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:4511be56637e46c25721e83d1a9cea9614e7badc7040c4d573d75fbe257d6fd7", size = 8848305, upload-time = "2025-12-10T07:08:41.013Z" },
{ url = "https://files.pythonhosted.org/packages/5b/e2/b1f8b05138ee813b8e1a4149f2f0d289547e60851fd1bb268886915adbda/scikit_learn-1.8.0-cp314-cp314t-macosx_12_0_arm64.whl", hash = "sha256:a69525355a641bf8ef136a7fa447672fb54fe8d60cab5538d9eb7c6438543fb9", size = 8432257, upload-time = "2025-12-10T07:08:42.873Z" },
{ url = "https://files.pythonhosted.org/packages/26/11/c32b2138a85dcb0c99f6afd13a70a951bfdff8a6ab42d8160522542fb647/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c2656924ec73e5939c76ac4c8b026fc203b83d8900362eb2599d8aee80e4880f", size = 8678673, upload-time = "2025-12-10T07:08:45.362Z" },
{ url = "https://files.pythonhosted.org/packages/c7/57/51f2384575bdec454f4fe4e7a919d696c9ebce914590abf3e52d47607ab8/scikit_learn-1.8.0-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15fc3b5d19cc2be65404786857f2e13c70c83dd4782676dd6814e3b89dc8f5b9", size = 8922467, upload-time = "2025-12-10T07:08:47.408Z" },
{ url = "https://files.pythonhosted.org/packages/43/83/564e141eef908a5863a54da8ca342a137f45a0bfb71d1d79704c9894c9d1/scikit_learn-1.7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7509693451651cd7361d30ce4e86a1347493554f172b1c72a39300fa2aea79e", size = 9331967, upload-time = "2025-09-09T08:20:32.421Z" },
{ url = "https://files.pythonhosted.org/packages/18/d6/ba863a4171ac9d7314c4d3fc251f015704a2caeee41ced89f321c049ed83/scikit_learn-1.7.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:0486c8f827c2e7b64837c731c8feff72c0bd2b998067a8a9cbc10643c31f0fe1", size = 8648645, upload-time = "2025-09-09T08:20:34.436Z" },
{ url = "https://files.pythonhosted.org/packages/ef/0e/97dbca66347b8cf0ea8b529e6bb9367e337ba2e8be0ef5c1a545232abfde/scikit_learn-1.7.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:89877e19a80c7b11a2891a27c21c4894fb18e2c2e077815bcade10d34287b20d", size = 9715424, upload-time = "2025-09-09T08:20:36.776Z" },
{ url = "https://files.pythonhosted.org/packages/f7/32/1f3b22e3207e1d2c883a7e09abb956362e7d1bd2f14458c7de258a26ac15/scikit_learn-1.7.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8da8bf89d4d79aaec192d2bda62f9b56ae4e5b4ef93b6a56b5de4977e375c1f1", size = 9509234, upload-time = "2025-09-09T08:20:38.957Z" },
{ url = "https://files.pythonhosted.org/packages/a7/aa/3996e2196075689afb9fce0410ebdb4a09099d7964d061d7213700204409/scikit_learn-1.7.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8d91a97fa2b706943822398ab943cde71858a50245e31bc71dba62aab1d60a96", size = 9259818, upload-time = "2025-09-09T08:20:43.19Z" },
{ url = "https://files.pythonhosted.org/packages/43/5d/779320063e88af9c4a7c2cf463ff11c21ac9c8bd730c4a294b0000b666c9/scikit_learn-1.7.2-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acbc0f5fd2edd3432a22c69bed78e837c70cf896cd7993d71d51ba6708507476", size = 8636997, upload-time = "2025-09-09T08:20:45.468Z" },
{ url = "https://files.pythonhosted.org/packages/5c/d0/0c577d9325b05594fdd33aa970bf53fb673f051a45496842caee13cfd7fe/scikit_learn-1.7.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e5bf3d930aee75a65478df91ac1225ff89cd28e9ac7bd1196853a9229b6adb0b", size = 9478381, upload-time = "2025-09-09T08:20:47.982Z" },
{ url = "https://files.pythonhosted.org/packages/82/70/8bf44b933837ba8494ca0fc9a9ab60f1c13b062ad0197f60a56e2fc4c43e/scikit_learn-1.7.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4d6e9deed1a47aca9fe2f267ab8e8fe82ee20b4526b2c0cd9e135cea10feb44", size = 9300296, upload-time = "2025-09-09T08:20:50.366Z" },
{ url = "https://files.pythonhosted.org/packages/ae/93/a3038cb0293037fd335f77f31fe053b89c72f17b1c8908c576c29d953e84/scikit_learn-1.7.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b7dacaa05e5d76759fb071558a8b5130f4845166d88654a0f9bdf3eb57851b7", size = 9212382, upload-time = "2025-09-09T08:20:54.731Z" },
{ url = "https://files.pythonhosted.org/packages/40/dd/9a88879b0c1104259136146e4742026b52df8540c39fec21a6383f8292c7/scikit_learn-1.7.2-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:abebbd61ad9e1deed54cca45caea8ad5f79e1b93173dece40bb8e0c658dbe6fe", size = 8592042, upload-time = "2025-09-09T08:20:57.313Z" },
{ url = "https://files.pythonhosted.org/packages/46/af/c5e286471b7d10871b811b72ae794ac5fe2989c0a2df07f0ec723030f5f5/scikit_learn-1.7.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:502c18e39849c0ea1a5d681af1dbcf15f6cce601aebb657aabbfe84133c1907f", size = 9434180, upload-time = "2025-09-09T08:20:59.671Z" },
{ url = "https://files.pythonhosted.org/packages/f1/fd/df59faa53312d585023b2da27e866524ffb8faf87a68516c23896c718320/scikit_learn-1.7.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a4c328a71785382fe3fe676a9ecf2c86189249beff90bf85e22bdb7efaf9ae0", size = 9283660, upload-time = "2025-09-09T08:21:01.71Z" },
{ url = "https://files.pythonhosted.org/packages/55/87/ef5eb1f267084532c8e4aef98a28b6ffe7425acbfd64b5e2f2e066bc29b3/scikit_learn-1.7.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:9acb6c5e867447b4e1390930e3944a005e2cb115922e693c08a323421a6966e8", size = 9558731, upload-time = "2025-09-09T08:21:06.381Z" },
{ url = "https://files.pythonhosted.org/packages/93/f8/6c1e3fc14b10118068d7938878a9f3f4e6d7b74a8ddb1e5bed65159ccda8/scikit_learn-1.7.2-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:2a41e2a0ef45063e654152ec9d8bcfc39f7afce35b08902bfe290c2498a67a6a", size = 9038852, upload-time = "2025-09-09T08:21:08.628Z" },
{ url = "https://files.pythonhosted.org/packages/83/87/066cafc896ee540c34becf95d30375fe5cbe93c3b75a0ee9aa852cd60021/scikit_learn-1.7.2-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:98335fb98509b73385b3ab2bd0639b1f610541d3988ee675c670371d6a87aa7c", size = 9527094, upload-time = "2025-09-09T08:21:11.486Z" },
{ url = "https://files.pythonhosted.org/packages/9c/2b/4903e1ccafa1f6453b1ab78413938c8800633988c838aa0be386cbb33072/scikit_learn-1.7.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191e5550980d45449126e23ed1d5e9e24b2c68329ee1f691a3987476e115e09c", size = 9367436, upload-time = "2025-09-09T08:21:13.602Z" },
{ url = "https://files.pythonhosted.org/packages/d9/82/dee5acf66837852e8e68df6d8d3a6cb22d3df997b733b032f513d95205b7/scikit_learn-1.7.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fa8f63940e29c82d1e67a45d5297bdebbcb585f5a5a50c4914cc2e852ab77f33", size = 9208906, upload-time = "2025-09-09T08:21:18.557Z" },
{ url = "https://files.pythonhosted.org/packages/3c/30/9029e54e17b87cb7d50d51a5926429c683d5b4c1732f0507a6c3bed9bf65/scikit_learn-1.7.2-cp314-cp314-macosx_12_0_arm64.whl", hash = "sha256:f95dc55b7902b91331fa4e5845dd5bde0580c9cd9612b1b2791b7e80c3d32615", size = 8627836, upload-time = "2025-09-09T08:21:20.695Z" },
{ url = "https://files.pythonhosted.org/packages/60/18/4a52c635c71b536879f4b971c2cedf32c35ee78f48367885ed8025d1f7ee/scikit_learn-1.7.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9656e4a53e54578ad10a434dc1f993330568cfee176dff07112b8785fb413106", size = 9426236, upload-time = "2025-09-09T08:21:22.645Z" },
{ url = "https://files.pythonhosted.org/packages/99/7e/290362f6ab582128c53445458a5befd471ed1ea37953d5bcf80604619250/scikit_learn-1.7.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96dc05a854add0e50d3f47a1ef21a10a595016da5b007c7d9cd9d0bffd1fcc61", size = 9312593, upload-time = "2025-09-09T08:21:24.65Z" },
]

[[package]]