mirror of
https://github.com/paperless-ngx/paperless-ngx.git
synced 2026-02-26 21:36:26 +00:00
Compare commits
19 Commits
dependabot
...
feature-li
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4387635091 | ||
|
|
47d15273f9 | ||
|
|
53e2d9b850 | ||
|
|
33a26e50d9 | ||
|
|
3788144484 | ||
|
|
90da16f5d4 | ||
|
|
26501800e4 | ||
|
|
f03d8d1476 | ||
|
|
3a66ece118 | ||
|
|
17295a963a | ||
|
|
3ce4d3cfdd | ||
|
|
3d30bbbe48 | ||
|
|
81049476d9 | ||
|
|
679738e610 | ||
|
|
d9f8862e1f | ||
|
|
2863a32146 | ||
|
|
d73be8bf43 | ||
|
|
946e2367ca | ||
|
|
e19eddc078 |
2
.github/workflows/ci-backend.yml
vendored
2
.github/workflows/ci-backend.yml
vendored
@@ -129,7 +129,6 @@ jobs:
|
||||
run: |
|
||||
uv pip list
|
||||
- name: Check typing (pyrefly)
|
||||
continue-on-error: true
|
||||
run: |
|
||||
uv run pyrefly \
|
||||
check \
|
||||
@@ -144,7 +143,6 @@ jobs:
|
||||
${{ runner.os }}-mypy-py${{ env.DEFAULT_PYTHON }}-
|
||||
${{ runner.os }}-mypy-
|
||||
- name: Check typing (mypy)
|
||||
continue-on-error: true
|
||||
run: |
|
||||
uv run mypy \
|
||||
--show-error-codes \
|
||||
|
||||
@@ -96,7 +96,9 @@ src/documents/conditionals.py:0: error: Function is missing a type annotation fo
|
||||
src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/conditionals.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "input_doc" [attr-defined]
|
||||
src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "log" [attr-defined]
|
||||
src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
|
||||
src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
|
||||
src/documents/consumer.py:0: error: "ConsumerPluginMixin" has no attribute "metadata" [attr-defined]
|
||||
@@ -171,6 +173,7 @@ src/documents/filters.py:0: error: Function is missing a type annotation [no-un
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/filters.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
@@ -342,11 +345,18 @@ src/documents/migrations/0001_initial.py:0: error: Skipping analyzing "multisele
|
||||
src/documents/migrations/0008_sharelinkbundle.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/migrations/0008_sharelinkbundle.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/models.py:0: error: Argument 1 to "Path" has incompatible type "Path | None"; expected "str | PathLike[str]" [arg-type]
|
||||
src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.deleted_objects" [django-manager-missing]
|
||||
src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.global_objects" [django-manager-missing]
|
||||
src/documents/models.py:0: error: Could not resolve manager type for "documents.models.Document.objects" [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'custom_fields' for relation 'documents.models.CustomFieldInstance.document'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.correspondent'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.document_type'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'documents' for relation 'documents.models.Document.storage_path'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'fields' for relation 'documents.models.CustomFieldInstance.field'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'notes' for relation 'documents.models.Note.document'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'runs' for relation 'documents.models.WorkflowRun.workflow'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'share_links' for relation 'documents.models.ShareLink.document'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Couldn't resolve related manager 'workflow_runs' for relation 'documents.models.WorkflowRun.document'. [django-manager-missing]
|
||||
src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/models.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
@@ -440,9 +450,6 @@ src/documents/permissions.py:0: error: Item "list[str]" of "Any | list[str] | Qu
|
||||
src/documents/permissions.py:0: error: Item "list[str]" of "Any | list[str] | QuerySet[User, User]" has no attribute "exists" [union-attr]
|
||||
src/documents/permissions.py:0: error: Missing type parameters for generic type "QuerySet" [type-arg]
|
||||
src/documents/permissions.py:0: error: Missing type parameters for generic type "dict" [type-arg]
|
||||
src/documents/plugins/helpers.py:0: error: "Collection[str]" has no attribute "update" [attr-defined]
|
||||
src/documents/plugins/helpers.py:0: error: Argument 1 to "send" of "BaseStatusManager" has incompatible type "dict[str, Collection[str]]"; expected "dict[str, str | int | None]" [arg-type]
|
||||
src/documents/plugins/helpers.py:0: error: Argument 1 to "send" of "BaseStatusManager" has incompatible type "dict[str, Collection[str]]"; expected "dict[str, str | int | None]" [arg-type]
|
||||
src/documents/plugins/helpers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/plugins/helpers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/plugins/helpers.py:0: error: Skipping analyzing "channels_redis.pubsub": module is installed, but missing library stubs or py.typed marker [import-untyped]
|
||||
@@ -549,7 +556,6 @@ src/documents/serialisers.py:0: error: Function is missing a type annotation [n
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/serialisers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
@@ -667,7 +673,6 @@ src/documents/signals/handlers.py:0: error: Argument 3 to "validate_move" has in
|
||||
src/documents/signals/handlers.py:0: error: Argument 5 to "_suggestion_printer" has incompatible type "Any | None"; expected "MatchingModel" [arg-type]
|
||||
src/documents/signals/handlers.py:0: error: Argument 5 to "_suggestion_printer" has incompatible type "Any | None"; expected "MatchingModel" [arg-type]
|
||||
src/documents/signals/handlers.py:0: error: Argument 5 to "_suggestion_printer" has incompatible type "Any | None"; expected "MatchingModel" [arg-type]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
@@ -691,11 +696,15 @@ src/documents/signals/handlers.py:0: error: Function is missing a type annotatio
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/signals/handlers.py:0: error: Incompatible return value type (got "tuple[DocumentMetadataOverrides | None, str]", expected "tuple[DocumentMetadataOverrides, str] | None") [return-value]
|
||||
src/documents/signals/handlers.py:0: error: Incompatible types in assignment (expression has type "list[Tag]", variable has type "set[Tag]") [assignment]
|
||||
src/documents/signals/handlers.py:0: error: Incompatible types in assignment (expression has type "tuple[Any, Any, Any]", variable has type "tuple[Any, Any]") [assignment]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "refresh_from_db" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "save" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "source_path" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "tags" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "tags" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "ConsumableDocument" of "Document | ConsumableDocument" has no attribute "title" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
|
||||
src/documents/signals/handlers.py:0: error: Item "None" of "Any | None" has no attribute "get" [union-attr]
|
||||
@@ -966,6 +975,10 @@ src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annot
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
@@ -991,6 +1004,7 @@ src/documents/tests/test_bulk_edit.py:0: error: Item "dict[Any, Any]" of "Group
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Item "dict[Any, Any]" of "Group | dict[Any, Any]" has no attribute "count" [union-attr]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Too few arguments for "count" of "list" [call-arg]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Too few arguments for "count" of "list" [call-arg]
|
||||
src/documents/tests/test_bulk_edit.py:0: error: Unsupported operand types for - ("None" and "int") [operator]
|
||||
src/documents/tests/test_caching.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/documents/tests/test_classifier.py:0: error: "None" has no attribute "classes_" [attr-defined]
|
||||
src/documents/tests/test_classifier.py:0: error: "None" has no attribute "classes_" [attr-defined]
|
||||
@@ -1552,7 +1566,6 @@ src/documents/views.py:0: error: Function is missing a return type annotation [
|
||||
src/documents/views.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
src/documents/views.py:0: error: Function is missing a type annotation [no-untyped-def]
|
||||
@@ -1928,6 +1941,7 @@ src/paperless/tests/test_websockets.py:0: error: Item "None" of "BaseChannelLaye
|
||||
src/paperless/tests/test_websockets.py:0: error: Item "None" of "BaseChannelLayer | None" has no attribute "group_send" [union-attr]
|
||||
src/paperless/tests/test_websockets.py:0: error: Item "None" of "BaseChannelLayer | None" has no attribute "group_send" [union-attr]
|
||||
src/paperless/tests/test_websockets.py:0: error: Item "None" of "BaseChannelLayer | None" has no attribute "group_send" [union-attr]
|
||||
src/paperless/tests/test_websockets.py:0: error: Item "None" of "BaseChannelLayer | None" has no attribute "group_send" [union-attr]
|
||||
src/paperless/tests/test_websockets.py:0: error: TypedDict "_WebsocketTestScope" has no key "user" [typeddict-item]
|
||||
src/paperless/tests/test_websockets.py:0: error: TypedDict "_WebsocketTestScope" has no key "user" [typeddict-item]
|
||||
src/paperless/tests/test_websockets.py:0: error: TypedDict "_WebsocketTestScope" has no key "user" [typeddict-item]
|
||||
@@ -2105,6 +2119,7 @@ src/paperless_mail/mail.py:0: error: Function is missing a return type annotatio
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a return type annotation [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
src/paperless_mail/mail.py:0: error: Function is missing a type annotation for one or more arguments [no-untyped-def]
|
||||
|
||||
@@ -30,7 +30,7 @@ RUN set -eux \
|
||||
# Purpose: Installs s6-overlay and rootfs
|
||||
# Comments:
|
||||
# - Don't leave anything extra in here either
|
||||
FROM ghcr.io/astral-sh/uv:0.10.5-python3.12-trixie-slim AS s6-overlay-base
|
||||
FROM ghcr.io/astral-sh/uv:0.10.0-python3.12-trixie-slim AS s6-overlay-base
|
||||
|
||||
WORKDIR /usr/src/s6
|
||||
|
||||
|
||||
19
docs/api.md
19
docs/api.md
@@ -211,21 +211,6 @@ However, querying the tasks endpoint with the returned UUID e.g.
|
||||
`/api/tasks/?task_id={uuid}` will provide information on the state of the
|
||||
consumption including the ID of a created document if consumption succeeded.
|
||||
|
||||
## Document Versions
|
||||
|
||||
Document versions are file-level versions linked to one root document.
|
||||
|
||||
- Root document metadata (title, tags, correspondent, document type, storage path, custom fields, permissions) remains shared.
|
||||
- Version-specific file data (file, mime type, checksums, archive info, extracted text content) belongs to the selected/latest version.
|
||||
|
||||
Version-aware endpoints:
|
||||
|
||||
- `GET /api/documents/{id}/`: returns root document data; `content` resolves to latest version content by default. Use `?version={version_id}` to resolve content for a specific version.
|
||||
- `PATCH /api/documents/{id}/`: content updates target the selected version (`?version={version_id}`) or latest version by default; non-content metadata updates target the root document.
|
||||
- `GET /api/documents/{id}/download/`, `GET /api/documents/{id}/preview/`, `GET /api/documents/{id}/thumb/`, `GET /api/documents/{id}/metadata/`: accept `?version={version_id}`.
|
||||
- `POST /api/documents/{id}/update_version/`: uploads a new version using multipart form field `document` and optional `version_label`.
|
||||
- `DELETE /api/documents/{root_id}/versions/{version_id}/`: deletes a non-root version.
|
||||
|
||||
## Permissions
|
||||
|
||||
All objects (documents, tags, etc.) allow setting object-level permissions
|
||||
@@ -315,13 +300,13 @@ The following methods are supported:
|
||||
- `"doc": OUTPUT_DOCUMENT_INDEX` Optional index of the output document for split operations.
|
||||
- Optional `parameters`:
|
||||
- `"delete_original": true` to delete the original documents after editing.
|
||||
- `"update_document": true` to add the edited PDF as a new version of the root document.
|
||||
- `"update_document": true` to update the existing document with the edited PDF.
|
||||
- `"include_metadata": true` to copy metadata from the original document to the edited document.
|
||||
- `remove_password`
|
||||
- Requires `parameters`:
|
||||
- `"password": "PASSWORD_STRING"` The password to remove from the PDF documents.
|
||||
- Optional `parameters`:
|
||||
- `"update_document": true` to add the password-less PDF as a new version of the root document.
|
||||
- `"update_document": true` to replace the existing document with the password-less PDF.
|
||||
- `"delete_original": true` to delete the original document after editing.
|
||||
- `"include_metadata": true` to copy metadata from the original document to the new password-less document.
|
||||
- `merge`
|
||||
|
||||
@@ -358,7 +358,7 @@ If you want to build the documentation locally, this is how you do it:
|
||||
$ uv run zensical serve
|
||||
```
|
||||
|
||||
## Building the Docker image {#docker_build}
|
||||
## Building the Docker image
|
||||
|
||||
The docker image is primarily built by the GitHub actions workflow, but
|
||||
it can be faster when developing to build and tag an image locally.
|
||||
|
||||
562
docs/setup.md
562
docs/setup.md
@@ -4,74 +4,53 @@ title: Setup
|
||||
|
||||
# Installation
|
||||
|
||||
!!! tip "Quick Start"
|
||||
You can go multiple routes to setup and run Paperless:
|
||||
|
||||
- [Use the script to setup a Docker install](#docker_script)
|
||||
- [Use the Docker compose templates](#docker)
|
||||
- [Build the Docker image yourself](#docker_build)
|
||||
- [Install Paperless-ngx directly on your system manually ("bare metal")](#bare_metal)
|
||||
- A user-maintained list of commercial hosting providers can be found [in the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Related-Projects)
|
||||
|
||||
The Docker routes are quick & easy. These are the recommended routes.
|
||||
This configures all the stuff from the above automatically so that it
|
||||
just works and uses sensible defaults for all configuration options.
|
||||
Here you find a cheat-sheet for docker beginners: [CLI
|
||||
Basics](https://www.sehn.tech/refs/devops-with-docker/)
|
||||
|
||||
The bare metal route is complicated to setup but makes it easier should
|
||||
you want to contribute some code back. You need to configure and run the
|
||||
above mentioned components yourself.
|
||||
|
||||
### Use the Installation Script {#docker_script}
|
||||
|
||||
Paperless provides an interactive installation script to setup a Docker Compose
|
||||
installation. The script asks for a couple configuration options, and will then create the
|
||||
necessary configuration files, pull the docker image, start Paperless-ngx and create your superuser
|
||||
account. The script essentially automatically performs the steps described in [Docker setup](#docker).
|
||||
|
||||
1. Make sure that Docker and Docker Compose are [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
|
||||
|
||||
2. Download and run the installation script:
|
||||
|
||||
If you just want Paperless-ngx running quickly, use our installation script:
|
||||
```shell-session
|
||||
bash -c "$(curl --location --silent --show-error https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
|
||||
```
|
||||
_If piping into a shell directly from the internet makes you nervous, inspect [the script](https://github.com/paperless-ngx/paperless-ngx/blob/main/install-paperless-ngx.sh) first!_
|
||||
|
||||
## Overview
|
||||
!!! note
|
||||
|
||||
Choose the installation route that best fits your setup:
|
||||
macOS users will need to install [gnu-sed](https://formulae.brew.sh/formula/gnu-sed) with support
|
||||
for running as `sed` as well as [wget](https://formulae.brew.sh/formula/wget).
|
||||
|
||||
| Route | Best for | Effort |
|
||||
| ----------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------- | ------ |
|
||||
| [Installation script](#docker_script) | Fastest first-time setup with guided prompts (recommended for most users) | Low |
|
||||
| [Docker Compose templates](#docker) | Manual control over compose files and settings | Medium |
|
||||
| [Bare metal](#bare_metal) | Advanced setups, packaging, and development-adjacent workflows | High |
|
||||
| [Hosted providers (wiki)](https://github.com/paperless-ngx/paperless-ngx/wiki/Related-Projects#hosting-providers) | Managed hosting options maintained by the community — check details carefully | Varies |
|
||||
### Use Docker Compose {#docker}
|
||||
|
||||
For most users, Docker is the best option. It is faster to set up,
|
||||
easier to maintain, and ships with sensible defaults.
|
||||
1. Make sure that Docker and Docker Compose are [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
|
||||
|
||||
The bare-metal route gives you more control, but it requires manual
|
||||
installation and operation of all components. It is usually best suited
|
||||
for advanced users and contributors.
|
||||
|
||||
!!! info
|
||||
|
||||
Because [superuser](usage.md#superusers) accounts have full access to all objects and documents, you may want to create a separate user account for daily use,
|
||||
or "downgrade" your superuser account to a normal user account after setup.
|
||||
|
||||
## Installation Script {#docker_script}
|
||||
|
||||
Paperless-ngx provides an interactive script for Docker Compose setups.
|
||||
It asks a few configuration questions, then creates the required files,
|
||||
pulls the image, starts the containers, and creates your [superuser](usage.md#superusers)
|
||||
account. In short, it automates the [Docker Compose setup](#docker) described below.
|
||||
|
||||
#### Prerequisites
|
||||
|
||||
- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
|
||||
- macOS users will need [GNU sed](https://formulae.brew.sh/formula/gnu-sed) with support for running as `sed` as well as [wget](https://formulae.brew.sh/formula/wget).
|
||||
|
||||
#### Run the installation script
|
||||
|
||||
```shell-session
|
||||
bash -c "$(curl --location --silent --show-error https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
|
||||
```
|
||||
|
||||
#### After installation
|
||||
|
||||
Paperless-ngx should be available at `http://127.0.0.1:8000` (or similar,
|
||||
depending on your configuration) and you will be able to login with the
|
||||
credentials you provided during the installation script.
|
||||
|
||||
## Docker Compose Install {#docker}
|
||||
|
||||
#### Prerequisites
|
||||
|
||||
- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
|
||||
|
||||
#### Installation
|
||||
|
||||
1. Go to the [/docker/compose directory on the project
|
||||
2. Go to the [/docker/compose directory on the project
|
||||
page](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose){:target="\_blank"}
|
||||
and download one `docker-compose.*.yml` file for your preferred
|
||||
database backend. Save it in a local directory as `docker-compose.yml`.
|
||||
Also download `docker-compose.env` and `.env` into that same directory.
|
||||
and download one of the `docker-compose.*.yml` files, depending on which database backend
|
||||
you want to use. Place the files in a local directory and rename it `docker-compose.yml`. Download the
|
||||
`docker-compose.env` file and the `.env` file as well in the same directory.
|
||||
|
||||
If you want to enable optional support for Office and other documents, download a
|
||||
file with `-tika` in the file name.
|
||||
@@ -81,16 +60,15 @@ credentials you provided during the installation script.
|
||||
For new installations, it is recommended to use PostgreSQL as the
|
||||
database backend.
|
||||
|
||||
2. Modify `docker-compose.yml` as needed. For example, you may want to
|
||||
change the paths for `consume`, `media`, and other directories to
|
||||
use bind mounts.
|
||||
3. Modify `docker-compose.yml` as needed. For example, you may want to change the paths to the
|
||||
consumption, media etc. directories to use 'bind mounts'.
|
||||
Find the line that specifies where to mount the directory, e.g.:
|
||||
|
||||
```yaml
|
||||
- ./consume:/usr/src/paperless/consume
|
||||
```
|
||||
|
||||
Replace the part _before_ the colon with your local directory:
|
||||
Replace the part _before_ the colon with a local directory of your choice:
|
||||
|
||||
```yaml
|
||||
- /home/jonaswinkler/paperless-inbox:/usr/src/paperless/consume
|
||||
@@ -104,15 +82,38 @@ credentials you provided during the installation script.
|
||||
- 8010:8000
|
||||
```
|
||||
|
||||
3. Modify `docker-compose.env` with any configuration options you need.
|
||||
**Rootless**
|
||||
|
||||
!!! warning
|
||||
|
||||
It is currently not possible to run the container rootless if additional languages are specified via `PAPERLESS_OCR_LANGUAGES`.
|
||||
|
||||
If you want to run Paperless as a rootless container, you will need
|
||||
to do the following in your `docker-compose.yml`:
|
||||
|
||||
- set the `user` running the container to map to the `paperless`
|
||||
user in the container. This value (`user_id` below), should be
|
||||
the same id that `USERMAP_UID` and `USERMAP_GID` are set to in
|
||||
the next step. See `USERMAP_UID` and `USERMAP_GID`
|
||||
[here](configuration.md#docker).
|
||||
|
||||
Your entry for Paperless should contain something like:
|
||||
|
||||
> ```
|
||||
> webserver:
|
||||
> image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
> user: <user_id>
|
||||
> ```
|
||||
|
||||
4. Modify `docker-compose.env` with any configuration options you'd like.
|
||||
See the [configuration documentation](configuration.md) for all options.
|
||||
|
||||
You may also need to set `USERMAP_UID` and `USERMAP_GID` to
|
||||
the UID and GID of your user on the host system. Use `id -u` and
|
||||
`id -g` to get these values. This ensures both the container and the
|
||||
host user can write to the consumption directory. If your UID and
|
||||
GID are `1000` (the default for the first normal user on many
|
||||
systems), this usually works out of the box without
|
||||
the uid and gid of your user on the host system. Use `id -u` and
|
||||
`id -g` to get these. This ensures that both the container and the host
|
||||
user have write access to the consumption directory. If your UID
|
||||
and GID on the host system is 1000 (the default for the first normal
|
||||
user on most systems), it will work out of the box without any
|
||||
modifications. Run `id "username"` to check.
|
||||
|
||||
!!! note
|
||||
@@ -121,62 +122,78 @@ credentials you provided during the installation script.
|
||||
appending `_FILE` to configuration values. For example [`PAPERLESS_DBUSER`](configuration.md#PAPERLESS_DBUSER)
|
||||
can be set using `PAPERLESS_DBUSER_FILE=/var/run/secrets/password.txt`.
|
||||
|
||||
4. Run `docker compose pull`. This pulls the image from the GitHub container registry
|
||||
by default, but you can pull from Docker Hub by changing the `image`
|
||||
!!! warning
|
||||
|
||||
Some file systems such as NFS network shares don't support file
|
||||
system notifications with `inotify`. When storing the consumption
|
||||
directory on such a file system, paperless will not pick up new
|
||||
files with the default configuration. You will need to use
|
||||
[`PAPERLESS_CONSUMER_POLLING_INTERVAL`](configuration.md#PAPERLESS_CONSUMER_POLLING_INTERVAL), which will disable inotify.
|
||||
|
||||
5. Run `docker compose pull`. This will pull the image from the GitHub container registry
|
||||
by default but you can change the image to pull from Docker Hub by changing the `image`
|
||||
line to `image: paperlessngx/paperless-ngx:latest`.
|
||||
|
||||
5. Run `docker compose up -d`. This will create and start the necessary containers.
|
||||
6. Run `docker compose up -d`. This will create and start the necessary containers.
|
||||
|
||||
#### After installation
|
||||
7. Congratulations! Your Paperless-ngx instance should now be accessible at `http://127.0.0.1:8000`
|
||||
(or similar, depending on your configuration). When you first access the web interface, you will be
|
||||
prompted to create a superuser account.
|
||||
|
||||
Your Paperless-ngx instance should now be accessible at
|
||||
`http://127.0.0.1:8000` (or similar, depending on your configuration).
|
||||
When you first access the web interface, you will be prompted to create
|
||||
a [superuser](usage.md#superusers) account.
|
||||
### Build the Docker image yourself {#docker_build}
|
||||
|
||||
#### Optional Advanced Compose Configurations {#advanced_compose data-toc-label="Advanced Compose Configurations"}
|
||||
1. Clone the entire repository of paperless:
|
||||
|
||||
**Rootless**
|
||||
```shell-session
|
||||
git clone https://github.com/paperless-ngx/paperless-ngx
|
||||
```
|
||||
|
||||
!!! warning
|
||||
The main branch always reflects the latest stable version.
|
||||
|
||||
It is currently not possible to run the container rootless if additional languages are specified via `PAPERLESS_OCR_LANGUAGES`.
|
||||
2. Copy one of the `docker/compose/docker-compose.*.yml` to
|
||||
`docker-compose.yml` in the root folder, depending on which database
|
||||
backend you want to use. Copy `docker-compose.env` into the project
|
||||
root as well.
|
||||
|
||||
If you want to run Paperless as a rootless container, make this
|
||||
change in `docker-compose.yml`:
|
||||
3. In the `docker-compose.yml` file, find the line that instructs
|
||||
Docker Compose to pull the paperless image from Docker Hub:
|
||||
|
||||
- Set the `user` running the container to map to the `paperless`
|
||||
user in the container. This value (`user_id` below) should be
|
||||
the same ID that `USERMAP_UID` and `USERMAP_GID` are set to in
|
||||
`docker-compose.env`. See `USERMAP_UID` and `USERMAP_GID`
|
||||
[here](configuration.md#docker).
|
||||
```yaml
|
||||
webserver:
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
```
|
||||
|
||||
Your entry for Paperless should contain something like:
|
||||
and replace it with a line that instructs Docker Compose to build
|
||||
the image from the current working directory instead:
|
||||
|
||||
> ```
|
||||
> webserver:
|
||||
> image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
> user: <user_id>
|
||||
> ```
|
||||
```yaml
|
||||
webserver:
|
||||
build:
|
||||
context: .
|
||||
```
|
||||
|
||||
**File systems without inotify support (e.g. NFS)**
|
||||
4. Follow the [Docker setup](#docker) above except when asked to run
|
||||
`docker compose pull` to pull the image, run
|
||||
|
||||
Some file systems, such as NFS network shares, don't support file system
|
||||
notifications with `inotify`. When the consumption directory is on such a
|
||||
file system, Paperless-ngx will not pick up new files with the default
|
||||
configuration. Use [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING)
|
||||
to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
```shell-session
|
||||
docker compose build
|
||||
```
|
||||
|
||||
## Bare Metal Install {#bare_metal}
|
||||
instead to build the image.
|
||||
|
||||
#### Prerequisites
|
||||
### Bare Metal Route {#bare_metal}
|
||||
|
||||
- Paperless runs on Linux only, Windows is not supported.
|
||||
- Python 3 is required with versions 3.10 - 3.12 currently supported. Newer versions may work, but some dependencies may not be fully compatible.
|
||||
Paperless runs on linux only. The following procedure has been tested on
|
||||
a minimal installation of Debian/Buster, which is the current stable
|
||||
release at the time of writing. Windows is not and will never be
|
||||
supported.
|
||||
|
||||
#### Installation
|
||||
Paperless requires Python 3. At this time, 3.10 - 3.12 are tested versions.
|
||||
Newer versions may work, but some dependencies may not fully support newer versions.
|
||||
Support for older Python versions may be dropped as they reach end of life or as newer versions
|
||||
are released, dependency support is confirmed, etc.
|
||||
|
||||
1. Install dependencies. Paperless requires the following packages:
|
||||
1. Install dependencies. Paperless requires the following packages.
|
||||
|
||||
- `python3`
|
||||
- `python3-pip`
|
||||
@@ -239,8 +256,8 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
|
||||
2. Install `redis` >= 6.0 and configure it to start automatically.
|
||||
|
||||
3. Optional: Install `postgresql` and configure a database, user, and
|
||||
password for Paperless-ngx. If you do not wish to use PostgreSQL,
|
||||
3. Optional. Install `postgresql` and configure a database, user and
|
||||
password for paperless. If you do not wish to use PostgreSQL,
|
||||
MariaDB and SQLite are available as well.
|
||||
|
||||
!!! note
|
||||
@@ -249,60 +266,61 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
extension](https://code.djangoproject.com/wiki/JSON1Extension) is
|
||||
enabled. This is usually the case, but not always.
|
||||
|
||||
4. Create a system user with a new home folder in which you want
|
||||
to run Paperless-ngx.
|
||||
4. Create a system user with a new home folder under which you wish
|
||||
to run paperless.
|
||||
|
||||
```shell-session
|
||||
adduser paperless --system --home /opt/paperless --group
|
||||
```
|
||||
|
||||
5. Download a release archive from
|
||||
<https://github.com/paperless-ngx/paperless-ngx/releases>. For example:
|
||||
5. Get the release archive from
|
||||
<https://github.com/paperless-ngx/paperless-ngx/releases> for example with
|
||||
|
||||
```shell-session
|
||||
curl -O -L https://github.com/paperless-ngx/paperless-ngx/releases/download/vX.Y.Z/paperless-ngx-vX.Y.Z.tar.xz
|
||||
curl -O -L https://github.com/paperless-ngx/paperless-ngx/releases/download/v1.10.2/paperless-ngx-v1.10.2.tar.xz
|
||||
```
|
||||
|
||||
Extract the archive with
|
||||
|
||||
```shell-session
|
||||
tar -xf paperless-ngx-vX.Y.Z.tar.xz
|
||||
tar -xf paperless-ngx-v1.10.2.tar.xz
|
||||
```
|
||||
|
||||
and copy the contents to the home directory of the user you created
|
||||
earlier (`/opt/paperless`).
|
||||
and copy the contents to the
|
||||
home folder of the user you created before (`/opt/paperless`).
|
||||
|
||||
Optional: If you cloned the Git repository, you will need to
|
||||
compile the frontend yourself. See [here](development.md#front-end-development)
|
||||
Optional: If you cloned the git repo, you will have to
|
||||
compile the frontend yourself, see [here](development.md#front-end-development)
|
||||
and use the `build` step, not `serve`.
|
||||
|
||||
6. Configure Paperless-ngx. See [configuration](configuration.md) for details.
|
||||
6. Configure paperless. See [configuration](configuration.md) for details.
|
||||
Edit the included `paperless.conf` and adjust the settings to your
|
||||
needs. Required settings for getting Paperless-ngx running are:
|
||||
needs. Required settings for getting
|
||||
paperless running are:
|
||||
|
||||
- [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) should point to your Redis server, such as
|
||||
`redis://localhost:6379`.
|
||||
- [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) is optional, and should be one of `postgres`,
|
||||
- [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) should point to your redis server, such as
|
||||
<redis://localhost:6379>.
|
||||
- [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) optional, and should be one of `postgres`,
|
||||
`mariadb`, or `sqlite`
|
||||
- [`PAPERLESS_DBHOST`](configuration.md#PAPERLESS_DBHOST) should be the hostname on which your
|
||||
PostgreSQL server is running. Do not configure this to use
|
||||
SQLite instead. Also configure port, database name, user and
|
||||
password as necessary.
|
||||
- [`PAPERLESS_CONSUMPTION_DIR`](configuration.md#PAPERLESS_CONSUMPTION_DIR) should point to the folder
|
||||
that Paperless-ngx should watch for incoming documents.
|
||||
Likewise, [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) and
|
||||
[`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) define where Paperless-ngx stores its data.
|
||||
If needed, these can point to the same directory.
|
||||
- [`PAPERLESS_CONSUMPTION_DIR`](configuration.md#PAPERLESS_CONSUMPTION_DIR) should point to a folder which
|
||||
paperless should watch for documents. You might want to have
|
||||
this somewhere else. Likewise, [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) and
|
||||
[`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) define where paperless stores its data.
|
||||
If you like, you can point both to the same directory.
|
||||
- [`PAPERLESS_SECRET_KEY`](configuration.md#PAPERLESS_SECRET_KEY) should be a random sequence of
|
||||
characters. It's used for authentication. Failure to do so
|
||||
allows third parties to forge authentication credentials.
|
||||
- Set [`PAPERLESS_URL`](configuration.md#PAPERLESS_URL) if you are behind a reverse proxy. This should
|
||||
- [`PAPERLESS_URL`](configuration.md#PAPERLESS_URL) if you are behind a reverse proxy. This should
|
||||
point to your domain. Please see
|
||||
[configuration](configuration.md) for more
|
||||
information.
|
||||
|
||||
You can make many more adjustments, especially for OCR.
|
||||
The following options are recommended for most users:
|
||||
Many more adjustments can be made to paperless, especially the OCR
|
||||
part. The following options are recommended for everyone:
|
||||
|
||||
- Set [`PAPERLESS_OCR_LANGUAGE`](configuration.md#PAPERLESS_OCR_LANGUAGE) to the language most of your
|
||||
documents are written in.
|
||||
@@ -312,14 +330,15 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
|
||||
Ensure your Redis instance [is secured](https://redis.io/docs/latest/operate/oss_and_stack/management/security/).
|
||||
|
||||
7. Create the following directories if they do not already exist:
|
||||
7. Create the following directories if they are missing:
|
||||
|
||||
- `/opt/paperless/media`
|
||||
- `/opt/paperless/data`
|
||||
- `/opt/paperless/consume`
|
||||
|
||||
Adjust these paths if you configured different folders.
|
||||
Then verify that the `paperless` user has write permissions:
|
||||
Adjust as necessary if you configured different folders.
|
||||
Ensure that the paperless user has write permissions for every one
|
||||
of these folders with
|
||||
|
||||
```shell-session
|
||||
ls -l -d /opt/paperless/media
|
||||
@@ -333,44 +352,45 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
sudo chown paperless:paperless /opt/paperless/consume
|
||||
```
|
||||
|
||||
8. Install Python dependencies from `requirements.txt`.
|
||||
8. Install python requirements from the `requirements.txt` file.
|
||||
|
||||
```shell-session
|
||||
sudo -Hu paperless pip3 install -r requirements.txt
|
||||
```
|
||||
|
||||
This will install all Python dependencies in the home directory of
|
||||
This will install all python dependencies in the home directory of
|
||||
the new paperless user.
|
||||
|
||||
!!! tip
|
||||
|
||||
You can use a virtual environment if you prefer. If you do,
|
||||
you may need to adjust the example scripts for your virtual
|
||||
environment paths.
|
||||
It is up to you if you wish to use a virtual environment or not for the Python
|
||||
dependencies. This is an alternative to the above and may require adjusting
|
||||
the example scripts to utilize the virtual environment paths
|
||||
|
||||
!!! tip
|
||||
|
||||
If you use modern Python tooling, such as `uv`, installation will not include
|
||||
dependencies for PostgreSQL or MariaDB. You can select those
|
||||
extras with `--extra <EXTRA>`, or install all extras with
|
||||
`--all-extras`.
|
||||
dependencies for Postgres or Mariadb. You can select those extras with `--extra <EXTRA>`
|
||||
or all with `--all-extras`
|
||||
|
||||
9. Go to `/opt/paperless/src` and execute the following command:
|
||||
9. Go to `/opt/paperless/src`, and execute the following command:
|
||||
|
||||
```bash
|
||||
# This creates the database schema.
|
||||
sudo -Hu paperless python3 manage.py migrate
|
||||
```
|
||||
|
||||
10. Optional: Test that Paperless-ngx is working by running
|
||||
When you first access the web interface you will be prompted to create a superuser account.
|
||||
|
||||
10. Optional: Test that paperless is working by executing
|
||||
|
||||
```bash
|
||||
# Manually starts the webserver
|
||||
sudo -Hu paperless python3 manage.py runserver
|
||||
```
|
||||
|
||||
Then point your browser to `http://localhost:8000` if
|
||||
accessing from the same device on which Paperless-ngx is installed.
|
||||
and pointing your browser to http://localhost:8000 if
|
||||
accessing from the same device on which paperless is installed.
|
||||
If accessing from another machine, set up systemd services. You may need
|
||||
to set `PAPERLESS_DEBUG=true` in order for the development server to work
|
||||
normally in your browser.
|
||||
@@ -378,24 +398,23 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
!!! warning
|
||||
|
||||
This is a development server which should not be used in production.
|
||||
It is not audited for security, and performance is inferior to
|
||||
production-ready web servers.
|
||||
It is not audited for security and performance is inferior to
|
||||
production ready web servers.
|
||||
|
||||
!!! tip
|
||||
|
||||
This will not start the consumer. Paperless does this in a separate
|
||||
process.
|
||||
|
||||
11. Set up systemd services to run Paperless-ngx automatically. You may use
|
||||
11. Setup systemd services to run paperless automatically. You may use
|
||||
the service definition files included in the `scripts` folder as a
|
||||
starting point.
|
||||
|
||||
Paperless needs:
|
||||
|
||||
- The `webserver` script to run the webserver.
|
||||
- The `consumer` script to watch the input folder.
|
||||
- The `taskqueue` script for background workers (document consumption, etc.).
|
||||
- The `scheduler` script for periodic tasks such as email checking.
|
||||
Paperless needs the `webserver` script to run the webserver, the
|
||||
`consumer` script to watch the input folder, `taskqueue` for the
|
||||
background workers used to handle things like document consumption
|
||||
and the `scheduler` script to run tasks such as email checking at
|
||||
certain times.
|
||||
|
||||
!!! note
|
||||
|
||||
@@ -404,9 +423,9 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
`Require=paperless-webserver.socket` in the `webserver` script
|
||||
and configure `granian` to listen on port 80 (set `GRANIAN_PORT`).
|
||||
|
||||
These services rely on Redis and optionally the database server, but
|
||||
These services rely on redis and optionally the database server, but
|
||||
don't need to be started in any particular order. The example files
|
||||
depend on Redis being started. If you use a database server, you
|
||||
depend on redis being started. If you use a database server, you
|
||||
should add additional dependencies.
|
||||
|
||||
!!! note
|
||||
@@ -416,15 +435,18 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
|
||||
!!! warning
|
||||
|
||||
If Celery won't start, check
|
||||
If celery won't start (check with
|
||||
`sudo systemctl status paperless-task-queue.service` for
|
||||
`paperless-task-queue.service` and `paperless-scheduler.service`.
|
||||
You may need to change the path in the files. Example:
|
||||
paperless-task-queue.service and paperless-scheduler.service
|
||||
) you need to change the path in the files. Example:
|
||||
`ExecStart=/opt/paperless/.local/bin/celery --app paperless worker --loglevel INFO`
|
||||
|
||||
12. Configure ImageMagick to allow processing of PDF documents. Most
|
||||
12. Optional: Install a samba server and make the consumption folder
|
||||
available as a network share.
|
||||
|
||||
13. Configure ImageMagick to allow processing of PDF documents. Most
|
||||
distributions have this disabled by default, since PDF documents can
|
||||
contain malware. If you don't do this, Paperless-ngx will fall back to
|
||||
contain malware. If you don't do this, paperless will fall back to
|
||||
Ghostscript for certain steps such as thumbnail generation.
|
||||
|
||||
Edit `/etc/ImageMagick-6/policy.xml` and adjust
|
||||
@@ -439,38 +461,32 @@ to enable polling and disable inotify. See [here](configuration.md#polling).
|
||||
<policy domain="coder" rights="read|write" pattern="PDF" />
|
||||
```
|
||||
|
||||
**Optional: Install the [jbig2enc](https://ocrmypdf.readthedocs.io/en/latest/jbig2.html) encoder.**
|
||||
This will reduce the size of generated PDF documents. You'll most likely need to compile this yourself, because this
|
||||
software has been patented until around 2017 and binary packages are not available for most distributions.
|
||||
14. Optional: Install the
|
||||
[jbig2enc](https://ocrmypdf.readthedocs.io/en/latest/jbig2.html)
|
||||
encoder. This will reduce the size of generated PDF documents.
|
||||
You'll most likely need to compile this by yourself, because this
|
||||
software has been patented until around 2017 and binary packages are
|
||||
not available for most distributions.
|
||||
|
||||
**Optional: download the NLTK data**
|
||||
If using the NLTK machine-learning processing (see [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) for details),
|
||||
download the NLTK data for the Snowball Stemmer, Stopwords and Punkt tokenizer to `/usr/share/nltk_data`. Refer to the [NLTK
|
||||
instructions](https://www.nltk.org/data.html) for details on how to download the data.
|
||||
15. Optional: If using the NLTK machine learning processing (see
|
||||
[`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) for details),
|
||||
download the NLTK data for the Snowball
|
||||
Stemmer, Stopwords and Punkt tokenizer to `/usr/share/nltk_data`. Refer to the [NLTK
|
||||
instructions](https://www.nltk.org/data.html) for details on how to
|
||||
download the data.
|
||||
|
||||
#### After installation
|
||||
# Migrating to Paperless-ngx
|
||||
|
||||
Your Paperless-ngx instance should now be accessible at `http://localhost:8000` (or similar, depending on your configuration).
|
||||
When you first access the web interface you will be prompted to create a [superuser](usage.md#superusers) account.
|
||||
Migration is possible both from Paperless-ng or directly from the
|
||||
'original' Paperless.
|
||||
|
||||
## Build the Docker image yourself {#docker_build data-toc-label="Building the Docker image"}
|
||||
## Migrating from Paperless-ng
|
||||
|
||||
Building the Docker image yourself is typically used for development, but it can also be used for production
|
||||
if you want to customize the image. See [Building the Docker image](development.md#docker_build) in the
|
||||
development documentation.
|
||||
|
||||
## Migrating to Paperless-ngx
|
||||
|
||||
You can migrate to Paperless-ngx from Paperless-ng or from the original
|
||||
Paperless project.
|
||||
|
||||
<h3 id="migration_ng">Migrating from Paperless-ng</h3>
|
||||
|
||||
Paperless-ngx is meant to be a drop-in replacement for Paperless-ng, and
|
||||
upgrading should be trivial for most users, especially when using
|
||||
Docker. However, as with any major change, it is recommended to take a
|
||||
Paperless-ngx is meant to be a drop-in replacement for Paperless-ng and
|
||||
thus upgrading should be trivial for most users, especially when using
|
||||
docker. However, as with any major change, it is recommended to take a
|
||||
full backup first. Once you are ready, simply change the docker image to
|
||||
point to the new source. For example, if using Docker Compose, edit
|
||||
point to the new source. E.g. if using Docker Compose, edit
|
||||
`docker-compose.yml` and change:
|
||||
|
||||
```
|
||||
@@ -483,65 +499,66 @@ to
|
||||
image: ghcr.io/paperless-ngx/paperless-ngx:latest
|
||||
```
|
||||
|
||||
and then run `docker compose up -d`, which will pull the new image and
|
||||
recreate the container. That's it.
|
||||
and then run `docker compose up -d` which will pull the new image and
|
||||
recreate the container. That's it!
|
||||
|
||||
Users who installed with the bare-metal route should also update their
|
||||
Git clone to point to `https://github.com/paperless-ngx/paperless-ngx`,
|
||||
for example using:
|
||||
e.g. using the command
|
||||
`git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`
|
||||
and then pull the latest version.
|
||||
|
||||
<h3 id="migration_paperless">Migrating from Paperless</h3>
|
||||
## Migrating from Paperless
|
||||
|
||||
At its core, Paperless-ngx is still Paperless and fully compatible.
|
||||
At its core, paperless-ngx is still paperless and fully compatible.
|
||||
However, some things have changed under the hood, so you need to adapt
|
||||
your setup depending on how you installed Paperless.
|
||||
your setup depending on how you installed paperless.
|
||||
|
||||
This section describes how to update an existing Paperless Docker
|
||||
installation. Keep these points in mind:
|
||||
This setup describes how to update an existing paperless Docker
|
||||
installation. The important things to keep in mind are as follows:
|
||||
|
||||
- Read the [changelog](changelog.md) and
|
||||
take note of breaking changes.
|
||||
- Decide whether to stay on SQLite or migrate to PostgreSQL.
|
||||
See [documentation](#sqlite_to_psql) for details on moving data
|
||||
from SQLite to PostgreSQL. Both work fine with
|
||||
Paperless. However, if you already have a database server running
|
||||
for other services, you might as well use it for Paperless as well.
|
||||
- The task scheduler of Paperless, which is used to execute periodic
|
||||
- You should decide if you want to stick with SQLite or want to
|
||||
migrate your database to PostgreSQL. See [documentation](#sqlite_to_psql)
|
||||
for details on
|
||||
how to move your data from SQLite to PostgreSQL. Both work fine with
|
||||
paperless. However, if you already have a database server running
|
||||
for other services, you might as well use it for paperless as well.
|
||||
- The task scheduler of paperless, which is used to execute periodic
|
||||
tasks such as email checking and maintenance, requires a
|
||||
[Redis](https://redis.io/) message broker instance. The
|
||||
[redis](https://redis.io/) message broker instance. The
|
||||
Docker Compose route takes care of that.
|
||||
- The layout of the folder structure for your documents and data
|
||||
remains the same, so you can plug your old Docker volumes into
|
||||
remains the same, so you can just plug your old docker volumes into
|
||||
paperless-ngx and expect it to find everything where it should be.
|
||||
|
||||
Migration to Paperless-ngx is then performed in a few simple steps:
|
||||
Migration to paperless-ngx is then performed in a few simple steps:
|
||||
|
||||
1. Stop Paperless.
|
||||
1. Stop paperless.
|
||||
|
||||
```bash
|
||||
cd /path/to/current/paperless
|
||||
docker compose down
|
||||
```
|
||||
|
||||
2. Create a backup for two reasons: if something goes wrong, you still
|
||||
have your data; and if you don't like paperless-ngx, you can
|
||||
switch back to Paperless.
|
||||
2. Do a backup for two purposes: If something goes wrong, you still
|
||||
have your data. Second, if you don't like paperless-ngx, you can
|
||||
switch back to paperless.
|
||||
|
||||
3. Download the latest release of Paperless-ngx. You can either use
|
||||
3. Download the latest release of paperless-ngx. You can either go with
|
||||
the Docker Compose files from
|
||||
[here](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
|
||||
or clone the repository to build the image yourself (see
|
||||
[development docs](development.md#docker_build)). You can either replace your current paperless
|
||||
folder or put Paperless-ngx in
|
||||
[above](#docker_build)). You can
|
||||
either replace your current paperless folder or put paperless-ngx in
|
||||
a different location.
|
||||
|
||||
!!! warning
|
||||
|
||||
Paperless-ngx includes a `.env` file. This will set the project name
|
||||
for Docker Compose to `paperless`, which will also define the
|
||||
volume names created by Paperless-ngx. However, if you notice that
|
||||
for docker compose to `paperless`, which will also define the name
|
||||
of the volumes by paperless-ngx. However, if you experience that
|
||||
paperless-ngx is not using your old paperless volumes, verify the
|
||||
names of your volumes with
|
||||
|
||||
@@ -557,10 +574,10 @@ Migration to Paperless-ngx is then performed in a few simple steps:
|
||||
after you migrated your existing SQLite database.
|
||||
|
||||
5. Adjust `docker-compose.yml` and `docker-compose.env` to your needs.
|
||||
See [Docker setup](#docker) for details on
|
||||
which edits are recommended.
|
||||
See [Docker setup](#docker) for details on
|
||||
which edits are advised.
|
||||
|
||||
6. Follow the update procedure in [Update paperless](administration.md#updating).
|
||||
6. [Update paperless.](administration.md#updating)
|
||||
|
||||
7. In order to find your existing documents with the new search
|
||||
feature, you need to invoke a one-time operation that will create
|
||||
@@ -571,99 +588,136 @@ Migration to Paperless-ngx is then performed in a few simple steps:
|
||||
```
|
||||
|
||||
This will migrate your database and create the search index. After
|
||||
that, Paperless-ngx will maintain the index automatically.
|
||||
that, paperless will take care of maintaining the index by itself.
|
||||
|
||||
8. Start Paperless-ngx.
|
||||
8. Start paperless-ngx.
|
||||
|
||||
```bash
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
This will run Paperless-ngx in the background and automatically start it
|
||||
This will run paperless in the background and automatically start it
|
||||
on system boot.
|
||||
|
||||
9. Paperless may have installed a permanent redirect to `admin/` in your
|
||||
9. Paperless installed a permanent redirect to `admin/` in your
|
||||
browser. This redirect is still in place and prevents access to the
|
||||
new UI. Clear your browser cache to fix this.
|
||||
new UI. Clear your browsing cache in order to fix this.
|
||||
|
||||
10. Optionally, follow the instructions below to migrate your existing
|
||||
data to PostgreSQL.
|
||||
|
||||
<h3 id="migration_lsio">Migrating from LinuxServer.io Docker Image</h3>
|
||||
## Migrating from LinuxServer.io Docker Image
|
||||
|
||||
As with any upgrade or large change, it is highly recommended to
|
||||
As with any upgrades and large changes, it is highly recommended to
|
||||
create a backup before starting. This assumes the image was running
|
||||
using Docker Compose, but the instructions are translatable to Docker
|
||||
commands as well.
|
||||
|
||||
1. Stop and remove the Paperless container.
|
||||
2. If using an external database, stop that container.
|
||||
3. Update Redis configuration.
|
||||
1. Stop and remove the paperless container
|
||||
2. If using an external database, stop the container
|
||||
3. Update Redis configuration
|
||||
|
||||
1. If `REDIS_URL` is already set, change it to [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS)
|
||||
and continue to step 4.
|
||||
|
||||
1. Otherwise, add a new Redis service in `docker-compose.yml`,
|
||||
following [the example compose
|
||||
1. Otherwise, in the `docker-compose.yml` add a new service for
|
||||
Redis, following [the example compose
|
||||
files](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
|
||||
|
||||
1. Set the environment variable [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) so it points to
|
||||
the new Redis container.
|
||||
the new Redis container
|
||||
|
||||
4. Update user mapping.
|
||||
4. Update user mapping
|
||||
|
||||
1. If set, change the environment variable `PUID` to `USERMAP_UID`.
|
||||
1. If set, change the environment variable `PUID` to `USERMAP_UID`
|
||||
|
||||
1. If set, change the environment variable `PGID` to `USERMAP_GID`.
|
||||
1. If set, change the environment variable `PGID` to `USERMAP_GID`
|
||||
|
||||
5. Update configuration paths.
|
||||
5. Update configuration paths
|
||||
|
||||
1. Set the environment variable [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) to `/config`.
|
||||
1. Set the environment variable [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) to `/config`
|
||||
|
||||
6. Update media paths.
|
||||
6. Update media paths
|
||||
|
||||
1. Set the environment variable [`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) to
|
||||
`/data/media`.
|
||||
`/data/media`
|
||||
|
||||
7. Update timezone.
|
||||
7. Update timezone
|
||||
|
||||
1. Set the environment variable [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to the same
|
||||
value as `TZ`.
|
||||
value as `TZ`
|
||||
|
||||
8. Modify `image:` to point to
|
||||
8. Modify the `image:` to point to
|
||||
`ghcr.io/paperless-ngx/paperless-ngx:latest` or a specific version
|
||||
if preferred.
|
||||
9. Start the containers as before, using `docker compose`.
|
||||
|
||||
## Running Paperless-ngx on less powerful devices {#less-powerful-devices data-toc-label="Less Powerful Devices"}
|
||||
## Moving data from SQLite to PostgreSQL or MySQL/MariaDB {#sqlite_to_psql}
|
||||
|
||||
Paperless runs on Raspberry Pi. Some tasks can be slow on lower-powered
|
||||
hardware, but a few settings can improve performance:
|
||||
The best way to migrate between database types is to perform an [export](administration.md#exporter) and then
|
||||
[import](administration.md#importer) into a clean installation of Paperless-ngx.
|
||||
|
||||
## Moving back to Paperless
|
||||
|
||||
Let's say you migrated to Paperless-ngx and used it for a while, but
|
||||
decided that you don't like it and want to move back (If you do, send
|
||||
me a mail about what part you didn't like!), you can totally do that
|
||||
with a few simple steps.
|
||||
|
||||
Paperless-ngx modified the database schema slightly, however, these
|
||||
changes can be reverted while keeping your current data, so that your
|
||||
current data will be compatible with original Paperless. Thumbnails
|
||||
were also changed from PNG to WEBP format and will need to be
|
||||
re-generated.
|
||||
|
||||
Execute this:
|
||||
|
||||
```shell-session
|
||||
$ cd /path/to/paperless
|
||||
$ docker compose run --rm webserver migrate documents 0023
|
||||
```
|
||||
|
||||
Or without docker:
|
||||
|
||||
```shell-session
|
||||
$ cd /path/to/paperless/src
|
||||
$ python3 manage.py migrate documents 0023
|
||||
```
|
||||
|
||||
After regenerating thumbnails, you'll need to clear your cookies
|
||||
(Paperless-ngx comes with updated dependencies that do cookie-processing
|
||||
differently) and probably your cache as well.
|
||||
|
||||
# Considerations for less powerful devices {#less-powerful-devices}
|
||||
|
||||
Paperless runs on Raspberry Pi. However, some things are rather slow on
|
||||
the Pi and configuring some options in paperless can help improve
|
||||
performance immensely:
|
||||
|
||||
- Stick with SQLite to save some resources. See [troubleshooting](troubleshooting.md#log-reports-creating-paperlesstask-failed)
|
||||
if you encounter issues with SQLite locking.
|
||||
- If you do not need the filesystem-based consumer, consider disabling it
|
||||
entirely by setting [`PAPERLESS_CONSUMER_DISABLE`](configuration.md#PAPERLESS_CONSUMER_DISABLE) to `true`.
|
||||
- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that Paperless
|
||||
OCRs only the first page of your documents. In most cases, this page
|
||||
- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that paperless will
|
||||
only OCR the first page of your documents. In most cases, this page
|
||||
contains enough information to be able to find it.
|
||||
- [`PAPERLESS_TASK_WORKERS`](configuration.md#PAPERLESS_TASK_WORKERS) and [`PAPERLESS_THREADS_PER_WORKER`](configuration.md#PAPERLESS_THREADS_PER_WORKER) are
|
||||
configured to use all cores. The Raspberry Pi models 3 and up have 4
|
||||
cores, meaning that Paperless will use 2 workers and 2 threads per
|
||||
cores, meaning that paperless will use 2 workers and 2 threads per
|
||||
worker. This may result in sluggish response times during
|
||||
consumption, so you might want to lower these settings (example: 2
|
||||
workers and 1 thread to always have some computing power left for
|
||||
other tasks).
|
||||
- Keep [`PAPERLESS_OCR_MODE`](configuration.md#PAPERLESS_OCR_MODE) at its default value `skip` and consider
|
||||
OCRing your documents before feeding them into Paperless. Some
|
||||
OCR'ing your documents before feeding them into paperless. Some
|
||||
scanners are able to do this!
|
||||
- Set [`PAPERLESS_OCR_SKIP_ARCHIVE_FILE`](configuration.md#PAPERLESS_OCR_SKIP_ARCHIVE_FILE) to `with_text` to skip archive
|
||||
file generation for already OCRed documents, or `always` to skip it
|
||||
file generation for already ocr'ed documents, or `always` to skip it
|
||||
for all documents.
|
||||
- If you want to perform OCR on the device, consider using
|
||||
`PAPERLESS_OCR_CLEAN=none`. This will speed up OCR times and use
|
||||
less memory at the expense of slightly worse OCR results.
|
||||
- If using Docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
|
||||
- If using docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
|
||||
- Consider setting [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) to false, to disable the
|
||||
more advanced language processing, which can take more memory and
|
||||
processing time.
|
||||
@@ -675,19 +729,17 @@ For details, refer to [configuration](configuration.md).
|
||||
Updating the
|
||||
[automatic matching algorithm](advanced_usage.md#automatic-matching) takes quite a bit of time. However, the update mechanism
|
||||
checks if your data has changed before doing the heavy lifting. If you
|
||||
experience the algorithm taking too much CPU time, consider changing the
|
||||
experience the algorithm taking too much cpu time, consider changing the
|
||||
schedule in the admin interface to daily. You can also manually invoke
|
||||
the task by changing the date and time of the next run to today/now.
|
||||
|
||||
The actual matching of the algorithm is fast and works on Raspberry Pi
|
||||
as well as on any other device.
|
||||
|
||||
## Additional considerations
|
||||
# Using nginx as a reverse proxy {#nginx}
|
||||
|
||||
**Using a reverse proxy with Paperless-ngx**
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation of using nginx with Paperless-ngx.
|
||||
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation on using nginx with Paperless-ngx.
|
||||
# Enhancing security {#security}
|
||||
|
||||
**Enhancing security**
|
||||
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation on configuring security tools like Fail2ban with Paperless-ngx.
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation of how to configure security tools like Fail2ban with Paperless-ngx.
|
||||
|
||||
@@ -89,16 +89,6 @@ You can view the document, edit its metadata, assign tags, correspondents,
|
||||
document types, and custom fields. You can also view the document history,
|
||||
download the document or share it via a share link.
|
||||
|
||||
### Document File Versions
|
||||
|
||||
Think of versions as **file history** for a document.
|
||||
|
||||
- Versions track the underlying file and extracted text content (OCR/text).
|
||||
- Metadata such as tags, correspondent, document type, storage path and custom fields stay on the "root" document.
|
||||
- By default, search and document content use the latest version.
|
||||
- In document detail, selecting a version switches the preview, file metadata and content (and download etc buttons) to that version.
|
||||
- Deleting a non-root version keeps metadata and falls back to the latest remaining version.
|
||||
|
||||
### Management Lists
|
||||
|
||||
Paperless-ngx includes management lists for tags, correspondents, document types
|
||||
@@ -382,11 +372,6 @@ permissions can be granted to limit access to certain parts of the UI (and corre
|
||||
|
||||
Superusers can access all parts of the front and backend application as well as any and all objects. Superuser status can only be granted by another superuser.
|
||||
|
||||
!!! tip
|
||||
|
||||
Because superuser accounts can see all objects and documents, you may want to use a regular account for day-to-day use. Additional superuser accounts can
|
||||
be created via [cli](administration.md#create-superuser) or granted superuser status from an existing superuser account.
|
||||
|
||||
#### Admin Status
|
||||
|
||||
Admin status (Django 'staff status') grants access to viewing the paperless logs and the system status dialog
|
||||
@@ -579,18 +564,6 @@ For security reasons, webhooks can be limited to specific ports and disallowed f
|
||||
[configuration settings](configuration.md#workflow-webhooks) to change this behavior. If you are allowing non-admins to create workflows,
|
||||
you may want to adjust these settings to prevent abuse.
|
||||
|
||||
##### Move to Trash {#workflow-action-move-to-trash}
|
||||
|
||||
"Move to Trash" actions move the document to the trash. The document can be restored
|
||||
from the trash until the trash is emptied (after the configured delay or manually).
|
||||
|
||||
The "Move to Trash" action will always be executed at the end of the workflow run,
|
||||
regardless of its position in the action list. After a "Move to Trash" action is executed
|
||||
no other workflow will be executed on the document.
|
||||
|
||||
If a "Move to Trash" action is executed in a consume pipeline, the consumption
|
||||
will be aborted and the file will be deleted.
|
||||
|
||||
#### Workflow placeholders
|
||||
|
||||
Titles and webhook payloads can be generated by workflows using [Jinja templates](https://jinja.palletsprojects.com/en/3.1.x/templates/).
|
||||
|
||||
@@ -35,9 +35,8 @@ dependencies = [
|
||||
"django-cors-headers~=4.9.0",
|
||||
"django-extensions~=4.1",
|
||||
"django-filter~=25.1",
|
||||
"django-guardian~=3.3.0",
|
||||
"django-guardian~=3.2.0",
|
||||
"django-multiselectfield~=1.0.1",
|
||||
"django-rich~=2.2.0",
|
||||
"django-soft-delete~=1.0.18",
|
||||
"django-treenode>=0.23.2",
|
||||
"djangorestframework~=3.16",
|
||||
@@ -46,7 +45,7 @@ dependencies = [
|
||||
"drf-spectacular-sidecar~=2026.1.1",
|
||||
"drf-writable-nested~=0.7.1",
|
||||
"faiss-cpu>=1.10",
|
||||
"filelock~=3.24.3",
|
||||
"filelock~=3.20.0",
|
||||
"flower~=2.0.1",
|
||||
"gotenberg-client~=0.13.1",
|
||||
"httpx-oauth~=0.16",
|
||||
@@ -77,6 +76,7 @@ dependencies = [
|
||||
"setproctitle~=1.3.4",
|
||||
"tika-client~=0.10.0",
|
||||
"torch~=2.10.0",
|
||||
"tqdm~=4.67.1",
|
||||
"watchfiles>=1.1.1",
|
||||
"whitenoise~=6.11",
|
||||
"whoosh-reloaded>=2.7.5",
|
||||
@@ -114,8 +114,8 @@ testing = [
|
||||
"imagehash",
|
||||
"pytest~=9.0.0",
|
||||
"pytest-cov~=7.0.0",
|
||||
"pytest-django~=4.12.0",
|
||||
"pytest-env~=1.5.0",
|
||||
"pytest-django~=4.11.1",
|
||||
"pytest-env~=1.2.0",
|
||||
"pytest-httpx",
|
||||
"pytest-mock~=3.15.1",
|
||||
#"pytest-randomly~=4.0.1",
|
||||
@@ -149,6 +149,7 @@ typing = [
|
||||
"types-pytz",
|
||||
"types-redis",
|
||||
"types-setuptools",
|
||||
"types-tqdm",
|
||||
]
|
||||
|
||||
[tool.uv]
|
||||
@@ -303,7 +304,6 @@ markers = [
|
||||
"tika: Tests requiring Tika service",
|
||||
"greenmail: Tests requiring Greenmail service",
|
||||
"date_parsing: Tests which cover date parsing from content or filename",
|
||||
"management: Tests which cover management commands/functionality",
|
||||
]
|
||||
|
||||
[tool.pytest_env]
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -448,13 +448,6 @@
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
@case (WorkflowActionType.MoveToTrash) {
|
||||
<div class="row">
|
||||
<div class="col">
|
||||
<p class="text-muted small" i18n>The document will be moved to the trash at the end of the workflow run.</p>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
</div>
|
||||
</ng-template>
|
||||
|
||||
@@ -143,10 +143,6 @@ export const WORKFLOW_ACTION_OPTIONS = [
|
||||
id: WorkflowActionType.PasswordRemoval,
|
||||
name: $localize`Password removal`,
|
||||
},
|
||||
{
|
||||
id: WorkflowActionType.MoveToTrash,
|
||||
name: $localize`Move to trash`,
|
||||
},
|
||||
]
|
||||
|
||||
export enum TriggerFilterType {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<pngx-page-header [(title)]="title" [id]="documentId">
|
||||
@if (archiveContentRenderType === ContentRenderType.PDF && !useNativePdfViewer) {
|
||||
@if (previewNumPages) {
|
||||
<div class="input-group input-group-sm ms-2 d-none d-md-flex">
|
||||
<div class="input-group input-group-sm d-none d-md-flex">
|
||||
<div class="input-group-text" i18n>Page</div>
|
||||
<input class="form-control flex-grow-0 w-auto" type="number" min="1" [max]="previewNumPages" [(ngModel)]="previewCurrentPage" />
|
||||
<div class="input-group-text" i18n>of {{previewNumPages}}</div>
|
||||
@@ -24,16 +24,6 @@
|
||||
<i-bs width="1.2em" height="1.2em" name="trash"></i-bs><span class="d-none d-lg-inline ps-1" i18n>Delete</span>
|
||||
</button>
|
||||
|
||||
<pngx-document-version-dropdown
|
||||
[documentId]="documentId"
|
||||
[versions]="document?.versions ?? []"
|
||||
[selectedVersionId]="selectedVersionId"
|
||||
[userIsOwner]="userIsOwner"
|
||||
[userCanEdit]="userCanEdit"
|
||||
(versionSelected)="onVersionSelected($event)"
|
||||
(versionsUpdated)="onVersionsUpdated($event)"
|
||||
/>
|
||||
|
||||
<div class="btn-group">
|
||||
<button (click)="download()" class="btn btn-sm btn-outline-primary" [disabled]="downloading">
|
||||
@if (downloading) {
|
||||
@@ -44,21 +34,14 @@
|
||||
<span class="d-none d-lg-inline ps-1" i18n>Download</span>
|
||||
</button>
|
||||
|
||||
<div class="btn-group" ngbDropdown role="group">
|
||||
<button class="btn btn-sm btn-outline-primary dropdown-toggle" [disabled]="downloading" ngbDropdownToggle></button>
|
||||
<div class="dropdown-menu shadow" ngbDropdownMenu>
|
||||
@if (metadata?.has_archive_version) {
|
||||
@if (metadata?.has_archive_version) {
|
||||
<div class="btn-group" ngbDropdown role="group">
|
||||
<button class="btn btn-sm btn-outline-primary dropdown-toggle" [disabled]="downloading" ngbDropdownToggle></button>
|
||||
<div class="dropdown-menu shadow" ngbDropdownMenu>
|
||||
<button ngbDropdownItem (click)="download(true)" [disabled]="downloading" i18n>Download original</button>
|
||||
<div class="dropdown-divider"></div>
|
||||
}
|
||||
<form class="px-3 py-1">
|
||||
<div class="form-check">
|
||||
<input type="checkbox" class="form-check-input" id="downloadUseFormatting" [(ngModel)]="useFormattedFilename" [ngModelOptions]="{standalone: true}" />
|
||||
<label class="form-check-label" for="downloadUseFormatting" i18n>Use formatted filename</label>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="ms-auto" ngbDropdown>
|
||||
@@ -112,7 +95,6 @@
|
||||
<div class="col-md-6 col-xl-5 mb-4">
|
||||
|
||||
<form [formGroup]='documentForm' (ngSubmit)="save()">
|
||||
|
||||
<div class="btn-toolbar mb-1 border-bottom">
|
||||
<div class="btn-group pb-3">
|
||||
<button type="button" class="btn btn-sm btn-outline-secondary" i18n-title title="Close" (click)="close()">
|
||||
|
||||
@@ -65,6 +65,7 @@ import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { WebsocketStatusService } from 'src/app/services/websocket-status.service'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { ConfirmDialogComponent } from '../common/confirm-dialog/confirm-dialog.component'
|
||||
import { PasswordRemovalConfirmDialogComponent } from '../common/confirm-dialog/password-removal-confirm-dialog/password-removal-confirm-dialog.component'
|
||||
@@ -83,9 +84,9 @@ const doc: Document = {
|
||||
storage_path: 31,
|
||||
tags: [41, 42, 43],
|
||||
content: 'text content',
|
||||
added: new Date('May 4, 2014 03:24:00'),
|
||||
created: new Date('May 4, 2014 03:24:00'),
|
||||
modified: new Date('May 4, 2014 03:24:00'),
|
||||
added: new Date('May 4, 2014 03:24:00').toISOString(),
|
||||
created: new Date('May 4, 2014 03:24:00').toISOString(),
|
||||
modified: new Date('May 4, 2014 03:24:00').toISOString(),
|
||||
archive_serial_number: null,
|
||||
original_file_name: 'file.pdf',
|
||||
owner: null,
|
||||
@@ -294,27 +295,6 @@ describe('DocumentDetailComponent', () => {
|
||||
component = fixture.componentInstance
|
||||
})
|
||||
|
||||
function initNormally() {
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValueOnce(of(Object.assign({}, doc)))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
|
||||
jest
|
||||
.spyOn(openDocumentsService, 'openDocument')
|
||||
.mockReturnValueOnce(of(true))
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
count: customFields.length,
|
||||
all: customFields.map((f) => f.id),
|
||||
results: customFields,
|
||||
})
|
||||
)
|
||||
fixture.detectChanges()
|
||||
}
|
||||
|
||||
it('should load four tabs via url params', () => {
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
@@ -327,6 +307,29 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(component.activeNavID).toEqual(component.DocumentDetailNavIDs.Notes)
|
||||
})
|
||||
|
||||
it('should switch from preview to details when pdf preview enters the DOM', fakeAsync(() => {
|
||||
component.nav = {
|
||||
activeId: component.DocumentDetailNavIDs.Preview,
|
||||
select: jest.fn(),
|
||||
} as any
|
||||
;(component as any).pdfPreview = {
|
||||
nativeElement: { offsetParent: {} },
|
||||
}
|
||||
|
||||
tick()
|
||||
expect(component.nav.select).toHaveBeenCalledWith(
|
||||
component.DocumentDetailNavIDs.Details
|
||||
)
|
||||
}))
|
||||
|
||||
it('should forward title key up value to titleSubject', () => {
|
||||
const subjectSpy = jest.spyOn(component.titleSubject, 'next')
|
||||
|
||||
component.titleKeyUp({ target: { value: 'Updated title' } })
|
||||
|
||||
expect(subjectSpy).toHaveBeenCalledWith('Updated title')
|
||||
})
|
||||
|
||||
it('should change url on tab switch', () => {
|
||||
initNormally()
|
||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||
@@ -375,117 +378,6 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(component.document).toEqual(doc)
|
||||
})
|
||||
|
||||
it('should redirect to root when opening a version document id', () => {
|
||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 10, section: 'details' })))
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValueOnce(throwError(() => ({ status: 404 }) as any))
|
||||
const getRootSpy = jest
|
||||
.spyOn(documentService, 'getRootId')
|
||||
.mockReturnValue(of({ root_id: 3 }))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
|
||||
jest
|
||||
.spyOn(openDocumentsService, 'openDocument')
|
||||
.mockReturnValueOnce(of(true))
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
count: customFields.length,
|
||||
all: customFields.map((f) => f.id),
|
||||
results: customFields,
|
||||
})
|
||||
)
|
||||
|
||||
fixture.detectChanges()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
|
||||
expect(getRootSpy).toHaveBeenCalledWith(10)
|
||||
expect(navigateSpy).toHaveBeenCalledWith(['documents', 3, 'details'], {
|
||||
replaceUrl: true,
|
||||
})
|
||||
})
|
||||
|
||||
it('should navigate to 404 when root lookup fails', () => {
|
||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 10, section: 'details' })))
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValueOnce(throwError(() => ({ status: 404 }) as any))
|
||||
jest
|
||||
.spyOn(documentService, 'getRootId')
|
||||
.mockReturnValue(throwError(() => new Error('boom')))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
|
||||
jest
|
||||
.spyOn(openDocumentsService, 'openDocument')
|
||||
.mockReturnValueOnce(of(true))
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
count: customFields.length,
|
||||
all: customFields.map((f) => f.id),
|
||||
results: customFields,
|
||||
})
|
||||
)
|
||||
|
||||
fixture.detectChanges()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
|
||||
expect(navigateSpy).toHaveBeenCalledWith(['404'], { replaceUrl: true })
|
||||
})
|
||||
|
||||
it('should not render a delete button for the root/original version', () => {
|
||||
const docWithVersions = {
|
||||
...doc,
|
||||
versions: [
|
||||
{
|
||||
id: doc.id,
|
||||
added: new Date('2024-01-01T00:00:00Z'),
|
||||
version_label: 'Original',
|
||||
checksum: 'aaaa',
|
||||
is_root: true,
|
||||
},
|
||||
{
|
||||
id: 10,
|
||||
added: new Date('2024-01-02T00:00:00Z'),
|
||||
version_label: 'Edited',
|
||||
checksum: 'bbbb',
|
||||
is_root: false,
|
||||
},
|
||||
],
|
||||
} as Document
|
||||
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
|
||||
jest.spyOn(documentService, 'get').mockReturnValueOnce(of(docWithVersions))
|
||||
jest
|
||||
.spyOn(documentService, 'getMetadata')
|
||||
.mockReturnValue(of({ has_archive_version: true } as any))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
|
||||
jest
|
||||
.spyOn(openDocumentsService, 'openDocument')
|
||||
.mockReturnValueOnce(of(true))
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
count: customFields.length,
|
||||
all: customFields.map((f) => f.id),
|
||||
results: customFields,
|
||||
})
|
||||
)
|
||||
|
||||
fixture.detectChanges()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
fixture.detectChanges()
|
||||
|
||||
const deleteButtons = fixture.debugElement.queryAll(
|
||||
By.css('pngx-confirm-button')
|
||||
)
|
||||
expect(deleteButtons.length).toEqual(1)
|
||||
})
|
||||
|
||||
it('should fall back to details tab when duplicates tab is active but no duplicates', () => {
|
||||
initNormally()
|
||||
component.activeNavID = component.DocumentDetailNavIDs.Duplicates
|
||||
@@ -524,7 +416,7 @@ describe('DocumentDetailComponent', () => {
|
||||
jest.spyOn(documentService, 'get').mockReturnValue(
|
||||
of({
|
||||
...doc,
|
||||
modified: new Date('2024-01-02T00:00:00Z'),
|
||||
modified: '2024-01-02T00:00:00Z',
|
||||
duplicate_documents: updatedDuplicates,
|
||||
})
|
||||
)
|
||||
@@ -664,18 +556,6 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(navigateSpy).toHaveBeenCalledWith(['404'], { replaceUrl: true })
|
||||
})
|
||||
|
||||
it('discard should request the currently selected version', () => {
|
||||
initNormally()
|
||||
const getSpy = jest.spyOn(documentService, 'get')
|
||||
getSpy.mockClear()
|
||||
getSpy.mockReturnValueOnce(of(doc))
|
||||
|
||||
component.selectedVersionId = 10
|
||||
component.discard()
|
||||
|
||||
expect(getSpy).toHaveBeenCalledWith(component.documentId, 10)
|
||||
})
|
||||
|
||||
it('should 404 on invalid id', () => {
|
||||
const navigateSpy = jest.spyOn(router, 'navigate')
|
||||
jest
|
||||
@@ -728,18 +608,6 @@ describe('DocumentDetailComponent', () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('save should target currently selected version', () => {
|
||||
initNormally()
|
||||
component.selectedVersionId = 10
|
||||
const patchSpy = jest.spyOn(documentService, 'patch')
|
||||
patchSpy.mockReturnValue(of(doc))
|
||||
|
||||
component.save()
|
||||
|
||||
expect(patchSpy).toHaveBeenCalled()
|
||||
expect(patchSpy.mock.calls[0][1]).toEqual(10)
|
||||
})
|
||||
|
||||
it('should show toast error on save if error occurs', () => {
|
||||
currentUserHasObjectPermissions = true
|
||||
initNormally()
|
||||
@@ -1192,32 +1060,7 @@ describe('DocumentDetailComponent', () => {
|
||||
const metadataSpy = jest.spyOn(documentService, 'getMetadata')
|
||||
metadataSpy.mockReturnValue(of({ has_archive_version: true }))
|
||||
initNormally()
|
||||
expect(metadataSpy).toHaveBeenCalledWith(doc.id, null)
|
||||
})
|
||||
|
||||
it('should pass metadata version only for non-latest selected versions', () => {
|
||||
const metadataSpy = jest.spyOn(documentService, 'getMetadata')
|
||||
metadataSpy.mockReturnValue(of({ has_archive_version: true }))
|
||||
initNormally()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
|
||||
expect(metadataSpy).toHaveBeenCalledWith(doc.id, null)
|
||||
|
||||
metadataSpy.mockClear()
|
||||
component.document.versions = [
|
||||
{ id: doc.id, is_root: true },
|
||||
{ id: 10, is_root: false },
|
||||
] as any
|
||||
jest.spyOn(documentService, 'getPreviewUrl').mockReturnValue('preview-root')
|
||||
jest.spyOn(documentService, 'getThumbUrl').mockReturnValue('thumb-root')
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValue(of({ content: 'root' } as Document))
|
||||
|
||||
component.selectVersion(doc.id)
|
||||
httpTestingController.expectOne('preview-root').flush('root')
|
||||
|
||||
expect(metadataSpy).toHaveBeenCalledWith(doc.id, doc.id)
|
||||
expect(metadataSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should show an error if failed metadata retrieval', () => {
|
||||
@@ -1386,17 +1229,21 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(errorSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should warn when open document does not match doc retrieved from backend on init', () => {
|
||||
it('should show incoming update modal when open local draft is older than backend on init', () => {
|
||||
let openModal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((modals) => (openModal = modals[0]))
|
||||
const modalSpy = jest.spyOn(modalService, 'open')
|
||||
const openDoc = Object.assign({}, doc)
|
||||
const openDoc = Object.assign({}, doc, {
|
||||
__changedFields: ['title'],
|
||||
})
|
||||
// simulate a document being modified elsewhere and db updated
|
||||
doc.modified = new Date()
|
||||
const remoteDoc = Object.assign({}, doc, {
|
||||
modified: new Date(new Date(doc.modified).getTime() + 1000).toISOString(),
|
||||
})
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
|
||||
jest.spyOn(documentService, 'get').mockReturnValueOnce(of(doc))
|
||||
jest.spyOn(documentService, 'get').mockReturnValueOnce(of(remoteDoc))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
@@ -1406,11 +1253,185 @@ describe('DocumentDetailComponent', () => {
|
||||
})
|
||||
)
|
||||
fixture.detectChanges() // calls ngOnInit
|
||||
expect(modalSpy).toHaveBeenCalledWith(ConfirmDialogComponent)
|
||||
const closeSpy = jest.spyOn(openModal, 'close')
|
||||
expect(modalSpy).toHaveBeenCalledWith(ConfirmDialogComponent, {
|
||||
backdrop: 'static',
|
||||
})
|
||||
const confirmDialog = openModal.componentInstance as ConfirmDialogComponent
|
||||
confirmDialog.confirmClicked.next(confirmDialog)
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
expect(confirmDialog.messageBold).toContain('Document was updated at')
|
||||
})
|
||||
|
||||
it('should react to websocket document updated notifications', () => {
|
||||
initNormally()
|
||||
const updateMessage = {
|
||||
document_id: component.documentId,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
owner_id: 1,
|
||||
}
|
||||
const handleSpy = jest
|
||||
.spyOn(component as any, 'handleIncomingDocumentUpdated')
|
||||
.mockImplementation(() => {})
|
||||
const websocketStatusService = TestBed.inject(WebsocketStatusService)
|
||||
|
||||
websocketStatusService.handleDocumentUpdated(updateMessage)
|
||||
|
||||
expect(handleSpy).toHaveBeenCalledWith(updateMessage)
|
||||
})
|
||||
|
||||
it('should queue incoming update while network is active and flush after', () => {
|
||||
initNormally()
|
||||
const loadSpy = jest.spyOn(component as any, 'loadDocument')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
|
||||
component.networkActive = true
|
||||
;(component as any).handleIncomingDocumentUpdated({
|
||||
document_id: component.documentId,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
})
|
||||
|
||||
expect(loadSpy).not.toHaveBeenCalled()
|
||||
|
||||
component.networkActive = false
|
||||
;(component as any).flushPendingIncomingUpdate()
|
||||
|
||||
expect(loadSpy).toHaveBeenCalledWith(component.documentId, true)
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Document reloaded with latest changes.'
|
||||
)
|
||||
})
|
||||
|
||||
it('should ignore queued incoming update matching local save modified', () => {
|
||||
initNormally()
|
||||
const loadSpy = jest.spyOn(component as any, 'loadDocument')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
|
||||
component.networkActive = true
|
||||
;(component as any).lastLocalSaveModified = '2026-02-17T00:00:00+00:00'
|
||||
;(component as any).handleIncomingDocumentUpdated({
|
||||
document_id: component.documentId,
|
||||
modified: '2026-02-17T00:00:00+00:00',
|
||||
})
|
||||
|
||||
component.networkActive = false
|
||||
;(component as any).flushPendingIncomingUpdate()
|
||||
|
||||
expect(loadSpy).not.toHaveBeenCalled()
|
||||
expect(toastSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should clear pdf source if preview URL is empty', () => {
|
||||
component.pdfSource = { url: '/preview', password: 'secret' } as any
|
||||
component.previewUrl = null
|
||||
;(component as any).updatePdfSource()
|
||||
|
||||
expect(component.pdfSource).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should close incoming update modal if one is open', () => {
|
||||
const modalRef = { close: jest.fn() } as unknown as NgbModalRef
|
||||
;(component as any).incomingUpdateModal = modalRef
|
||||
;(component as any).closeIncomingUpdateModal()
|
||||
|
||||
expect(modalRef.close).toHaveBeenCalled()
|
||||
expect((component as any).incomingUpdateModal).toBeNull()
|
||||
})
|
||||
|
||||
it('should reload remote version when incoming update modal is confirmed', async () => {
|
||||
let openModal: NgbModalRef
|
||||
modalService.activeInstances.subscribe((modals) => (openModal = modals[0]))
|
||||
const reloadSpy = jest
|
||||
.spyOn(component as any, 'reloadRemoteVersion')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
;(component as any).showIncomingUpdateModal('2026-02-17T00:00:00Z')
|
||||
|
||||
const dialog = openModal.componentInstance as ConfirmDialogComponent
|
||||
dialog.confirmClicked.next()
|
||||
await openModal.result
|
||||
|
||||
expect(dialog.buttonsEnabled).toBe(false)
|
||||
expect(reloadSpy).toHaveBeenCalled()
|
||||
expect((component as any).incomingUpdateModal).toBeNull()
|
||||
})
|
||||
|
||||
it('should overwrite open document state when loading remote version with force', () => {
|
||||
const openDoc = Object.assign({}, doc, {
|
||||
title: 'Locally edited title',
|
||||
__changedFields: ['title'],
|
||||
})
|
||||
const remoteDoc = Object.assign({}, doc, {
|
||||
title: 'Remote title',
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
})
|
||||
jest.spyOn(documentService, 'get').mockReturnValue(of(remoteDoc))
|
||||
jest.spyOn(documentService, 'getMetadata').mockReturnValue(
|
||||
of({
|
||||
has_archive_version: false,
|
||||
original_mime_type: 'application/pdf',
|
||||
})
|
||||
)
|
||||
jest.spyOn(documentService, 'getSuggestions').mockReturnValue(
|
||||
of({
|
||||
suggested_tags: [],
|
||||
suggested_document_types: [],
|
||||
suggested_correspondents: [],
|
||||
})
|
||||
)
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
|
||||
const setDirtySpy = jest.spyOn(openDocumentsService, 'setDirty')
|
||||
const saveSpy = jest.spyOn(openDocumentsService, 'save')
|
||||
|
||||
;(component as any).loadDocument(doc.id, true)
|
||||
|
||||
expect(openDoc.title).toEqual('Remote title')
|
||||
expect(openDoc.__changedFields).toEqual([])
|
||||
expect(setDirtySpy).toHaveBeenCalledWith(openDoc, false)
|
||||
expect(saveSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should ignore incoming update for a different document id', () => {
|
||||
initNormally()
|
||||
const loadSpy = jest.spyOn(component as any, 'loadDocument')
|
||||
|
||||
;(component as any).handleIncomingDocumentUpdated({
|
||||
document_id: component.documentId + 1,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
})
|
||||
|
||||
expect(loadSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should show incoming update modal when local document has unsaved edits', () => {
|
||||
initNormally()
|
||||
jest.spyOn(openDocumentsService, 'isDirty').mockReturnValue(true)
|
||||
const modalSpy = jest
|
||||
.spyOn(component as any, 'showIncomingUpdateModal')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
;(component as any).handleIncomingDocumentUpdated({
|
||||
document_id: component.documentId,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
})
|
||||
|
||||
expect(modalSpy).toHaveBeenCalledWith('2026-02-17T00:00:00Z')
|
||||
})
|
||||
|
||||
it('should reload current document and show toast when reloading remote version', () => {
|
||||
component.documentId = doc.id
|
||||
const closeModalSpy = jest
|
||||
.spyOn(component as any, 'closeIncomingUpdateModal')
|
||||
.mockImplementation(() => {})
|
||||
const loadSpy = jest
|
||||
.spyOn(component as any, 'loadDocument')
|
||||
.mockImplementation(() => {})
|
||||
const notifySpy = jest.spyOn(component.docChangeNotifier, 'next')
|
||||
const toastSpy = jest.spyOn(toastService, 'showInfo')
|
||||
|
||||
;(component as any).reloadRemoteVersion()
|
||||
|
||||
expect(closeModalSpy).toHaveBeenCalled()
|
||||
expect(notifySpy).toHaveBeenCalledWith(doc.id)
|
||||
expect(loadSpy).toHaveBeenCalledWith(doc.id, true)
|
||||
expect(toastSpy).toHaveBeenCalledWith('Document reloaded.')
|
||||
})
|
||||
|
||||
it('should change preview element by render type', () => {
|
||||
@@ -1622,88 +1643,26 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(closeSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('selectVersion should update preview and handle preview failures', () => {
|
||||
const previewSpy = jest.spyOn(documentService, 'getPreviewUrl')
|
||||
initNormally()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
|
||||
previewSpy.mockReturnValueOnce('preview-version')
|
||||
jest.spyOn(documentService, 'getThumbUrl').mockReturnValue('thumb-version')
|
||||
function initNormally() {
|
||||
jest
|
||||
.spyOn(activatedRoute, 'paramMap', 'get')
|
||||
.mockReturnValue(of(convertToParamMap({ id: 3, section: 'details' })))
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValue(of({ content: 'version-content' } as Document))
|
||||
|
||||
component.selectVersion(10)
|
||||
httpTestingController.expectOne('preview-version').flush('version text')
|
||||
|
||||
expect(component.previewUrl).toBe('preview-version')
|
||||
expect(component.thumbUrl).toBe('thumb-version')
|
||||
expect(component.previewText).toBe('version text')
|
||||
expect(component.documentForm.get('content').value).toBe('version-content')
|
||||
const pdfSource = component.pdfSource as { url: string; password?: string }
|
||||
expect(pdfSource.url).toBe('preview-version')
|
||||
expect(pdfSource.password).toBeUndefined()
|
||||
|
||||
previewSpy.mockReturnValueOnce('preview-error')
|
||||
component.selectVersion(11)
|
||||
httpTestingController
|
||||
.expectOne('preview-error')
|
||||
.error(new ErrorEvent('fail'))
|
||||
|
||||
expect(component.previewText).toContain('An error occurred loading content')
|
||||
})
|
||||
|
||||
it('selectVersion should show toast if version content retrieval fails', () => {
|
||||
initNormally()
|
||||
httpTestingController.expectOne(component.previewUrl).flush('preview')
|
||||
|
||||
jest.spyOn(documentService, 'getPreviewUrl').mockReturnValue('preview-ok')
|
||||
jest.spyOn(documentService, 'getThumbUrl').mockReturnValue('thumb-ok')
|
||||
.mockReturnValueOnce(of(Object.assign({}, doc)))
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(null)
|
||||
jest
|
||||
.spyOn(documentService, 'getMetadata')
|
||||
.mockReturnValue(of({ has_archive_version: true } as any))
|
||||
const contentError = new Error('content failed')
|
||||
jest
|
||||
.spyOn(documentService, 'get')
|
||||
.mockReturnValue(throwError(() => contentError))
|
||||
const toastSpy = jest.spyOn(toastService, 'showError')
|
||||
|
||||
component.selectVersion(10)
|
||||
httpTestingController.expectOne('preview-ok').flush('preview text')
|
||||
|
||||
expect(toastSpy).toHaveBeenCalledWith(
|
||||
'Error retrieving version content',
|
||||
contentError
|
||||
.spyOn(openDocumentsService, 'openDocument')
|
||||
.mockReturnValueOnce(of(true))
|
||||
jest.spyOn(customFieldsService, 'listAll').mockReturnValue(
|
||||
of({
|
||||
count: customFields.length,
|
||||
all: customFields.map((f) => f.id),
|
||||
results: customFields,
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('onVersionSelected should delegate to selectVersion', () => {
|
||||
const selectVersionSpy = jest
|
||||
.spyOn(component, 'selectVersion')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
component.onVersionSelected(42)
|
||||
|
||||
expect(selectVersionSpy).toHaveBeenCalledWith(42)
|
||||
})
|
||||
|
||||
it('onVersionsUpdated should sync open document versions and save', () => {
|
||||
component.documentId = doc.id
|
||||
component.document = { ...doc, versions: [] } as Document
|
||||
const updatedVersions = [
|
||||
{ id: doc.id, is_root: true },
|
||||
{ id: 10, is_root: false },
|
||||
] as any
|
||||
const openDoc = { ...doc, versions: [] } as Document
|
||||
jest.spyOn(openDocumentsService, 'getOpenDocument').mockReturnValue(openDoc)
|
||||
const saveSpy = jest.spyOn(openDocumentsService, 'save')
|
||||
|
||||
component.onVersionsUpdated(updatedVersions)
|
||||
|
||||
expect(component.document.versions).toEqual(updatedVersions)
|
||||
expect(openDoc.versions).toEqual(updatedVersions)
|
||||
expect(saveSpy).toHaveBeenCalled()
|
||||
})
|
||||
fixture.detectChanges()
|
||||
}
|
||||
|
||||
it('createDisabled should return true if the user does not have permission to add the specified data type', () => {
|
||||
currentUserCan = false
|
||||
@@ -1721,6 +1680,14 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(component.createDisabled(DataType.Tag)).toBeFalsy()
|
||||
})
|
||||
|
||||
it('should expose add permission via userCanAdd getter', () => {
|
||||
currentUserCan = true
|
||||
expect(component.userCanAdd).toBeTruthy()
|
||||
|
||||
currentUserCan = false
|
||||
expect(component.userCanAdd).toBeFalsy()
|
||||
})
|
||||
|
||||
it('should call tryRenderTiff when no archive and file is tiff', () => {
|
||||
initNormally()
|
||||
const tiffRenderSpy = jest.spyOn(
|
||||
@@ -1797,88 +1764,6 @@ describe('DocumentDetailComponent', () => {
|
||||
expect(urlRevokeSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should include version in download and print only for non-latest selected version', () => {
|
||||
initNormally()
|
||||
component.document.versions = [
|
||||
{ id: doc.id, is_root: true },
|
||||
{ id: 10, is_root: false },
|
||||
] as any
|
||||
|
||||
const getDownloadUrlSpy = jest
|
||||
.spyOn(documentService, 'getDownloadUrl')
|
||||
.mockReturnValueOnce('download-latest')
|
||||
.mockReturnValueOnce('print-latest')
|
||||
.mockReturnValueOnce('download-non-latest')
|
||||
.mockReturnValueOnce('print-non-latest')
|
||||
|
||||
component.selectedVersionId = 10
|
||||
component.download()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
doc.id,
|
||||
false,
|
||||
null,
|
||||
false
|
||||
)
|
||||
httpTestingController
|
||||
.expectOne('download-latest')
|
||||
.error(new ProgressEvent('failed'))
|
||||
|
||||
component.printDocument()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(2, doc.id, false, null)
|
||||
httpTestingController
|
||||
.expectOne('print-latest')
|
||||
.error(new ProgressEvent('failed'))
|
||||
|
||||
component.selectedVersionId = doc.id
|
||||
component.download()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(
|
||||
3,
|
||||
doc.id,
|
||||
false,
|
||||
doc.id,
|
||||
false
|
||||
)
|
||||
httpTestingController
|
||||
.expectOne('download-non-latest')
|
||||
.error(new ProgressEvent('failed'))
|
||||
|
||||
component.printDocument()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(4, doc.id, false, doc.id)
|
||||
httpTestingController
|
||||
.expectOne('print-non-latest')
|
||||
.error(new ProgressEvent('failed'))
|
||||
})
|
||||
|
||||
it('should omit version in download and print when no version is selected', () => {
|
||||
initNormally()
|
||||
component.document.versions = [] as any
|
||||
;(component as any).selectedVersionId = undefined
|
||||
|
||||
const getDownloadUrlSpy = jest
|
||||
.spyOn(documentService, 'getDownloadUrl')
|
||||
.mockReturnValueOnce('download-no-version')
|
||||
.mockReturnValueOnce('print-no-version')
|
||||
|
||||
component.download()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(
|
||||
1,
|
||||
doc.id,
|
||||
false,
|
||||
null,
|
||||
false
|
||||
)
|
||||
httpTestingController
|
||||
.expectOne('download-no-version')
|
||||
.error(new ProgressEvent('failed'))
|
||||
|
||||
component.printDocument()
|
||||
expect(getDownloadUrlSpy).toHaveBeenNthCalledWith(2, doc.id, false, null)
|
||||
httpTestingController
|
||||
.expectOne('print-no-version')
|
||||
.error(new ProgressEvent('failed'))
|
||||
})
|
||||
|
||||
it('should download a file with the correct filename', () => {
|
||||
const mockBlob = new Blob(['test content'], { type: 'text/plain' })
|
||||
const mockResponse = new HttpResponse({
|
||||
|
||||
@@ -13,6 +13,7 @@ import {
|
||||
NgbDateStruct,
|
||||
NgbDropdownModule,
|
||||
NgbModal,
|
||||
NgbModalRef,
|
||||
NgbNav,
|
||||
NgbNavChangeEvent,
|
||||
NgbNavModule,
|
||||
@@ -36,7 +37,7 @@ import { Correspondent } from 'src/app/data/correspondent'
|
||||
import { CustomField, CustomFieldDataType } from 'src/app/data/custom-field'
|
||||
import { CustomFieldInstance } from 'src/app/data/custom-field-instance'
|
||||
import { DataType } from 'src/app/data/datatype'
|
||||
import { Document, DocumentVersionInfo } from 'src/app/data/document'
|
||||
import { Document } from 'src/app/data/document'
|
||||
import { DocumentMetadata } from 'src/app/data/document-metadata'
|
||||
import { DocumentNote } from 'src/app/data/document-note'
|
||||
import { DocumentSuggestions } from 'src/app/data/document-suggestions'
|
||||
@@ -80,6 +81,7 @@ import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import { WebsocketStatusService } from 'src/app/services/websocket-status.service'
|
||||
import { getFilenameFromContentDisposition } from 'src/app/utils/http'
|
||||
import { ISODateAdapter } from 'src/app/utils/ngb-iso-date-adapter'
|
||||
import * as UTIF from 'utif'
|
||||
@@ -120,7 +122,6 @@ import { SuggestionsDropdownComponent } from '../common/suggestions-dropdown/sug
|
||||
import { DocumentNotesComponent } from '../document-notes/document-notes.component'
|
||||
import { ComponentWithPermissions } from '../with-permissions/with-permissions.component'
|
||||
import { DocumentHistoryComponent } from './document-history/document-history.component'
|
||||
import { DocumentVersionDropdownComponent } from './document-version-dropdown/document-version-dropdown.component'
|
||||
import { MetadataCollapseComponent } from './metadata-collapse/metadata-collapse.component'
|
||||
|
||||
enum DocumentDetailNavIDs {
|
||||
@@ -143,6 +144,11 @@ enum ContentRenderType {
|
||||
TIFF = 'tiff',
|
||||
}
|
||||
|
||||
interface IncomingDocumentUpdate {
|
||||
document_id: number
|
||||
modified: string
|
||||
}
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-document-detail',
|
||||
templateUrl: './document-detail.component.html',
|
||||
@@ -178,7 +184,6 @@ enum ContentRenderType {
|
||||
TextAreaComponent,
|
||||
RouterModule,
|
||||
PngxPdfViewerComponent,
|
||||
DocumentVersionDropdownComponent,
|
||||
],
|
||||
})
|
||||
export class DocumentDetailComponent
|
||||
@@ -186,7 +191,6 @@ export class DocumentDetailComponent
|
||||
implements OnInit, OnDestroy, DirtyComponent
|
||||
{
|
||||
PdfRenderMode = PdfRenderMode
|
||||
|
||||
documentsService = inject(DocumentService)
|
||||
private route = inject(ActivatedRoute)
|
||||
private tagService = inject(TagService)
|
||||
@@ -208,6 +212,7 @@ export class DocumentDetailComponent
|
||||
private componentRouterService = inject(ComponentRouterService)
|
||||
private deviceDetectorService = inject(DeviceDetectorService)
|
||||
private savedViewService = inject(SavedViewService)
|
||||
private readonly websocketStatusService = inject(WebsocketStatusService)
|
||||
|
||||
@ViewChild('inputTitle')
|
||||
titleInput: TextComponent
|
||||
@@ -238,9 +243,6 @@ export class DocumentDetailComponent
|
||||
tiffURL: string
|
||||
tiffError: string
|
||||
|
||||
// Versioning
|
||||
selectedVersionId: number
|
||||
|
||||
correspondents: Correspondent[]
|
||||
documentTypes: DocumentType[]
|
||||
storagePaths: StoragePath[]
|
||||
@@ -267,6 +269,9 @@ export class DocumentDetailComponent
|
||||
isDirty$: Observable<boolean>
|
||||
unsubscribeNotifier: Subject<any> = new Subject()
|
||||
docChangeNotifier: Subject<any> = new Subject()
|
||||
private incomingUpdateModal: NgbModalRef
|
||||
private pendingIncomingUpdate: IncomingDocumentUpdate
|
||||
private lastLocalSaveModified: string | null = null
|
||||
|
||||
requiresPassword: boolean = false
|
||||
password: string
|
||||
@@ -276,7 +281,6 @@ export class DocumentDetailComponent
|
||||
customFields: CustomField[]
|
||||
|
||||
public downloading: boolean = false
|
||||
public useFormattedFilename: boolean = false
|
||||
|
||||
public readonly CustomFieldDataType = CustomFieldDataType
|
||||
|
||||
@@ -319,19 +323,13 @@ export class DocumentDetailComponent
|
||||
}
|
||||
|
||||
get archiveContentRenderType(): ContentRenderType {
|
||||
const hasArchiveVersion =
|
||||
this.metadata?.has_archive_version ?? !!this.document?.archived_file_name
|
||||
return hasArchiveVersion
|
||||
return this.document?.archived_file_name
|
||||
? this.getRenderType('application/pdf')
|
||||
: this.getRenderType(
|
||||
this.metadata?.original_mime_type || this.document?.mime_type
|
||||
)
|
||||
: this.getRenderType(this.document?.mime_type)
|
||||
}
|
||||
|
||||
get originalContentRenderType(): ContentRenderType {
|
||||
return this.getRenderType(
|
||||
this.metadata?.original_mime_type || this.document?.mime_type
|
||||
)
|
||||
return this.getRenderType(this.document?.mime_type)
|
||||
}
|
||||
|
||||
get showThumbnailOverlay(): boolean {
|
||||
@@ -361,46 +359,16 @@ export class DocumentDetailComponent
|
||||
}
|
||||
|
||||
private updatePdfSource() {
|
||||
if (!this.previewUrl) {
|
||||
this.pdfSource = undefined
|
||||
return
|
||||
}
|
||||
this.pdfSource = {
|
||||
url: this.previewUrl,
|
||||
password: this.password,
|
||||
password: this.password || undefined,
|
||||
}
|
||||
}
|
||||
|
||||
private loadMetadataForSelectedVersion() {
|
||||
const selectedVersionId = this.getSelectedNonLatestVersionId()
|
||||
this.documentsService
|
||||
.getMetadata(this.documentId, selectedVersionId)
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (result) => {
|
||||
this.metadata = result
|
||||
this.tiffURL = null
|
||||
this.tiffError = null
|
||||
if (this.archiveContentRenderType === ContentRenderType.TIFF) {
|
||||
this.tryRenderTiff()
|
||||
}
|
||||
if (
|
||||
this.archiveContentRenderType !== ContentRenderType.PDF ||
|
||||
this.useNativePdfViewer
|
||||
) {
|
||||
this.previewLoaded = true
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
this.metadata = {} // allow display to fallback to <object> tag
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving metadata`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
get isRTL() {
|
||||
if (!this.metadata || !this.metadata.lang) return false
|
||||
else {
|
||||
@@ -475,12 +443,59 @@ export class DocumentDetailComponent
|
||||
)
|
||||
}
|
||||
|
||||
private loadDocument(documentId: number): void {
|
||||
let redirectedToRoot = false
|
||||
this.selectedVersionId = documentId
|
||||
this.previewUrl = this.documentsService.getPreviewUrl(
|
||||
this.selectedVersionId
|
||||
private hasLocalEdits(doc: Document): boolean {
|
||||
return (
|
||||
this.openDocumentService.isDirty(doc) || !!doc.__changedFields?.length
|
||||
)
|
||||
}
|
||||
|
||||
private showIncomingUpdateModal(modified: string): void {
|
||||
if (this.incomingUpdateModal) return
|
||||
|
||||
const modal = this.modalService.open(ConfirmDialogComponent, {
|
||||
backdrop: 'static',
|
||||
})
|
||||
this.incomingUpdateModal = modal
|
||||
|
||||
let formattedModified = null
|
||||
const parsed = new Date(modified)
|
||||
formattedModified = parsed.toLocaleString()
|
||||
|
||||
modal.componentInstance.title = $localize`Document was updated`
|
||||
modal.componentInstance.messageBold = $localize`Document was updated at ${formattedModified}.`
|
||||
modal.componentInstance.message = $localize`Reload to discard your local unsaved edits and load the latest remote version.`
|
||||
modal.componentInstance.btnClass = 'btn-warning'
|
||||
modal.componentInstance.btnCaption = $localize`Reload`
|
||||
modal.componentInstance.cancelBtnCaption = $localize`Dismiss`
|
||||
|
||||
modal.componentInstance.confirmClicked.pipe(first()).subscribe(() => {
|
||||
modal.componentInstance.buttonsEnabled = false
|
||||
modal.close()
|
||||
this.reloadRemoteVersion()
|
||||
})
|
||||
modal.result.finally(() => {
|
||||
this.incomingUpdateModal = null
|
||||
})
|
||||
}
|
||||
|
||||
private closeIncomingUpdateModal() {
|
||||
if (!this.incomingUpdateModal) return
|
||||
this.incomingUpdateModal.close()
|
||||
this.incomingUpdateModal = null
|
||||
}
|
||||
|
||||
private flushPendingIncomingUpdate() {
|
||||
if (!this.pendingIncomingUpdate || this.networkActive) return
|
||||
const pendingUpdate = this.pendingIncomingUpdate
|
||||
this.pendingIncomingUpdate = null
|
||||
this.handleIncomingDocumentUpdated(pendingUpdate)
|
||||
}
|
||||
|
||||
private loadDocument(documentId: number, forceRemote: boolean = false): void {
|
||||
this.closeIncomingUpdateModal()
|
||||
this.pendingIncomingUpdate = null
|
||||
this.lastLocalSaveModified = null
|
||||
this.previewUrl = this.documentsService.getPreviewUrl(documentId)
|
||||
this.updatePdfSource()
|
||||
this.http
|
||||
.get(this.previewUrl, { responseType: 'text' })
|
||||
@@ -496,29 +511,11 @@ export class DocumentDetailComponent
|
||||
err.message ?? err.toString()
|
||||
}`),
|
||||
})
|
||||
this.thumbUrl = this.documentsService.getThumbUrl(this.selectedVersionId)
|
||||
this.thumbUrl = this.documentsService.getThumbUrl(documentId)
|
||||
this.documentsService
|
||||
.get(documentId)
|
||||
.pipe(
|
||||
catchError((error) => {
|
||||
if (error?.status === 404) {
|
||||
// if not found, check if there's root document that exists and redirect if so
|
||||
return this.documentsService.getRootId(documentId).pipe(
|
||||
map((result) => {
|
||||
const rootId = result?.root_id
|
||||
if (rootId && rootId !== documentId) {
|
||||
const section =
|
||||
this.route.snapshot.paramMap.get('section') || 'details'
|
||||
redirectedToRoot = true
|
||||
this.router.navigate(['documents', rootId, section], {
|
||||
replaceUrl: true,
|
||||
})
|
||||
}
|
||||
return null
|
||||
}),
|
||||
catchError(() => of(null))
|
||||
)
|
||||
}
|
||||
catchError(() => {
|
||||
// 404 is handled in the subscribe below
|
||||
return of(null)
|
||||
}),
|
||||
@@ -529,9 +526,6 @@ export class DocumentDetailComponent
|
||||
.subscribe({
|
||||
next: (doc) => {
|
||||
if (!doc) {
|
||||
if (redirectedToRoot) {
|
||||
return
|
||||
}
|
||||
this.router.navigate(['404'], { replaceUrl: true })
|
||||
return
|
||||
}
|
||||
@@ -545,21 +539,25 @@ export class DocumentDetailComponent
|
||||
openDocument.duplicate_documents = doc.duplicate_documents
|
||||
this.openDocumentService.save()
|
||||
}
|
||||
const useDoc = openDocument || doc
|
||||
if (openDocument) {
|
||||
if (
|
||||
new Date(doc.modified) > new Date(openDocument.modified) &&
|
||||
!this.modalService.hasOpenModals()
|
||||
) {
|
||||
const modal = this.modalService.open(ConfirmDialogComponent)
|
||||
modal.componentInstance.title = $localize`Document changes detected`
|
||||
modal.componentInstance.messageBold = $localize`The version of this document in your browser session appears older than the existing version.`
|
||||
modal.componentInstance.message = $localize`Saving the document here may overwrite other changes that were made. To restore the existing version, discard your changes or close the document.`
|
||||
modal.componentInstance.cancelBtnClass = 'visually-hidden'
|
||||
modal.componentInstance.btnCaption = $localize`Ok`
|
||||
modal.componentInstance.confirmClicked.subscribe(() =>
|
||||
modal.close()
|
||||
)
|
||||
let useDoc = openDocument || doc
|
||||
if (openDocument && forceRemote) {
|
||||
Object.assign(openDocument, doc)
|
||||
openDocument.__changedFields = []
|
||||
this.openDocumentService.setDirty(openDocument, false)
|
||||
this.openDocumentService.save()
|
||||
useDoc = openDocument
|
||||
} else if (openDocument) {
|
||||
if (new Date(doc.modified) > new Date(openDocument.modified)) {
|
||||
if (this.hasLocalEdits(openDocument)) {
|
||||
this.showIncomingUpdateModal(doc.modified)
|
||||
} else {
|
||||
// No local edits to preserve, so keep the tab in sync automatically.
|
||||
Object.assign(openDocument, doc)
|
||||
openDocument.__changedFields = []
|
||||
this.openDocumentService.setDirty(openDocument, false)
|
||||
this.openDocumentService.save()
|
||||
useDoc = openDocument
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.openDocumentService
|
||||
@@ -590,6 +588,50 @@ export class DocumentDetailComponent
|
||||
})
|
||||
}
|
||||
|
||||
private handleIncomingDocumentUpdated(data: IncomingDocumentUpdate): void {
|
||||
if (
|
||||
!this.documentId ||
|
||||
!this.document ||
|
||||
data.document_id !== this.documentId
|
||||
)
|
||||
return
|
||||
if (this.networkActive) {
|
||||
this.pendingIncomingUpdate = data
|
||||
return
|
||||
}
|
||||
// If modified timestamp of the incoming update is the same as the last local save,
|
||||
// we assume this update is from our own save and dont notify
|
||||
const incomingModified = data.modified
|
||||
if (
|
||||
incomingModified &&
|
||||
this.lastLocalSaveModified &&
|
||||
incomingModified === this.lastLocalSaveModified
|
||||
) {
|
||||
this.lastLocalSaveModified = null
|
||||
return
|
||||
}
|
||||
this.lastLocalSaveModified = null
|
||||
|
||||
if (this.openDocumentService.isDirty(this.document)) {
|
||||
this.showIncomingUpdateModal(data.modified)
|
||||
} else {
|
||||
this.docChangeNotifier.next(this.documentId)
|
||||
this.loadDocument(this.documentId, true)
|
||||
this.toastService.showInfo(
|
||||
$localize`Document reloaded with latest changes.`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
private reloadRemoteVersion() {
|
||||
if (!this.documentId) return
|
||||
|
||||
this.closeIncomingUpdateModal()
|
||||
this.docChangeNotifier.next(this.documentId)
|
||||
this.loadDocument(this.documentId, true)
|
||||
this.toastService.showInfo($localize`Document reloaded.`)
|
||||
}
|
||||
|
||||
ngOnInit(): void {
|
||||
this.setZoom(
|
||||
this.settings.get(SETTINGS_KEYS.PDF_VIEWER_ZOOM_SETTING) as PdfZoomScale
|
||||
@@ -648,6 +690,11 @@ export class DocumentDetailComponent
|
||||
|
||||
this.getCustomFields()
|
||||
|
||||
this.websocketStatusService
|
||||
.onDocumentUpdated()
|
||||
.pipe(takeUntil(this.unsubscribeNotifier))
|
||||
.subscribe((data) => this.handleIncomingDocumentUpdated(data))
|
||||
|
||||
this.route.paramMap
|
||||
.pipe(
|
||||
filter(
|
||||
@@ -748,15 +795,36 @@ export class DocumentDetailComponent
|
||||
|
||||
updateComponent(doc: Document) {
|
||||
this.document = doc
|
||||
// Default selected version is the newest version
|
||||
const versions = doc.versions ?? []
|
||||
this.selectedVersionId = versions.length
|
||||
? Math.max(...versions.map((version) => version.id))
|
||||
: doc.id
|
||||
this.previewLoaded = false
|
||||
this.requiresPassword = false
|
||||
this.updateFormForCustomFields()
|
||||
this.loadMetadataForSelectedVersion()
|
||||
if (this.archiveContentRenderType === ContentRenderType.TIFF) {
|
||||
this.tryRenderTiff()
|
||||
}
|
||||
this.documentsService
|
||||
.getMetadata(doc.id)
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (result) => {
|
||||
this.metadata = result
|
||||
if (
|
||||
this.archiveContentRenderType !== ContentRenderType.PDF ||
|
||||
this.useNativePdfViewer
|
||||
) {
|
||||
this.previewLoaded = true
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
this.metadata = {} // allow display to fallback to <object> tag
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving metadata`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
if (
|
||||
this.permissionsService.currentUserHasObjectPermissions(
|
||||
PermissionAction.Change,
|
||||
@@ -785,78 +853,6 @@ export class DocumentDetailComponent
|
||||
}
|
||||
}
|
||||
|
||||
// Update file preview and download target to a specific version (by document id)
|
||||
selectVersion(versionId: number) {
|
||||
this.selectedVersionId = versionId
|
||||
this.previewLoaded = false
|
||||
this.previewUrl = this.documentsService.getPreviewUrl(
|
||||
this.documentId,
|
||||
false,
|
||||
this.selectedVersionId
|
||||
)
|
||||
this.updatePdfSource()
|
||||
this.thumbUrl = this.documentsService.getThumbUrl(
|
||||
this.documentId,
|
||||
this.selectedVersionId
|
||||
)
|
||||
this.loadMetadataForSelectedVersion()
|
||||
this.documentsService
|
||||
.get(this.documentId, this.selectedVersionId, 'content')
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (doc) => {
|
||||
const content = doc?.content ?? ''
|
||||
this.document.content = content
|
||||
this.documentForm.patchValue(
|
||||
{
|
||||
content,
|
||||
},
|
||||
{
|
||||
emitEvent: false,
|
||||
}
|
||||
)
|
||||
},
|
||||
error: (error) => {
|
||||
this.toastService.showError(
|
||||
$localize`Error retrieving version content`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
// For text previews, refresh content
|
||||
this.http
|
||||
.get(this.previewUrl, { responseType: 'text' })
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
takeUntil(this.docChangeNotifier)
|
||||
)
|
||||
.subscribe({
|
||||
next: (res) => (this.previewText = res.toString()),
|
||||
error: (err) =>
|
||||
(this.previewText = $localize`An error occurred loading content: ${
|
||||
err.message ?? err.toString()
|
||||
}`),
|
||||
})
|
||||
}
|
||||
|
||||
onVersionSelected(versionId: number) {
|
||||
this.selectVersion(versionId)
|
||||
}
|
||||
|
||||
onVersionsUpdated(versions: DocumentVersionInfo[]) {
|
||||
this.document.versions = versions
|
||||
const openDoc = this.openDocumentService.getOpenDocument(this.documentId)
|
||||
if (openDoc) {
|
||||
openDoc.versions = versions
|
||||
this.openDocumentService.save()
|
||||
}
|
||||
}
|
||||
|
||||
get customFieldFormFields(): FormArray {
|
||||
return this.documentForm.get('custom_fields') as FormArray
|
||||
}
|
||||
@@ -1025,7 +1021,7 @@ export class DocumentDetailComponent
|
||||
|
||||
discard() {
|
||||
this.documentsService
|
||||
.get(this.documentId, this.selectedVersionId)
|
||||
.get(this.documentId)
|
||||
.pipe(
|
||||
first(),
|
||||
takeUntil(this.unsubscribeNotifier),
|
||||
@@ -1033,6 +1029,7 @@ export class DocumentDetailComponent
|
||||
)
|
||||
.subscribe({
|
||||
next: (doc) => {
|
||||
this.closeIncomingUpdateModal()
|
||||
Object.assign(this.document, doc)
|
||||
doc['permissions_form'] = {
|
||||
owner: doc.owner,
|
||||
@@ -1075,10 +1072,12 @@ export class DocumentDetailComponent
|
||||
this.networkActive = true
|
||||
;(document.activeElement as HTMLElement)?.dispatchEvent(new Event('change'))
|
||||
this.documentsService
|
||||
.patch(this.getChangedFields(), this.selectedVersionId)
|
||||
.patch(this.getChangedFields())
|
||||
.pipe(first())
|
||||
.subscribe({
|
||||
next: (docValues) => {
|
||||
this.closeIncomingUpdateModal()
|
||||
this.lastLocalSaveModified = docValues.modified ?? null
|
||||
// in case data changed while saving eg removing inbox_tags
|
||||
this.documentForm.patchValue(docValues)
|
||||
const newValues = Object.assign({}, this.documentForm.value)
|
||||
@@ -1093,16 +1092,19 @@ export class DocumentDetailComponent
|
||||
this.networkActive = false
|
||||
this.error = null
|
||||
if (close) {
|
||||
this.pendingIncomingUpdate = null
|
||||
this.close(() =>
|
||||
this.openDocumentService.refreshDocument(this.documentId)
|
||||
)
|
||||
} else {
|
||||
this.openDocumentService.refreshDocument(this.documentId)
|
||||
this.flushPendingIncomingUpdate()
|
||||
}
|
||||
this.savedViewService.maybeRefreshDocumentCounts()
|
||||
},
|
||||
error: (error) => {
|
||||
this.networkActive = false
|
||||
this.lastLocalSaveModified = null
|
||||
const canEdit =
|
||||
this.permissionsService.currentUserHasObjectPermissions(
|
||||
PermissionAction.Change,
|
||||
@@ -1122,6 +1124,7 @@ export class DocumentDetailComponent
|
||||
error
|
||||
)
|
||||
}
|
||||
this.flushPendingIncomingUpdate()
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -1130,7 +1133,7 @@ export class DocumentDetailComponent
|
||||
this.networkActive = true
|
||||
this.store.next(this.documentForm.value)
|
||||
this.documentsService
|
||||
.patch(this.getChangedFields(), this.selectedVersionId)
|
||||
.patch(this.getChangedFields())
|
||||
.pipe(
|
||||
switchMap((updateResult) => {
|
||||
this.savedViewService.maybeRefreshDocumentCounts()
|
||||
@@ -1158,8 +1161,11 @@ export class DocumentDetailComponent
|
||||
.pipe(first())
|
||||
.subscribe({
|
||||
next: ({ updateResult, nextDocId, closeResult }) => {
|
||||
this.closeIncomingUpdateModal()
|
||||
this.error = null
|
||||
this.networkActive = false
|
||||
this.pendingIncomingUpdate = null
|
||||
this.lastLocalSaveModified = null
|
||||
if (closeResult && updateResult && nextDocId) {
|
||||
this.router.navigate(['documents', nextDocId])
|
||||
this.titleInput?.focus()
|
||||
@@ -1167,8 +1173,10 @@ export class DocumentDetailComponent
|
||||
},
|
||||
error: (error) => {
|
||||
this.networkActive = false
|
||||
this.lastLocalSaveModified = null
|
||||
this.error = error.error
|
||||
this.toastService.showError($localize`Error saving document`, error)
|
||||
this.flushPendingIncomingUpdate()
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -1254,7 +1262,7 @@ export class DocumentDetailComponent
|
||||
.subscribe({
|
||||
next: () => {
|
||||
this.toastService.showInfo(
|
||||
$localize`Reprocess operation for "${this.document.title}" will begin in the background. Close and re-open or reload this document after the operation has completed to see new content.`
|
||||
$localize`Reprocess operation for "${this.document.title}" will begin in the background.`
|
||||
)
|
||||
if (modal) {
|
||||
modal.close()
|
||||
@@ -1273,25 +1281,11 @@ export class DocumentDetailComponent
|
||||
})
|
||||
}
|
||||
|
||||
private getSelectedNonLatestVersionId(): number | null {
|
||||
const versions = this.document?.versions ?? []
|
||||
if (!versions.length || !this.selectedVersionId) {
|
||||
return null
|
||||
}
|
||||
const latestVersionId = Math.max(...versions.map((version) => version.id))
|
||||
return this.selectedVersionId === latestVersionId
|
||||
? null
|
||||
: this.selectedVersionId
|
||||
}
|
||||
|
||||
download(original: boolean = false) {
|
||||
this.downloading = true
|
||||
const selectedVersionId = this.getSelectedNonLatestVersionId()
|
||||
const downloadUrl = this.documentsService.getDownloadUrl(
|
||||
this.documentId,
|
||||
original,
|
||||
selectedVersionId,
|
||||
this.useFormattedFilename
|
||||
original
|
||||
)
|
||||
this.http
|
||||
.get(downloadUrl, { observe: 'response', responseType: 'blob' })
|
||||
@@ -1723,11 +1717,9 @@ export class DocumentDetailComponent
|
||||
}
|
||||
|
||||
printDocument() {
|
||||
const selectedVersionId = this.getSelectedNonLatestVersionId()
|
||||
const printUrl = this.documentsService.getDownloadUrl(
|
||||
this.document.id,
|
||||
false,
|
||||
selectedVersionId
|
||||
false
|
||||
)
|
||||
this.http
|
||||
.get(printUrl, { responseType: 'blob' })
|
||||
@@ -1775,7 +1767,7 @@ export class DocumentDetailComponent
|
||||
const modal = this.modalService.open(ShareLinksDialogComponent)
|
||||
modal.componentInstance.documentId = this.document.id
|
||||
modal.componentInstance.hasArchiveVersion =
|
||||
this.metadata?.has_archive_version ?? !!this.document?.archived_file_name
|
||||
!!this.document?.archived_file_name
|
||||
}
|
||||
|
||||
get emailEnabled(): boolean {
|
||||
@@ -1788,7 +1780,7 @@ export class DocumentDetailComponent
|
||||
})
|
||||
modal.componentInstance.documentIds = [this.document.id]
|
||||
modal.componentInstance.hasArchiveVersion =
|
||||
this.metadata?.has_archive_version ?? !!this.document?.archived_file_name
|
||||
!!this.document?.archived_file_name
|
||||
}
|
||||
|
||||
private tryRenderTiff() {
|
||||
|
||||
@@ -1,134 +0,0 @@
|
||||
<div class="btn-group" ngbDropdown autoClose="outside">
|
||||
<button class="btn btn-sm btn-outline-secondary dropdown-toggle" ngbDropdownToggle>
|
||||
<i-bs name="file-earmark-diff"></i-bs>
|
||||
<span class="d-none d-lg-inline ps-1" i18n>Versions</span>
|
||||
</button>
|
||||
<div class="dropdown-menu shadow" ngbDropdownMenu>
|
||||
<div class="px-3 py-2 mb-2">
|
||||
@if (versionUploadState === UploadState.Idle) {
|
||||
<div class="input-group input-group-sm mb-2">
|
||||
<span class="input-group-text" i18n>Label</span>
|
||||
<input
|
||||
class="form-control"
|
||||
type="text"
|
||||
[(ngModel)]="newVersionLabel"
|
||||
i18n-placeholder
|
||||
placeholder="Optional"
|
||||
[disabled]="!userIsOwner || !userCanEdit"
|
||||
/>
|
||||
</div>
|
||||
<input
|
||||
#versionFileInput
|
||||
type="file"
|
||||
class="visually-hidden"
|
||||
(change)="onVersionFileSelected($event)"
|
||||
/>
|
||||
<button
|
||||
class="btn btn-sm btn-outline-secondary w-100"
|
||||
(click)="versionFileInput.click()"
|
||||
[disabled]="!userIsOwner || !userCanEdit"
|
||||
>
|
||||
<i-bs name="file-earmark-plus"></i-bs><span class="ps-1" i18n>Add new version</span>
|
||||
</button>
|
||||
} @else {
|
||||
@switch (versionUploadState) {
|
||||
@case (UploadState.Uploading) {
|
||||
<div class="small text-muted mt-1 d-flex align-items-center">
|
||||
<output class="spinner-border spinner-border-sm me-2" aria-hidden="true"></output>
|
||||
<span i18n>Uploading version...</span>
|
||||
</div>
|
||||
}
|
||||
@case (UploadState.Processing) {
|
||||
<div class="small text-muted mt-1 d-flex align-items-center">
|
||||
<output class="spinner-border spinner-border-sm me-2" aria-hidden="true"></output>
|
||||
<span i18n>Processing version...</span>
|
||||
</div>
|
||||
}
|
||||
@case (UploadState.Failed) {
|
||||
<div class="small text-danger mt-1 d-flex align-items-center justify-content-between">
|
||||
<span i18n>Version upload failed.</span>
|
||||
<button type="button" class="btn btn-link btn-sm p-0 ms-2" (click)="clearVersionUploadStatus()" i18n>Dismiss</button>
|
||||
</div>
|
||||
@if (versionUploadError) {
|
||||
<div class="small text-muted mt-1">{{ versionUploadError }}</div>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
</div>
|
||||
@for (version of versions; track version.id) {
|
||||
<div class="dropdown-item border-top px-0">
|
||||
<div class="d-flex align-items-center w-100 py-2 version-item">
|
||||
<div class="btn btn-link link-underline link-underline-opacity-0 d-flex align-items-center small text-start p-0 version-link"
|
||||
(click)="selectVersion(version.id)"
|
||||
>
|
||||
<div class="check mx-3">
|
||||
@if (selectedVersionId === version.id) {
|
||||
<i-bs name="check-circle"></i-bs>
|
||||
} @else {
|
||||
<i-bs class="text-muted" name="circle"></i-bs>
|
||||
}
|
||||
</div>
|
||||
<div class="d-flex flex-column">
|
||||
<div class="input-group input-group-sm mb-1">
|
||||
@if (isEditingVersion(version.id)) {
|
||||
<input
|
||||
class="form-control"
|
||||
type="text"
|
||||
[(ngModel)]="versionLabelDraft"
|
||||
i18n-placeholder
|
||||
placeholder="Version label"
|
||||
[disabled]="savingVersionLabelId !== null"
|
||||
(keydown.enter)="submitEditedVersionLabel(version, $event)"
|
||||
(keydown.escape)="cancelEditingVersion($event)"
|
||||
(click)="$event.stopPropagation()"
|
||||
/>
|
||||
} @else {
|
||||
<span class="input-group-text version-label">
|
||||
@if (version.version_label) {
|
||||
{{ version.version_label }}
|
||||
} @else {
|
||||
<span i18n>Version</span> #{{ version.id }}
|
||||
}
|
||||
</span>
|
||||
}
|
||||
@if (canEditLabels) {
|
||||
<button
|
||||
type="button"
|
||||
class="btn btn-outline-secondary"
|
||||
[disabled]="savingVersionLabelId !== null"
|
||||
(click)="isEditingVersion(version.id) ? submitEditedVersionLabel(version, $event) : beginEditingVersion(version, $event)"
|
||||
>
|
||||
@if (isEditingVersion(version.id)) {
|
||||
<i-bs width=".8rem" height=".8rem" name="check-lg"></i-bs>
|
||||
} @else {
|
||||
<i-bs width=".8rem" height=".8rem" name="pencil"></i-bs>
|
||||
}
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
<div class="d-flex text-muted small align-items-center mt-1">
|
||||
{{ version.added | customDate:'short' }}
|
||||
<div class="badge bg-light text-muted ms-auto">
|
||||
{{ version.checksum | slice:0:8 }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@if (!version.is_root) {
|
||||
<pngx-confirm-button
|
||||
buttonClasses="btn btn-sm btn-link text-danger mx-1"
|
||||
iconName="trash"
|
||||
confirmMessage="Delete this version?"
|
||||
i18n-confirmMessage
|
||||
[disabled]="!userIsOwner || !userCanEdit"
|
||||
(confirm)="deleteVersion(version.id)"
|
||||
>
|
||||
<span class="visually-hidden" i18n>Delete version</span>
|
||||
</pngx-confirm-button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -1,17 +0,0 @@
|
||||
.version-item {
|
||||
.check {
|
||||
width: 1rem;
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
> .version-link {
|
||||
.flex-column {
|
||||
width: 260px;
|
||||
}
|
||||
|
||||
.input-group .version-label, .input-group input {
|
||||
width: 140px;
|
||||
flex: 1 1 auto;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,324 +0,0 @@
|
||||
import { DatePipe } from '@angular/common'
|
||||
import { SimpleChange } from '@angular/core'
|
||||
import { ComponentFixture, TestBed } from '@angular/core/testing'
|
||||
import { NgxBootstrapIconsModule, allIcons } from 'ngx-bootstrap-icons'
|
||||
import { Subject, of, throwError } from 'rxjs'
|
||||
import { DocumentVersionInfo } from 'src/app/data/document'
|
||||
import { DocumentService } from 'src/app/services/rest/document.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import {
|
||||
UploadState,
|
||||
WebsocketStatusService,
|
||||
} from 'src/app/services/websocket-status.service'
|
||||
import { DocumentVersionDropdownComponent } from './document-version-dropdown.component'
|
||||
|
||||
describe('DocumentVersionDropdownComponent', () => {
|
||||
let component: DocumentVersionDropdownComponent
|
||||
let fixture: ComponentFixture<DocumentVersionDropdownComponent>
|
||||
let documentService: jest.Mocked<
|
||||
Pick<
|
||||
DocumentService,
|
||||
'deleteVersion' | 'getVersions' | 'uploadVersion' | 'updateVersionLabel'
|
||||
>
|
||||
>
|
||||
let toastService: jest.Mocked<Pick<ToastService, 'showError' | 'showInfo'>>
|
||||
let finished$: Subject<{ taskId: string }>
|
||||
let failed$: Subject<{ taskId: string; message?: string }>
|
||||
|
||||
beforeEach(async () => {
|
||||
finished$ = new Subject<{ taskId: string }>()
|
||||
failed$ = new Subject<{ taskId: string; message?: string }>()
|
||||
documentService = {
|
||||
deleteVersion: jest.fn(),
|
||||
getVersions: jest.fn(),
|
||||
uploadVersion: jest.fn(),
|
||||
updateVersionLabel: jest.fn(),
|
||||
}
|
||||
toastService = {
|
||||
showError: jest.fn(),
|
||||
showInfo: jest.fn(),
|
||||
}
|
||||
|
||||
await TestBed.configureTestingModule({
|
||||
imports: [
|
||||
DocumentVersionDropdownComponent,
|
||||
NgxBootstrapIconsModule.pick(allIcons),
|
||||
],
|
||||
providers: [
|
||||
DatePipe,
|
||||
{
|
||||
provide: DocumentService,
|
||||
useValue: documentService,
|
||||
},
|
||||
{
|
||||
provide: SettingsService,
|
||||
useValue: {
|
||||
get: () => null,
|
||||
},
|
||||
},
|
||||
{
|
||||
provide: ToastService,
|
||||
useValue: toastService,
|
||||
},
|
||||
{
|
||||
provide: WebsocketStatusService,
|
||||
useValue: {
|
||||
onDocumentConsumptionFinished: () => finished$,
|
||||
onDocumentConsumptionFailed: () => failed$,
|
||||
},
|
||||
},
|
||||
],
|
||||
}).compileComponents()
|
||||
|
||||
fixture = TestBed.createComponent(DocumentVersionDropdownComponent)
|
||||
component = fixture.componentInstance
|
||||
component.documentId = 3
|
||||
component.selectedVersionId = 3
|
||||
component.versions = [
|
||||
{
|
||||
id: 3,
|
||||
is_root: true,
|
||||
checksum: 'aaaa',
|
||||
},
|
||||
{
|
||||
id: 10,
|
||||
is_root: false,
|
||||
checksum: 'bbbb',
|
||||
},
|
||||
]
|
||||
fixture.detectChanges()
|
||||
})
|
||||
|
||||
it('selectVersion should emit the selected id', () => {
|
||||
const emitSpy = jest.spyOn(component.versionSelected, 'emit')
|
||||
component.selectVersion(10)
|
||||
expect(emitSpy).toHaveBeenCalledWith(10)
|
||||
})
|
||||
|
||||
it('deleteVersion should refresh versions and select fallback when deleting current selection', () => {
|
||||
const updatedVersions: DocumentVersionInfo[] = [
|
||||
{ id: 3, is_root: true, checksum: 'aaaa' },
|
||||
{ id: 20, is_root: false, checksum: 'cccc' },
|
||||
]
|
||||
component.selectedVersionId = 10
|
||||
documentService.deleteVersion.mockReturnValue(
|
||||
of({ result: 'deleted', current_version_id: 3 })
|
||||
)
|
||||
documentService.getVersions.mockReturnValue(
|
||||
of({ id: 3, versions: updatedVersions } as any)
|
||||
)
|
||||
const versionsEmitSpy = jest.spyOn(component.versionsUpdated, 'emit')
|
||||
const selectedEmitSpy = jest.spyOn(component.versionSelected, 'emit')
|
||||
|
||||
component.deleteVersion(10)
|
||||
|
||||
expect(documentService.deleteVersion).toHaveBeenCalledWith(3, 10)
|
||||
expect(documentService.getVersions).toHaveBeenCalledWith(3)
|
||||
expect(versionsEmitSpy).toHaveBeenCalledWith(updatedVersions)
|
||||
expect(selectedEmitSpy).toHaveBeenCalledWith(3)
|
||||
})
|
||||
|
||||
it('deleteVersion should show an error toast on failure', () => {
|
||||
const error = new Error('delete failed')
|
||||
documentService.deleteVersion.mockReturnValue(throwError(() => error))
|
||||
|
||||
component.deleteVersion(10)
|
||||
|
||||
expect(toastService.showError).toHaveBeenCalledWith(
|
||||
'Error deleting version',
|
||||
error
|
||||
)
|
||||
})
|
||||
|
||||
it('beginEditingVersion should set active row and draft label', () => {
|
||||
component.userCanEdit = true
|
||||
component.userIsOwner = true
|
||||
const version = {
|
||||
id: 10,
|
||||
is_root: false,
|
||||
checksum: 'bbbb',
|
||||
version_label: 'Current',
|
||||
} as DocumentVersionInfo
|
||||
|
||||
component.beginEditingVersion(version)
|
||||
|
||||
expect(component.editingVersionId).toEqual(10)
|
||||
expect(component.versionLabelDraft).toEqual('Current')
|
||||
})
|
||||
|
||||
it('submitEditedVersionLabel should close editor without save if unchanged', () => {
|
||||
const version = {
|
||||
id: 10,
|
||||
is_root: false,
|
||||
checksum: 'bbbb',
|
||||
version_label: 'Current',
|
||||
} as DocumentVersionInfo
|
||||
const saveSpy = jest.spyOn(component, 'saveVersionLabel')
|
||||
component.editingVersionId = 10
|
||||
component.versionLabelDraft = ' Current '
|
||||
|
||||
component.submitEditedVersionLabel(version)
|
||||
|
||||
expect(saveSpy).not.toHaveBeenCalled()
|
||||
expect(component.editingVersionId).toBeNull()
|
||||
expect(component.versionLabelDraft).toEqual('')
|
||||
})
|
||||
|
||||
it('submitEditedVersionLabel should call saveVersionLabel when changed', () => {
|
||||
const version = {
|
||||
id: 10,
|
||||
is_root: false,
|
||||
checksum: 'bbbb',
|
||||
version_label: 'Current',
|
||||
} as DocumentVersionInfo
|
||||
const saveSpy = jest
|
||||
.spyOn(component, 'saveVersionLabel')
|
||||
.mockImplementation(() => {})
|
||||
component.editingVersionId = 10
|
||||
component.versionLabelDraft = ' Updated '
|
||||
|
||||
component.submitEditedVersionLabel(version)
|
||||
|
||||
expect(saveSpy).toHaveBeenCalledWith(10, 'Updated')
|
||||
expect(component.editingVersionId).toBeNull()
|
||||
})
|
||||
|
||||
it('saveVersionLabel should update the version and emit versionsUpdated', () => {
|
||||
documentService.updateVersionLabel.mockReturnValue(
|
||||
of({
|
||||
id: 10,
|
||||
version_label: 'Updated',
|
||||
is_root: false,
|
||||
} as any)
|
||||
)
|
||||
const emitSpy = jest.spyOn(component.versionsUpdated, 'emit')
|
||||
|
||||
component.saveVersionLabel(10, 'Updated')
|
||||
|
||||
expect(documentService.updateVersionLabel).toHaveBeenCalledWith(
|
||||
3,
|
||||
10,
|
||||
'Updated'
|
||||
)
|
||||
expect(emitSpy).toHaveBeenCalledWith([
|
||||
{ id: 3, is_root: true, checksum: 'aaaa' },
|
||||
{ id: 10, is_root: false, checksum: 'bbbb', version_label: 'Updated' },
|
||||
])
|
||||
expect(component.savingVersionLabelId).toBeNull()
|
||||
})
|
||||
|
||||
it('saveVersionLabel should show error toast on failure', () => {
|
||||
const error = new Error('save failed')
|
||||
documentService.updateVersionLabel.mockReturnValue(throwError(() => error))
|
||||
|
||||
component.saveVersionLabel(10, 'Updated')
|
||||
|
||||
expect(toastService.showError).toHaveBeenCalledWith(
|
||||
'Error updating version label',
|
||||
error
|
||||
)
|
||||
expect(component.savingVersionLabelId).toBeNull()
|
||||
})
|
||||
|
||||
it('onVersionFileSelected should upload and update versions after websocket success', () => {
|
||||
const versions: DocumentVersionInfo[] = [
|
||||
{ id: 3, is_root: true, checksum: 'aaaa' },
|
||||
{ id: 20, is_root: false, checksum: 'cccc' },
|
||||
]
|
||||
const file = new File(['test'], 'new-version.pdf', {
|
||||
type: 'application/pdf',
|
||||
})
|
||||
const input = document.createElement('input')
|
||||
Object.defineProperty(input, 'files', { value: [file] })
|
||||
component.newVersionLabel = ' Updated scan '
|
||||
documentService.uploadVersion.mockReturnValue(
|
||||
of({ task_id: 'task-1' } as any)
|
||||
)
|
||||
documentService.getVersions.mockReturnValue(of({ id: 3, versions } as any))
|
||||
const versionsEmitSpy = jest.spyOn(component.versionsUpdated, 'emit')
|
||||
const selectedEmitSpy = jest.spyOn(component.versionSelected, 'emit')
|
||||
|
||||
component.onVersionFileSelected({ target: input } as Event)
|
||||
finished$.next({ taskId: 'task-1' })
|
||||
|
||||
expect(documentService.uploadVersion).toHaveBeenCalledWith(
|
||||
3,
|
||||
file,
|
||||
'Updated scan'
|
||||
)
|
||||
expect(toastService.showInfo).toHaveBeenCalled()
|
||||
expect(documentService.getVersions).toHaveBeenCalledWith(3)
|
||||
expect(versionsEmitSpy).toHaveBeenCalledWith(versions)
|
||||
expect(selectedEmitSpy).toHaveBeenCalledWith(20)
|
||||
expect(component.newVersionLabel).toEqual('')
|
||||
expect(component.versionUploadState).toEqual(UploadState.Idle)
|
||||
expect(component.versionUploadError).toBeNull()
|
||||
})
|
||||
|
||||
it('onVersionFileSelected should set failed state after websocket failure', () => {
|
||||
const file = new File(['test'], 'new-version.pdf', {
|
||||
type: 'application/pdf',
|
||||
})
|
||||
const input = document.createElement('input')
|
||||
Object.defineProperty(input, 'files', { value: [file] })
|
||||
documentService.uploadVersion.mockReturnValue(of('task-1'))
|
||||
|
||||
component.onVersionFileSelected({ target: input } as Event)
|
||||
failed$.next({ taskId: 'task-1', message: 'processing failed' })
|
||||
|
||||
expect(component.versionUploadState).toEqual(UploadState.Failed)
|
||||
expect(component.versionUploadError).toEqual('processing failed')
|
||||
expect(documentService.getVersions).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('onVersionFileSelected should fail when backend response has no task id', () => {
|
||||
const file = new File(['test'], 'new-version.pdf', {
|
||||
type: 'application/pdf',
|
||||
})
|
||||
const input = document.createElement('input')
|
||||
Object.defineProperty(input, 'files', { value: [file] })
|
||||
documentService.uploadVersion.mockReturnValue(of({} as any))
|
||||
|
||||
component.onVersionFileSelected({ target: input } as Event)
|
||||
|
||||
expect(component.versionUploadState).toEqual(UploadState.Failed)
|
||||
expect(component.versionUploadError).toEqual('Missing task ID.')
|
||||
expect(documentService.getVersions).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('onVersionFileSelected should show error when upload request fails', () => {
|
||||
const file = new File(['test'], 'new-version.pdf', {
|
||||
type: 'application/pdf',
|
||||
})
|
||||
const input = document.createElement('input')
|
||||
Object.defineProperty(input, 'files', { value: [file] })
|
||||
const error = new Error('upload failed')
|
||||
documentService.uploadVersion.mockReturnValue(throwError(() => error))
|
||||
|
||||
component.onVersionFileSelected({ target: input } as Event)
|
||||
|
||||
expect(component.versionUploadState).toEqual(UploadState.Failed)
|
||||
expect(component.versionUploadError).toEqual('upload failed')
|
||||
expect(toastService.showError).toHaveBeenCalledWith(
|
||||
'Error uploading new version',
|
||||
error
|
||||
)
|
||||
})
|
||||
|
||||
it('ngOnChanges should clear upload status on document switch', () => {
|
||||
component.versionUploadState = UploadState.Failed
|
||||
component.versionUploadError = 'something failed'
|
||||
component.editingVersionId = 10
|
||||
component.versionLabelDraft = 'draft'
|
||||
|
||||
component.ngOnChanges({
|
||||
documentId: new SimpleChange(3, 4, false),
|
||||
})
|
||||
|
||||
expect(component.versionUploadState).toEqual(UploadState.Idle)
|
||||
expect(component.versionUploadError).toBeNull()
|
||||
expect(component.editingVersionId).toBeNull()
|
||||
expect(component.versionLabelDraft).toEqual('')
|
||||
})
|
||||
})
|
||||
@@ -1,281 +0,0 @@
|
||||
import { SlicePipe } from '@angular/common'
|
||||
import {
|
||||
Component,
|
||||
EventEmitter,
|
||||
inject,
|
||||
Input,
|
||||
OnChanges,
|
||||
OnDestroy,
|
||||
Output,
|
||||
SimpleChanges,
|
||||
} from '@angular/core'
|
||||
import { FormsModule } from '@angular/forms'
|
||||
import { NgbDropdownModule } from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { merge, of, Subject } from 'rxjs'
|
||||
import {
|
||||
filter,
|
||||
finalize,
|
||||
first,
|
||||
map,
|
||||
switchMap,
|
||||
take,
|
||||
takeUntil,
|
||||
tap,
|
||||
} from 'rxjs/operators'
|
||||
import { DocumentVersionInfo } from 'src/app/data/document'
|
||||
import { CustomDatePipe } from 'src/app/pipes/custom-date.pipe'
|
||||
import { DocumentService } from 'src/app/services/rest/document.service'
|
||||
import { ToastService } from 'src/app/services/toast.service'
|
||||
import {
|
||||
UploadState,
|
||||
WebsocketStatusService,
|
||||
} from 'src/app/services/websocket-status.service'
|
||||
import { ConfirmButtonComponent } from '../../common/confirm-button/confirm-button.component'
|
||||
|
||||
@Component({
|
||||
selector: 'pngx-document-version-dropdown',
|
||||
templateUrl: './document-version-dropdown.component.html',
|
||||
styleUrls: ['./document-version-dropdown.component.scss'],
|
||||
imports: [
|
||||
FormsModule,
|
||||
NgbDropdownModule,
|
||||
NgxBootstrapIconsModule,
|
||||
ConfirmButtonComponent,
|
||||
SlicePipe,
|
||||
CustomDatePipe,
|
||||
],
|
||||
})
|
||||
export class DocumentVersionDropdownComponent implements OnChanges, OnDestroy {
|
||||
UploadState = UploadState
|
||||
|
||||
@Input() documentId: number
|
||||
@Input() versions: DocumentVersionInfo[] = []
|
||||
@Input() selectedVersionId: number
|
||||
@Input() userCanEdit: boolean = false
|
||||
@Input() userIsOwner: boolean = false
|
||||
|
||||
@Output() versionSelected = new EventEmitter<number>()
|
||||
@Output() versionsUpdated = new EventEmitter<DocumentVersionInfo[]>()
|
||||
|
||||
newVersionLabel: string = ''
|
||||
versionUploadState: UploadState = UploadState.Idle
|
||||
versionUploadError: string | null = null
|
||||
savingVersionLabelId: number | null = null
|
||||
editingVersionId: number | null = null
|
||||
versionLabelDraft: string = ''
|
||||
|
||||
private readonly documentsService = inject(DocumentService)
|
||||
private readonly toastService = inject(ToastService)
|
||||
private readonly websocketStatusService = inject(WebsocketStatusService)
|
||||
private readonly destroy$ = new Subject<void>()
|
||||
private readonly documentChange$ = new Subject<void>()
|
||||
|
||||
ngOnChanges(changes: SimpleChanges): void {
|
||||
if (changes.documentId && !changes.documentId.firstChange) {
|
||||
this.documentChange$.next()
|
||||
this.clearVersionUploadStatus()
|
||||
this.cancelEditingVersion()
|
||||
}
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
this.documentChange$.next()
|
||||
this.documentChange$.complete()
|
||||
this.destroy$.next()
|
||||
this.destroy$.complete()
|
||||
}
|
||||
|
||||
selectVersion(versionId: number): void {
|
||||
this.versionSelected.emit(versionId)
|
||||
}
|
||||
|
||||
get canEditLabels(): boolean {
|
||||
return this.userIsOwner && this.userCanEdit
|
||||
}
|
||||
|
||||
isEditingVersion(versionId: number): boolean {
|
||||
return this.editingVersionId === versionId
|
||||
}
|
||||
|
||||
beginEditingVersion(version: DocumentVersionInfo, event?: Event): void {
|
||||
event?.preventDefault()
|
||||
event?.stopPropagation()
|
||||
if (!this.canEditLabels || this.savingVersionLabelId !== null) return
|
||||
this.editingVersionId = version.id
|
||||
this.versionLabelDraft = version.version_label ?? ''
|
||||
}
|
||||
|
||||
cancelEditingVersion(event?: Event): void {
|
||||
event?.preventDefault()
|
||||
event?.stopPropagation()
|
||||
this.editingVersionId = null
|
||||
this.versionLabelDraft = ''
|
||||
}
|
||||
|
||||
submitEditedVersionLabel(version: DocumentVersionInfo, event?: Event): void {
|
||||
event?.preventDefault()
|
||||
event?.stopPropagation()
|
||||
if (this.savingVersionLabelId !== null) return
|
||||
const nextLabel = this.versionLabelDraft?.trim() || null
|
||||
const currentLabel = version.version_label?.trim() || null
|
||||
if (nextLabel === currentLabel) {
|
||||
this.cancelEditingVersion()
|
||||
return
|
||||
}
|
||||
this.saveVersionLabel(version.id, nextLabel)
|
||||
this.cancelEditingVersion()
|
||||
}
|
||||
|
||||
deleteVersion(versionId: number): void {
|
||||
const wasSelected = this.selectedVersionId === versionId
|
||||
this.documentsService
|
||||
.deleteVersion(this.documentId, versionId)
|
||||
.pipe(
|
||||
switchMap((result) =>
|
||||
this.documentsService
|
||||
.getVersions(this.documentId)
|
||||
.pipe(map((doc) => ({ doc, result })))
|
||||
),
|
||||
first(),
|
||||
takeUntil(this.destroy$)
|
||||
)
|
||||
.subscribe({
|
||||
next: ({ doc, result }) => {
|
||||
if (doc?.versions) {
|
||||
this.versionsUpdated.emit(doc.versions)
|
||||
}
|
||||
|
||||
if (wasSelected || this.selectedVersionId === versionId) {
|
||||
const fallbackId = result?.current_version_id ?? this.documentId
|
||||
this.versionSelected.emit(fallbackId)
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
this.toastService.showError($localize`Error deleting version`, error)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
saveVersionLabel(versionId: number, versionLabel: string | null): void {
|
||||
if (this.savingVersionLabelId !== null) return
|
||||
this.savingVersionLabelId = versionId
|
||||
this.documentsService
|
||||
.updateVersionLabel(this.documentId, versionId, versionLabel)
|
||||
.pipe(
|
||||
first(),
|
||||
finalize(() => {
|
||||
if (this.savingVersionLabelId === versionId) {
|
||||
this.savingVersionLabelId = null
|
||||
}
|
||||
}),
|
||||
takeUntil(this.destroy$)
|
||||
)
|
||||
.subscribe({
|
||||
next: (updatedVersion) => {
|
||||
const updatedVersions = this.versions.map((version) =>
|
||||
version.id === versionId
|
||||
? {
|
||||
...version,
|
||||
version_label: updatedVersion.version_label,
|
||||
}
|
||||
: version
|
||||
)
|
||||
this.versionsUpdated.emit(updatedVersions)
|
||||
},
|
||||
error: (error) => {
|
||||
this.toastService.showError(
|
||||
$localize`Error updating version label`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
onVersionFileSelected(event: Event): void {
|
||||
const input = event.target as HTMLInputElement
|
||||
if (!input?.files || input.files.length === 0) return
|
||||
const uploadDocumentId = this.documentId
|
||||
const file = input.files[0]
|
||||
input.value = ''
|
||||
const label = this.newVersionLabel?.trim()
|
||||
this.versionUploadState = UploadState.Uploading
|
||||
this.versionUploadError = null
|
||||
this.documentsService
|
||||
.uploadVersion(uploadDocumentId, file, label)
|
||||
.pipe(
|
||||
first(),
|
||||
tap(() => {
|
||||
this.toastService.showInfo(
|
||||
$localize`Uploading new version. Processing will happen in the background.`
|
||||
)
|
||||
this.newVersionLabel = ''
|
||||
this.versionUploadState = UploadState.Processing
|
||||
}),
|
||||
map((taskId) =>
|
||||
typeof taskId === 'string'
|
||||
? taskId
|
||||
: (taskId as { task_id?: string })?.task_id
|
||||
),
|
||||
switchMap((taskId) => {
|
||||
if (!taskId) {
|
||||
this.versionUploadState = UploadState.Failed
|
||||
this.versionUploadError = $localize`Missing task ID.`
|
||||
return of(null)
|
||||
}
|
||||
return merge(
|
||||
this.websocketStatusService.onDocumentConsumptionFinished().pipe(
|
||||
filter((status) => status.taskId === taskId),
|
||||
map(() => ({ state: 'success' as const }))
|
||||
),
|
||||
this.websocketStatusService.onDocumentConsumptionFailed().pipe(
|
||||
filter((status) => status.taskId === taskId),
|
||||
map((status) => ({
|
||||
state: 'failed' as const,
|
||||
message: status.message,
|
||||
}))
|
||||
)
|
||||
).pipe(takeUntil(merge(this.destroy$, this.documentChange$)), take(1))
|
||||
}),
|
||||
switchMap((result) => {
|
||||
if (result?.state !== 'success') {
|
||||
if (result?.state === 'failed') {
|
||||
this.versionUploadState = UploadState.Failed
|
||||
this.versionUploadError =
|
||||
result.message || $localize`Upload failed.`
|
||||
}
|
||||
return of(null)
|
||||
}
|
||||
return this.documentsService.getVersions(uploadDocumentId)
|
||||
}),
|
||||
takeUntil(this.destroy$),
|
||||
takeUntil(this.documentChange$)
|
||||
)
|
||||
.subscribe({
|
||||
next: (doc) => {
|
||||
if (uploadDocumentId !== this.documentId) return
|
||||
if (doc?.versions) {
|
||||
this.versionsUpdated.emit(doc.versions)
|
||||
this.versionSelected.emit(
|
||||
Math.max(...doc.versions.map((version) => version.id))
|
||||
)
|
||||
this.clearVersionUploadStatus()
|
||||
}
|
||||
},
|
||||
error: (error) => {
|
||||
if (uploadDocumentId !== this.documentId) return
|
||||
this.versionUploadState = UploadState.Failed
|
||||
this.versionUploadError = error?.message || $localize`Upload failed.`
|
||||
this.toastService.showError(
|
||||
$localize`Error uploading new version`,
|
||||
error
|
||||
)
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
clearVersionUploadStatus(): void {
|
||||
this.versionUploadState = UploadState.Idle
|
||||
this.versionUploadError = null
|
||||
}
|
||||
}
|
||||
@@ -62,9 +62,9 @@
|
||||
|
||||
@if (!loading || data.length > 0) {
|
||||
<div class="d-flex mb-2">
|
||||
@if (displayCollectionSize > 0) {
|
||||
@if (collectionSize > 0) {
|
||||
<div>
|
||||
<ng-container i18n>{displayCollectionSize, plural, =1 {One {{typeName}}} other {{{displayCollectionSize || 0}} total {{typeNamePlural}}}}</ng-container>
|
||||
<ng-container i18n>{collectionSize, plural, =1 {One {{typeName}}} other {{{collectionSize || 0}} total {{typeNamePlural}}}}</ng-container>
|
||||
@if (selectedObjects.size > 0) {
|
||||
({{selectedObjects.size}} selected)
|
||||
}
|
||||
|
||||
@@ -231,7 +231,7 @@ describe('ManagementListComponent', () => {
|
||||
expect(reloadSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should use API count for pagination and all ids for displayed total', fakeAsync(() => {
|
||||
it('should use the all list length for collection size when provided', fakeAsync(() => {
|
||||
jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
|
||||
of({
|
||||
count: 1,
|
||||
@@ -243,8 +243,7 @@ describe('ManagementListComponent', () => {
|
||||
component.reloadData()
|
||||
tick(100)
|
||||
|
||||
expect(component.collectionSize).toBe(1)
|
||||
expect(component.displayCollectionSize).toBe(3)
|
||||
expect(component.collectionSize).toBe(3)
|
||||
}))
|
||||
|
||||
it('should support quick filter for objects', () => {
|
||||
|
||||
@@ -27,7 +27,6 @@ import {
|
||||
MatchingModel,
|
||||
} from 'src/app/data/matching-model'
|
||||
import { ObjectWithPermissions } from 'src/app/data/object-with-permissions'
|
||||
import { Results } from 'src/app/data/results'
|
||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||
import {
|
||||
SortableDirective,
|
||||
@@ -95,7 +94,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
public page = 1
|
||||
|
||||
public collectionSize = 0
|
||||
public displayCollectionSize = 0
|
||||
|
||||
public sortField: string
|
||||
public sortReverse: boolean
|
||||
@@ -149,14 +147,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
return data
|
||||
}
|
||||
|
||||
protected getCollectionSize(results: Results<T>): number {
|
||||
return results.all?.length ?? results.count
|
||||
}
|
||||
|
||||
protected getDisplayCollectionSize(results: Results<T>): number {
|
||||
return this.getCollectionSize(results)
|
||||
}
|
||||
|
||||
getDocumentCount(object: MatchingModel): number {
|
||||
return (
|
||||
object.document_count ??
|
||||
@@ -187,8 +177,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
tap((c) => {
|
||||
this.unfilteredData = c.results
|
||||
this.data = this.filterData(c.results)
|
||||
this.collectionSize = this.getCollectionSize(c)
|
||||
this.displayCollectionSize = this.getDisplayCollectionSize(c)
|
||||
this.collectionSize = c.all?.length ?? c.count
|
||||
this.allIDs = c.all
|
||||
}),
|
||||
delay(100)
|
||||
|
||||
@@ -9,7 +9,6 @@ import {
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { TagEditDialogComponent } from 'src/app/components/common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
|
||||
import { FILTER_HAS_TAGS_ALL } from 'src/app/data/filter-rule-type'
|
||||
import { Results } from 'src/app/data/results'
|
||||
import { Tag } from 'src/app/data/tag'
|
||||
import { IfPermissionsDirective } from 'src/app/directives/if-permissions.directive'
|
||||
import { SortableDirective } from 'src/app/directives/sortable.directive'
|
||||
@@ -77,16 +76,6 @@ export class TagListComponent extends ManagementListComponent<Tag> {
|
||||
return data.filter((tag) => !tag.parent || !availableIds.has(tag.parent))
|
||||
}
|
||||
|
||||
protected override getCollectionSize(results: Results<Tag>): number {
|
||||
// Tag list pages are requested with is_root=true (when unfiltered), so
|
||||
// pagination must follow root count even though `all` includes descendants
|
||||
return results.count
|
||||
}
|
||||
|
||||
protected override getDisplayCollectionSize(results: Results<Tag>): number {
|
||||
return super.getCollectionSize(results)
|
||||
}
|
||||
|
||||
protected override getSelectableIDs(tags: Tag[]): number[] {
|
||||
const ids: number[] = []
|
||||
for (const tag of tags.filter(Boolean)) {
|
||||
|
||||
@@ -128,15 +128,15 @@ export interface Document extends ObjectWithPermissions {
|
||||
checksum?: string
|
||||
|
||||
// UTC
|
||||
created?: Date
|
||||
created?: string // ISO string
|
||||
|
||||
modified?: Date
|
||||
modified?: string // ISO string
|
||||
|
||||
added?: Date
|
||||
added?: string // ISO string
|
||||
|
||||
mime_type?: string
|
||||
|
||||
deleted_at?: Date
|
||||
deleted_at?: string // ISO string
|
||||
|
||||
original_file_name?: string
|
||||
|
||||
@@ -161,18 +161,6 @@ export interface Document extends ObjectWithPermissions {
|
||||
|
||||
duplicate_documents?: Document[]
|
||||
|
||||
// Versioning
|
||||
root_document?: number
|
||||
versions?: DocumentVersionInfo[]
|
||||
|
||||
// Frontend only
|
||||
__changedFields?: string[]
|
||||
}
|
||||
|
||||
export interface DocumentVersionInfo {
|
||||
id: number
|
||||
added?: Date
|
||||
version_label?: string
|
||||
checksum?: string
|
||||
is_root: boolean
|
||||
}
|
||||
|
||||
@@ -0,0 +1,7 @@
|
||||
export interface WebsocketDocumentUpdatedMessage {
|
||||
document_id: number
|
||||
modified: string
|
||||
owner_id?: number
|
||||
users_can_view?: number[]
|
||||
groups_can_view?: number[]
|
||||
}
|
||||
@@ -6,7 +6,6 @@ export enum WorkflowActionType {
|
||||
Email = 3,
|
||||
Webhook = 4,
|
||||
PasswordRemoval = 5,
|
||||
MoveToTrash = 6,
|
||||
}
|
||||
|
||||
export interface WorkflowActionEmail extends ObjectWithId {
|
||||
|
||||
@@ -165,14 +165,6 @@ describe(`DocumentService`, () => {
|
||||
expect(req.request.method).toEqual('GET')
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for versioned metadata', () => {
|
||||
subscription = service.getMetadata(documents[0].id, 123).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/metadata/?version=123`
|
||||
)
|
||||
expect(req.request.method).toEqual('GET')
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for getting selection data', () => {
|
||||
const ids = [documents[0].id]
|
||||
subscription = service.getSelectionData(ids).subscribe()
|
||||
@@ -241,22 +233,11 @@ describe(`DocumentService`, () => {
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the correct preview URL for a specific version', () => {
|
||||
const url = service.getPreviewUrl(documents[0].id, false, 123)
|
||||
expect(url).toEqual(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/preview/?version=123`
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the correct thumb URL for a single document', () => {
|
||||
let url = service.getThumbUrl(documents[0].id)
|
||||
expect(url).toEqual(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/thumb/`
|
||||
)
|
||||
url = service.getThumbUrl(documents[0].id, 123)
|
||||
expect(url).toEqual(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/thumb/?version=123`
|
||||
)
|
||||
})
|
||||
|
||||
it('should return the correct download URL for a single document', () => {
|
||||
@@ -268,22 +249,6 @@ describe(`DocumentService`, () => {
|
||||
expect(url).toEqual(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/download/?original=true`
|
||||
)
|
||||
url = service.getDownloadUrl(documents[0].id, false, 123)
|
||||
expect(url).toEqual(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/download/?version=123`
|
||||
)
|
||||
url = service.getDownloadUrl(documents[0].id, true, 123, true)
|
||||
expect(url).toContain('original=true')
|
||||
expect(url).toContain('version=123')
|
||||
expect(url).toContain('follow_formatting=true')
|
||||
})
|
||||
|
||||
it('should pass optional get params for version and fields', () => {
|
||||
subscription = service.get(documents[0].id, 123, 'content').subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/?full_perms=true&version=123&fields=content`
|
||||
)
|
||||
expect(req.request.method).toEqual('GET')
|
||||
})
|
||||
|
||||
it('should set search query', () => {
|
||||
@@ -318,77 +283,12 @@ describe(`DocumentService`, () => {
|
||||
expect(req.request.body.remove_inbox_tags).toEqual(true)
|
||||
})
|
||||
|
||||
it('should pass selected version to patch when provided', () => {
|
||||
subscription = service.patch(documents[0], 123).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/?version=123`
|
||||
)
|
||||
expect(req.request.method).toEqual('PATCH')
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for getting audit log', () => {
|
||||
subscription = service.getHistory(documents[0].id).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/history/`
|
||||
)
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for getting root document id', () => {
|
||||
subscription = service.getRootId(documents[0].id).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/root/`
|
||||
)
|
||||
expect(req.request.method).toEqual('GET')
|
||||
req.flush({ root_id: documents[0].id })
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for getting document versions', () => {
|
||||
subscription = service.getVersions(documents[0].id).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/?fields=id,versions`
|
||||
)
|
||||
expect(req.request.method).toEqual('GET')
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for deleting a document version', () => {
|
||||
subscription = service.deleteVersion(documents[0].id, 10).subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/versions/10/`
|
||||
)
|
||||
expect(req.request.method).toEqual('DELETE')
|
||||
req.flush({ result: 'OK', current_version_id: documents[0].id })
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for updating a document version label', () => {
|
||||
subscription = service
|
||||
.updateVersionLabel(documents[0].id, 10, 'Updated label')
|
||||
.subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/versions/10/`
|
||||
)
|
||||
expect(req.request.method).toEqual('PATCH')
|
||||
expect(req.request.body).toEqual({ version_label: 'Updated label' })
|
||||
req.flush({ id: 10, version_label: 'Updated label', is_root: false })
|
||||
})
|
||||
|
||||
it('should call appropriate api endpoint for uploading a new version', () => {
|
||||
const file = new File(['hello'], 'test.pdf', { type: 'application/pdf' })
|
||||
|
||||
subscription = service
|
||||
.uploadVersion(documents[0].id, file, 'Label')
|
||||
.subscribe()
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}${endpoint}/${documents[0].id}/update_version/`
|
||||
)
|
||||
expect(req.request.method).toEqual('POST')
|
||||
expect(req.request.body).toBeInstanceOf(FormData)
|
||||
|
||||
const body = req.request.body as FormData
|
||||
expect(body.get('version_label')).toEqual('Label')
|
||||
expect(body.get('document')).toBeInstanceOf(File)
|
||||
|
||||
req.flush('task-id')
|
||||
})
|
||||
})
|
||||
|
||||
it('should construct sort fields respecting permissions', () => {
|
||||
|
||||
@@ -7,7 +7,6 @@ import {
|
||||
DOCUMENT_SORT_FIELDS,
|
||||
DOCUMENT_SORT_FIELDS_FULLTEXT,
|
||||
Document,
|
||||
DocumentVersionInfo,
|
||||
} from 'src/app/data/document'
|
||||
import { DocumentMetadata } from 'src/app/data/document-metadata'
|
||||
import { DocumentSuggestions } from 'src/app/data/document-suggestions'
|
||||
@@ -156,123 +155,44 @@ export class DocumentService extends AbstractPaperlessService<Document> {
|
||||
}).pipe(map((response) => response.results.map((doc) => doc.id)))
|
||||
}
|
||||
|
||||
get(
|
||||
id: number,
|
||||
versionID: number = null,
|
||||
fields: string = null
|
||||
): Observable<Document> {
|
||||
const params: { full_perms: boolean; version?: string; fields?: string } = {
|
||||
full_perms: true,
|
||||
}
|
||||
if (versionID) {
|
||||
params.version = versionID.toString()
|
||||
}
|
||||
if (fields) {
|
||||
params.fields = fields
|
||||
}
|
||||
get(id: number): Observable<Document> {
|
||||
return this.http.get<Document>(this.getResourceUrl(id), {
|
||||
params,
|
||||
params: {
|
||||
full_perms: true,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
getPreviewUrl(
|
||||
id: number,
|
||||
original: boolean = false,
|
||||
versionID: number = null
|
||||
): string {
|
||||
getPreviewUrl(id: number, original: boolean = false): string {
|
||||
let url = new URL(this.getResourceUrl(id, 'preview'))
|
||||
if (this._searchQuery) url.hash = `#search="${this.searchQuery}"`
|
||||
if (original) {
|
||||
url.searchParams.append('original', 'true')
|
||||
}
|
||||
if (versionID) {
|
||||
url.searchParams.append('version', versionID.toString())
|
||||
}
|
||||
return url.toString()
|
||||
}
|
||||
|
||||
getThumbUrl(id: number, versionID: number = null): string {
|
||||
let url = new URL(this.getResourceUrl(id, 'thumb'))
|
||||
if (versionID) {
|
||||
url.searchParams.append('version', versionID.toString())
|
||||
}
|
||||
return url.toString()
|
||||
getThumbUrl(id: number): string {
|
||||
return this.getResourceUrl(id, 'thumb')
|
||||
}
|
||||
|
||||
getDownloadUrl(
|
||||
id: number,
|
||||
original: boolean = false,
|
||||
versionID: number = null,
|
||||
followFormatting: boolean = false
|
||||
): string {
|
||||
let url = new URL(this.getResourceUrl(id, 'download'))
|
||||
getDownloadUrl(id: number, original: boolean = false): string {
|
||||
let url = this.getResourceUrl(id, 'download')
|
||||
if (original) {
|
||||
url.searchParams.append('original', 'true')
|
||||
url += '?original=true'
|
||||
}
|
||||
if (versionID) {
|
||||
url.searchParams.append('version', versionID.toString())
|
||||
}
|
||||
if (followFormatting) {
|
||||
url.searchParams.append('follow_formatting', 'true')
|
||||
}
|
||||
return url.toString()
|
||||
}
|
||||
|
||||
uploadVersion(documentId: number, file: File, versionLabel?: string) {
|
||||
const formData = new FormData()
|
||||
formData.append('document', file, file.name)
|
||||
if (versionLabel) {
|
||||
formData.append('version_label', versionLabel)
|
||||
}
|
||||
return this.http.post<string>(
|
||||
this.getResourceUrl(documentId, 'update_version'),
|
||||
formData
|
||||
)
|
||||
}
|
||||
|
||||
getVersions(documentId: number): Observable<Document> {
|
||||
return this.http.get<Document>(this.getResourceUrl(documentId), {
|
||||
params: {
|
||||
fields: 'id,versions',
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
getRootId(documentId: number) {
|
||||
return this.http.get<{ root_id: number }>(
|
||||
this.getResourceUrl(documentId, 'root')
|
||||
)
|
||||
}
|
||||
|
||||
deleteVersion(rootDocumentId: number, versionId: number) {
|
||||
return this.http.delete<{ result: string; current_version_id: number }>(
|
||||
this.getResourceUrl(rootDocumentId, `versions/${versionId}`)
|
||||
)
|
||||
}
|
||||
|
||||
updateVersionLabel(
|
||||
rootDocumentId: number,
|
||||
versionId: number,
|
||||
versionLabel: string | null
|
||||
): Observable<DocumentVersionInfo> {
|
||||
return this.http.patch<DocumentVersionInfo>(
|
||||
this.getResourceUrl(rootDocumentId, `versions/${versionId}`),
|
||||
{ version_label: versionLabel }
|
||||
)
|
||||
return url
|
||||
}
|
||||
|
||||
getNextAsn(): Observable<number> {
|
||||
return this.http.get<number>(this.getResourceUrl(null, 'next_asn'))
|
||||
}
|
||||
|
||||
patch(o: Document, versionID: number = null): Observable<Document> {
|
||||
patch(o: Document): Observable<Document> {
|
||||
o.remove_inbox_tags = !!this.settingsService.get(
|
||||
SETTINGS_KEYS.DOCUMENT_EDITING_REMOVE_INBOX_TAGS
|
||||
)
|
||||
this.clearCache()
|
||||
return this.http.patch<Document>(this.getResourceUrl(o.id), o, {
|
||||
params: versionID ? { version: versionID.toString() } : {},
|
||||
})
|
||||
return super.patch(o)
|
||||
}
|
||||
|
||||
uploadDocument(formData) {
|
||||
@@ -283,15 +203,8 @@ export class DocumentService extends AbstractPaperlessService<Document> {
|
||||
)
|
||||
}
|
||||
|
||||
getMetadata(
|
||||
id: number,
|
||||
versionID: number = null
|
||||
): Observable<DocumentMetadata> {
|
||||
let url = new URL(this.getResourceUrl(id, 'metadata'))
|
||||
if (versionID) {
|
||||
url.searchParams.append('version', versionID.toString())
|
||||
}
|
||||
return this.http.get<DocumentMetadata>(url.toString())
|
||||
getMetadata(id: number): Observable<DocumentMetadata> {
|
||||
return this.http.get<DocumentMetadata>(this.getResourceUrl(id, 'metadata'))
|
||||
}
|
||||
|
||||
bulkEdit(ids: number[], method: string, args: any) {
|
||||
|
||||
@@ -416,4 +416,42 @@ describe('ConsumerStatusService', () => {
|
||||
websocketStatusService.disconnect()
|
||||
expect(deleted).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should trigger updated subject on document updated', () => {
|
||||
let updated = false
|
||||
websocketStatusService.onDocumentUpdated().subscribe((data) => {
|
||||
updated = true
|
||||
expect(data.document_id).toEqual(12)
|
||||
})
|
||||
|
||||
websocketStatusService.connect()
|
||||
server.send({
|
||||
type: WebsocketStatusType.DOCUMENT_UPDATED,
|
||||
data: {
|
||||
document_id: 12,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
owner_id: 1,
|
||||
},
|
||||
})
|
||||
|
||||
websocketStatusService.disconnect()
|
||||
expect(updated).toBeTruthy()
|
||||
})
|
||||
|
||||
it('should ignore document updated events the user cannot view', () => {
|
||||
let updated = false
|
||||
websocketStatusService.onDocumentUpdated().subscribe(() => {
|
||||
updated = true
|
||||
})
|
||||
|
||||
websocketStatusService.handleDocumentUpdated({
|
||||
document_id: 12,
|
||||
modified: '2026-02-17T00:00:00Z',
|
||||
owner_id: 2,
|
||||
users_can_view: [],
|
||||
groups_can_view: [],
|
||||
})
|
||||
|
||||
expect(updated).toBeFalsy()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -2,6 +2,7 @@ import { Injectable, inject } from '@angular/core'
|
||||
import { Subject } from 'rxjs'
|
||||
import { environment } from 'src/environments/environment'
|
||||
import { User } from '../data/user'
|
||||
import { WebsocketDocumentUpdatedMessage } from '../data/websocket-document-updated-message'
|
||||
import { WebsocketDocumentsDeletedMessage } from '../data/websocket-documents-deleted-message'
|
||||
import { WebsocketProgressMessage } from '../data/websocket-progress-message'
|
||||
import { SettingsService } from './settings.service'
|
||||
@@ -9,6 +10,7 @@ import { SettingsService } from './settings.service'
|
||||
export enum WebsocketStatusType {
|
||||
STATUS_UPDATE = 'status_update',
|
||||
DOCUMENTS_DELETED = 'documents_deleted',
|
||||
DOCUMENT_UPDATED = 'document_updated',
|
||||
}
|
||||
|
||||
// see ProgressStatusOptions in src/documents/plugins/helpers.py
|
||||
@@ -89,28 +91,24 @@ export class FileStatus {
|
||||
}
|
||||
}
|
||||
|
||||
export enum UploadState {
|
||||
Idle = 'idle',
|
||||
Uploading = 'uploading',
|
||||
Processing = 'processing',
|
||||
Failed = 'failed',
|
||||
}
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root',
|
||||
})
|
||||
export class WebsocketStatusService {
|
||||
private settingsService = inject(SettingsService)
|
||||
private readonly settingsService = inject(SettingsService)
|
||||
|
||||
private statusWebSocket: WebSocket
|
||||
|
||||
private consumerStatus: FileStatus[] = []
|
||||
|
||||
private documentDetectedSubject = new Subject<FileStatus>()
|
||||
private documentConsumptionFinishedSubject = new Subject<FileStatus>()
|
||||
private documentConsumptionFailedSubject = new Subject<FileStatus>()
|
||||
private documentDeletedSubject = new Subject<boolean>()
|
||||
private connectionStatusSubject = new Subject<boolean>()
|
||||
private readonly documentDetectedSubject = new Subject<FileStatus>()
|
||||
private readonly documentConsumptionFinishedSubject =
|
||||
new Subject<FileStatus>()
|
||||
private readonly documentConsumptionFailedSubject = new Subject<FileStatus>()
|
||||
private readonly documentDeletedSubject = new Subject<boolean>()
|
||||
private readonly documentUpdatedSubject =
|
||||
new Subject<WebsocketDocumentUpdatedMessage>()
|
||||
private readonly connectionStatusSubject = new Subject<boolean>()
|
||||
|
||||
private get(taskId: string, filename?: string) {
|
||||
let status =
|
||||
@@ -176,7 +174,10 @@ export class WebsocketStatusService {
|
||||
data: messageData,
|
||||
}: {
|
||||
type: WebsocketStatusType
|
||||
data: WebsocketProgressMessage | WebsocketDocumentsDeletedMessage
|
||||
data:
|
||||
| WebsocketProgressMessage
|
||||
| WebsocketDocumentsDeletedMessage
|
||||
| WebsocketDocumentUpdatedMessage
|
||||
} = JSON.parse(ev.data)
|
||||
|
||||
switch (type) {
|
||||
@@ -184,6 +185,12 @@ export class WebsocketStatusService {
|
||||
this.documentDeletedSubject.next(true)
|
||||
break
|
||||
|
||||
case WebsocketStatusType.DOCUMENT_UPDATED:
|
||||
this.handleDocumentUpdated(
|
||||
messageData as WebsocketDocumentUpdatedMessage
|
||||
)
|
||||
break
|
||||
|
||||
case WebsocketStatusType.STATUS_UPDATE:
|
||||
this.handleProgressUpdate(messageData as WebsocketProgressMessage)
|
||||
break
|
||||
@@ -191,7 +198,11 @@ export class WebsocketStatusService {
|
||||
}
|
||||
}
|
||||
|
||||
private canViewMessage(messageData: WebsocketProgressMessage): boolean {
|
||||
private canViewMessage(messageData: {
|
||||
owner_id?: number
|
||||
users_can_view?: number[]
|
||||
groups_can_view?: number[]
|
||||
}): boolean {
|
||||
// see paperless.consumers.StatusConsumer._can_view
|
||||
const user: User = this.settingsService.currentUser
|
||||
return (
|
||||
@@ -251,6 +262,15 @@ export class WebsocketStatusService {
|
||||
}
|
||||
}
|
||||
|
||||
handleDocumentUpdated(messageData: WebsocketDocumentUpdatedMessage) {
|
||||
// fallback if backend didn't restrict message
|
||||
if (!this.canViewMessage(messageData)) {
|
||||
return
|
||||
}
|
||||
|
||||
this.documentUpdatedSubject.next(messageData)
|
||||
}
|
||||
|
||||
fail(status: FileStatus, message: string) {
|
||||
status.message = message
|
||||
status.phase = FileStatusPhase.FAILED
|
||||
@@ -304,6 +324,10 @@ export class WebsocketStatusService {
|
||||
return this.documentDeletedSubject
|
||||
}
|
||||
|
||||
onDocumentUpdated() {
|
||||
return this.documentUpdatedSubject
|
||||
}
|
||||
|
||||
onConnectionStatus() {
|
||||
return this.connectionStatusSubject.asObservable()
|
||||
}
|
||||
|
||||
@@ -59,7 +59,6 @@ import {
|
||||
chevronDoubleLeft,
|
||||
chevronDoubleRight,
|
||||
chevronRight,
|
||||
circle,
|
||||
clipboard,
|
||||
clipboardCheck,
|
||||
clipboardCheckFill,
|
||||
@@ -80,11 +79,9 @@ import {
|
||||
eye,
|
||||
fileEarmark,
|
||||
fileEarmarkCheck,
|
||||
fileEarmarkDiff,
|
||||
fileEarmarkFill,
|
||||
fileEarmarkLock,
|
||||
fileEarmarkMinus,
|
||||
fileEarmarkPlus,
|
||||
fileEarmarkRichtext,
|
||||
fileText,
|
||||
files,
|
||||
@@ -281,7 +278,6 @@ const icons = {
|
||||
chevronDoubleLeft,
|
||||
chevronDoubleRight,
|
||||
chevronRight,
|
||||
circle,
|
||||
clipboard,
|
||||
clipboardCheck,
|
||||
clipboardCheckFill,
|
||||
@@ -302,11 +298,9 @@ const icons = {
|
||||
eye,
|
||||
fileEarmark,
|
||||
fileEarmarkCheck,
|
||||
fileEarmarkDiff,
|
||||
fileEarmarkFill,
|
||||
fileEarmarkLock,
|
||||
fileEarmarkMinus,
|
||||
fileEarmarkPlus,
|
||||
fileEarmarkRichtext,
|
||||
files,
|
||||
fileText,
|
||||
|
||||
11
src/conftest.py
Normal file
11
src/conftest.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import pytest
|
||||
from pytest_django.fixtures import SettingsWrapper
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def in_memory_channel_layers(settings: SettingsWrapper) -> None:
|
||||
settings.CHANNEL_LAYERS = {
|
||||
"default": {
|
||||
"BACKEND": "channels.layers.InMemoryChannelLayer",
|
||||
},
|
||||
}
|
||||
@@ -15,6 +15,7 @@ class DocumentsConfig(AppConfig):
|
||||
from documents.signals.handlers import add_to_index
|
||||
from documents.signals.handlers import run_workflows_added
|
||||
from documents.signals.handlers import run_workflows_updated
|
||||
from documents.signals.handlers import send_websocket_document_updated
|
||||
from documents.signals.handlers import set_correspondent
|
||||
from documents.signals.handlers import set_document_type
|
||||
from documents.signals.handlers import set_storage_path
|
||||
@@ -29,6 +30,7 @@ class DocumentsConfig(AppConfig):
|
||||
document_consumption_finished.connect(run_workflows_added)
|
||||
document_consumption_finished.connect(add_or_update_document_in_llm_index)
|
||||
document_updated.connect(run_workflows_updated)
|
||||
document_updated.connect(send_websocket_document_updated)
|
||||
|
||||
import documents.schema # noqa: F401
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
@@ -72,48 +73,6 @@ def restore_archive_serial_numbers(backup: dict[int, int | None]) -> None:
|
||||
logger.info(f"Restored archive serial numbers for documents {list(backup.keys())}")
|
||||
|
||||
|
||||
def _get_root_ids_by_doc_id(doc_ids: list[int]) -> dict[int, int]:
|
||||
"""
|
||||
Resolve each provided document id to its root document id.
|
||||
|
||||
- If the id is already a root document: root id is itself.
|
||||
- If the id is a version document: root id is its `root_document_id`.
|
||||
"""
|
||||
qs = Document.objects.filter(id__in=doc_ids).only("id", "root_document_id")
|
||||
return {doc.id: doc.root_document_id or doc.id for doc in qs}
|
||||
|
||||
|
||||
def _get_root_and_current_docs_by_root_id(
|
||||
root_ids: set[int],
|
||||
) -> tuple[dict[int, Document], dict[int, Document]]:
|
||||
"""
|
||||
Returns:
|
||||
- root_docs: root_id -> root Document
|
||||
- current_docs: root_id -> newest version Document (or root if none)
|
||||
"""
|
||||
root_docs = {
|
||||
doc.id: doc
|
||||
for doc in Document.objects.filter(id__in=root_ids).select_related(
|
||||
"owner",
|
||||
)
|
||||
}
|
||||
latest_versions_by_root_id: dict[int, Document] = {}
|
||||
for version_doc in Document.objects.filter(root_document_id__in=root_ids).order_by(
|
||||
"root_document_id",
|
||||
"-id",
|
||||
):
|
||||
root_id = version_doc.root_document_id
|
||||
if root_id is None:
|
||||
continue
|
||||
latest_versions_by_root_id.setdefault(root_id, version_doc)
|
||||
|
||||
current_docs: dict[int, Document] = {
|
||||
root_id: latest_versions_by_root_id.get(root_id, root_docs[root_id])
|
||||
for root_id in root_docs
|
||||
}
|
||||
return root_docs, current_docs
|
||||
|
||||
|
||||
def set_correspondent(
|
||||
doc_ids: list[int],
|
||||
correspondent: Correspondent,
|
||||
@@ -350,29 +309,16 @@ def modify_custom_fields(
|
||||
@shared_task
|
||||
def delete(doc_ids: list[int]) -> Literal["OK"]:
|
||||
try:
|
||||
root_ids = (
|
||||
Document.objects.filter(id__in=doc_ids, root_document__isnull=True)
|
||||
.values_list("id", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
version_ids = (
|
||||
Document.objects.filter(root_document_id__in=root_ids)
|
||||
.exclude(id__in=doc_ids)
|
||||
.values_list("id", flat=True)
|
||||
.distinct()
|
||||
)
|
||||
delete_ids = list({*doc_ids, *version_ids})
|
||||
|
||||
Document.objects.filter(id__in=delete_ids).delete()
|
||||
Document.objects.filter(id__in=doc_ids).delete()
|
||||
|
||||
from documents import index
|
||||
|
||||
with index.open_index_writer() as writer:
|
||||
for id in delete_ids:
|
||||
for id in doc_ids:
|
||||
index.remove_document_by_id(writer, id)
|
||||
|
||||
status_mgr = DocumentsStatusManager()
|
||||
status_mgr.send_documents_deleted(delete_ids)
|
||||
status_mgr.send_documents_deleted(doc_ids)
|
||||
except Exception as e:
|
||||
if "Data too long for column" in str(e):
|
||||
logger.warning(
|
||||
@@ -417,60 +363,43 @@ def set_permissions(
|
||||
return "OK"
|
||||
|
||||
|
||||
def rotate(
|
||||
doc_ids: list[int],
|
||||
degrees: int,
|
||||
*,
|
||||
user: User | None = None,
|
||||
) -> Literal["OK"]:
|
||||
def rotate(doc_ids: list[int], degrees: int) -> Literal["OK"]:
|
||||
logger.info(
|
||||
f"Attempting to rotate {len(doc_ids)} documents by {degrees} degrees.",
|
||||
)
|
||||
doc_to_root_id = _get_root_ids_by_doc_id(doc_ids)
|
||||
root_ids = set(doc_to_root_id.values())
|
||||
root_docs_by_id, current_docs_by_root_id = _get_root_and_current_docs_by_root_id(
|
||||
root_ids,
|
||||
)
|
||||
qs = Document.objects.filter(id__in=doc_ids)
|
||||
affected_docs: list[int] = []
|
||||
import pikepdf
|
||||
|
||||
for root_id in root_ids:
|
||||
root_doc = root_docs_by_id[root_id]
|
||||
source_doc = current_docs_by_root_id[root_id]
|
||||
if source_doc.mime_type != "application/pdf":
|
||||
rotate_tasks = []
|
||||
for doc in qs:
|
||||
if doc.mime_type != "application/pdf":
|
||||
logger.warning(
|
||||
f"Document {root_doc.id} is not a PDF, skipping rotation.",
|
||||
f"Document {doc.id} is not a PDF, skipping rotation.",
|
||||
)
|
||||
continue
|
||||
try:
|
||||
# Write rotated output to a temp file and create a new version via consume pipeline
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{root_doc.id}_rotated.pdf"
|
||||
)
|
||||
with pikepdf.open(source_doc.source_path) as pdf:
|
||||
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
|
||||
for page in pdf.pages:
|
||||
page.rotate(degrees, relative=True)
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(filepath)
|
||||
|
||||
# Preserve metadata/permissions via overrides; mark as new version
|
||||
overrides = DocumentMetadataOverrides().from_document(root_doc)
|
||||
if user is not None:
|
||||
overrides.actor_id = user.id
|
||||
|
||||
consume_file.delay(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
root_document_id=root_doc.id,
|
||||
),
|
||||
overrides,
|
||||
)
|
||||
logger.info(
|
||||
f"Queued new rotated version for document {root_doc.id} by {degrees} degrees",
|
||||
)
|
||||
pdf.save()
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
doc.save()
|
||||
rotate_tasks.append(
|
||||
update_document_content_maybe_archive_file.s(
|
||||
document_id=doc.id,
|
||||
),
|
||||
)
|
||||
logger.info(
|
||||
f"Rotated document {doc.id} by {degrees} degrees",
|
||||
)
|
||||
affected_docs.append(doc.id)
|
||||
except Exception as e:
|
||||
logger.exception(f"Error rotating document {root_doc.id}: {e}")
|
||||
logger.exception(f"Error rotating document {doc.id}: {e}")
|
||||
|
||||
if len(affected_docs) > 0:
|
||||
bulk_update_task = bulk_update_documents.si(document_ids=affected_docs)
|
||||
chord(header=rotate_tasks, body=bulk_update_task).delay()
|
||||
|
||||
return "OK"
|
||||
|
||||
@@ -655,62 +584,30 @@ def split(
|
||||
return "OK"
|
||||
|
||||
|
||||
def delete_pages(
|
||||
doc_ids: list[int],
|
||||
pages: list[int],
|
||||
*,
|
||||
user: User | None = None,
|
||||
) -> Literal["OK"]:
|
||||
def delete_pages(doc_ids: list[int], pages: list[int]) -> Literal["OK"]:
|
||||
logger.info(
|
||||
f"Attempting to delete pages {pages} from {len(doc_ids)} documents",
|
||||
)
|
||||
doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
|
||||
root_doc: Document
|
||||
if doc.root_document_id is None or doc.root_document is None:
|
||||
root_doc = doc
|
||||
else:
|
||||
root_doc = doc.root_document
|
||||
|
||||
source_doc = (
|
||||
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
if source_doc is None:
|
||||
source_doc = root_doc
|
||||
doc = Document.objects.get(id=doc_ids[0])
|
||||
pages = sorted(pages) # sort pages to avoid index issues
|
||||
import pikepdf
|
||||
|
||||
try:
|
||||
# Produce edited PDF to a temp file and create a new version
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{root_doc.id}_pages_deleted.pdf"
|
||||
)
|
||||
with pikepdf.open(source_doc.source_path) as pdf:
|
||||
with pikepdf.open(doc.source_path, allow_overwriting_input=True) as pdf:
|
||||
offset = 1 # pages are 1-indexed
|
||||
for page_num in pages:
|
||||
pdf.pages.remove(pdf.pages[page_num - offset])
|
||||
offset += 1 # remove() changes the index of the pages
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(filepath)
|
||||
|
||||
overrides = DocumentMetadataOverrides().from_document(root_doc)
|
||||
if user is not None:
|
||||
overrides.actor_id = user.id
|
||||
consume_file.delay(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
root_document_id=root_doc.id,
|
||||
),
|
||||
overrides,
|
||||
)
|
||||
logger.info(
|
||||
f"Queued new version for document {root_doc.id} after deleting pages {pages}",
|
||||
)
|
||||
pdf.save()
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
if doc.page_count is not None:
|
||||
doc.page_count = doc.page_count - len(pages)
|
||||
doc.save()
|
||||
update_document_content_maybe_archive_file.delay(document_id=doc.id)
|
||||
logger.info(f"Deleted pages {pages} from document {doc.id}")
|
||||
except Exception as e:
|
||||
logger.exception(f"Error deleting pages from document {root_doc.id}: {e}")
|
||||
logger.exception(f"Error deleting pages from document {doc.id}: {e}")
|
||||
|
||||
return "OK"
|
||||
|
||||
@@ -735,26 +632,13 @@ def edit_pdf(
|
||||
logger.info(
|
||||
f"Editing PDF of document {doc_ids[0]} with {len(operations)} operations",
|
||||
)
|
||||
doc = Document.objects.select_related("root_document").get(id=doc_ids[0])
|
||||
root_doc: Document
|
||||
if doc.root_document_id is None or doc.root_document is None:
|
||||
root_doc = doc
|
||||
else:
|
||||
root_doc = doc.root_document
|
||||
|
||||
source_doc = (
|
||||
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
if source_doc is None:
|
||||
source_doc = root_doc
|
||||
doc = Document.objects.get(id=doc_ids[0])
|
||||
import pikepdf
|
||||
|
||||
pdf_docs: list[pikepdf.Pdf] = []
|
||||
|
||||
try:
|
||||
with pikepdf.open(source_doc.source_path) as src:
|
||||
with pikepdf.open(doc.source_path) as src:
|
||||
# prepare output documents
|
||||
max_idx = max(op.get("doc", 0) for op in operations)
|
||||
pdf_docs = [pikepdf.new() for _ in range(max_idx + 1)]
|
||||
@@ -773,56 +657,42 @@ def edit_pdf(
|
||||
dst.pages[-1].rotate(op["rotate"], relative=True)
|
||||
|
||||
if update_document:
|
||||
# Create a new version from the edited PDF rather than replacing in-place
|
||||
temp_path = doc.source_path.with_suffix(".tmp.pdf")
|
||||
pdf = pdf_docs[0]
|
||||
pdf.remove_unreferenced_resources()
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{root_doc.id}_edited.pdf"
|
||||
)
|
||||
pdf.save(filepath)
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(root_doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
overrides.actor_id = user.id
|
||||
consume_file.delay(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
root_document_id=root_doc.id,
|
||||
),
|
||||
overrides,
|
||||
)
|
||||
# save the edited PDF to a temporary file in case of errors
|
||||
pdf.save(temp_path)
|
||||
# replace the original document with the edited one
|
||||
temp_path.replace(doc.source_path)
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
doc.page_count = len(pdf.pages)
|
||||
doc.save()
|
||||
update_document_content_maybe_archive_file.delay(document_id=doc.id)
|
||||
else:
|
||||
consume_tasks = []
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(root_doc)
|
||||
DocumentMetadataOverrides().from_document(doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
overrides.actor_id = user.id
|
||||
if not delete_original:
|
||||
overrides.skip_asn_if_exists = True
|
||||
if delete_original and len(pdf_docs) == 1:
|
||||
overrides.asn = root_doc.archive_serial_number
|
||||
overrides.asn = doc.archive_serial_number
|
||||
for idx, pdf in enumerate(pdf_docs, start=1):
|
||||
version_filepath: Path = (
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{root_doc.id}_edit_{idx}.pdf"
|
||||
/ f"{doc.id}_edit_{idx}.pdf"
|
||||
)
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(version_filepath)
|
||||
pdf.save(filepath)
|
||||
consume_tasks.append(
|
||||
consume_file.s(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=version_filepath,
|
||||
original_file=filepath,
|
||||
),
|
||||
overrides,
|
||||
),
|
||||
@@ -844,7 +714,7 @@ def edit_pdf(
|
||||
group(consume_tasks).delay()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(f"Error editing document {root_doc.id}: {e}")
|
||||
logger.exception(f"Error editing document {doc.id}: {e}")
|
||||
raise ValueError(
|
||||
f"An error occurred while editing the document: {e}",
|
||||
) from e
|
||||
@@ -867,61 +737,38 @@ def remove_password(
|
||||
import pikepdf
|
||||
|
||||
for doc_id in doc_ids:
|
||||
doc = Document.objects.select_related("root_document").get(id=doc_id)
|
||||
root_doc: Document
|
||||
if doc.root_document_id is None or doc.root_document is None:
|
||||
root_doc = doc
|
||||
else:
|
||||
root_doc = doc.root_document
|
||||
|
||||
source_doc = (
|
||||
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
if source_doc is None:
|
||||
source_doc = root_doc
|
||||
doc = Document.objects.get(id=doc_id)
|
||||
try:
|
||||
logger.info(
|
||||
f"Attempting password removal from document {doc_ids[0]}",
|
||||
)
|
||||
with pikepdf.open(source_doc.source_path, password=password) as pdf:
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{root_doc.id}_unprotected.pdf"
|
||||
)
|
||||
with pikepdf.open(doc.source_path, password=password) as pdf:
|
||||
temp_path = doc.source_path.with_suffix(".tmp.pdf")
|
||||
pdf.remove_unreferenced_resources()
|
||||
pdf.save(filepath)
|
||||
pdf.save(temp_path)
|
||||
|
||||
if update_document:
|
||||
# Create a new version rather than modifying the root/original in place.
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(root_doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
overrides.actor_id = user.id
|
||||
consume_file.delay(
|
||||
ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=filepath,
|
||||
root_document_id=root_doc.id,
|
||||
),
|
||||
overrides,
|
||||
)
|
||||
# replace the original document with the unprotected one
|
||||
temp_path.replace(doc.source_path)
|
||||
doc.checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
doc.page_count = len(pdf.pages)
|
||||
doc.save()
|
||||
update_document_content_maybe_archive_file.delay(document_id=doc.id)
|
||||
else:
|
||||
consume_tasks = []
|
||||
overrides = (
|
||||
DocumentMetadataOverrides().from_document(root_doc)
|
||||
DocumentMetadataOverrides().from_document(doc)
|
||||
if include_metadata
|
||||
else DocumentMetadataOverrides()
|
||||
)
|
||||
if user is not None:
|
||||
overrides.owner_id = user.id
|
||||
overrides.actor_id = user.id
|
||||
|
||||
filepath: Path = (
|
||||
Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR))
|
||||
/ f"{doc.id}_unprotected.pdf"
|
||||
)
|
||||
temp_path.replace(filepath)
|
||||
consume_tasks.append(
|
||||
consume_file.s(
|
||||
ConsumableDocument(
|
||||
@@ -933,17 +780,12 @@ def remove_password(
|
||||
)
|
||||
|
||||
if delete_original:
|
||||
chord(
|
||||
header=consume_tasks,
|
||||
body=delete.si([doc.id]),
|
||||
).delay()
|
||||
chord(header=consume_tasks, body=delete.si([doc.id])).delay()
|
||||
else:
|
||||
group(consume_tasks).delay()
|
||||
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Error removing password from document {root_doc.id}: {e}",
|
||||
)
|
||||
logger.exception(f"Error removing password from document {doc.id}: {e}")
|
||||
raise ValueError(
|
||||
f"An error occurred while removing the password: {e}",
|
||||
) from e
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from typing import Any
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
@@ -13,7 +12,6 @@ from documents.caching import CLASSIFIER_VERSION_KEY
|
||||
from documents.caching import get_thumbnail_modified_key
|
||||
from documents.classifier import DocumentClassifier
|
||||
from documents.models import Document
|
||||
from documents.versioning import resolve_effective_document_by_pk
|
||||
|
||||
|
||||
def suggestions_etag(request, pk: int) -> str | None:
|
||||
@@ -73,10 +71,12 @@ def metadata_etag(request, pk: int) -> str | None:
|
||||
Metadata is extracted from the original file, so use its checksum as the
|
||||
ETag
|
||||
"""
|
||||
doc = resolve_effective_document_by_pk(pk, request).document
|
||||
if doc is None:
|
||||
try:
|
||||
doc = Document.objects.only("checksum").get(pk=pk)
|
||||
return doc.checksum
|
||||
except Document.DoesNotExist: # pragma: no cover
|
||||
return None
|
||||
return doc.checksum
|
||||
return None
|
||||
|
||||
|
||||
def metadata_last_modified(request, pk: int) -> datetime | None:
|
||||
@@ -85,25 +85,28 @@ def metadata_last_modified(request, pk: int) -> datetime | None:
|
||||
not the modification of the original file, but of the database object, but might as well
|
||||
error on the side of more cautious
|
||||
"""
|
||||
doc = resolve_effective_document_by_pk(pk, request).document
|
||||
if doc is None:
|
||||
try:
|
||||
doc = Document.objects.only("modified").get(pk=pk)
|
||||
return doc.modified
|
||||
except Document.DoesNotExist: # pragma: no cover
|
||||
return None
|
||||
return doc.modified
|
||||
return None
|
||||
|
||||
|
||||
def preview_etag(request, pk: int) -> str | None:
|
||||
"""
|
||||
ETag for the document preview, using the original or archive checksum, depending on the request
|
||||
"""
|
||||
doc = resolve_effective_document_by_pk(pk, request).document
|
||||
if doc is None:
|
||||
try:
|
||||
doc = Document.objects.only("checksum", "archive_checksum").get(pk=pk)
|
||||
use_original = (
|
||||
"original" in request.query_params
|
||||
and request.query_params["original"] == "true"
|
||||
)
|
||||
return doc.checksum if use_original else doc.archive_checksum
|
||||
except Document.DoesNotExist: # pragma: no cover
|
||||
return None
|
||||
use_original = (
|
||||
hasattr(request, "query_params")
|
||||
and "original" in request.query_params
|
||||
and request.query_params["original"] == "true"
|
||||
)
|
||||
return doc.checksum if use_original else doc.archive_checksum
|
||||
return None
|
||||
|
||||
|
||||
def preview_last_modified(request, pk: int) -> datetime | None:
|
||||
@@ -111,25 +114,24 @@ def preview_last_modified(request, pk: int) -> datetime | None:
|
||||
Uses the documents modified time to set the Last-Modified header. Not strictly
|
||||
speaking correct, but close enough and quick
|
||||
"""
|
||||
doc = resolve_effective_document_by_pk(pk, request).document
|
||||
if doc is None:
|
||||
try:
|
||||
doc = Document.objects.only("modified").get(pk=pk)
|
||||
return doc.modified
|
||||
except Document.DoesNotExist: # pragma: no cover
|
||||
return None
|
||||
return doc.modified
|
||||
return None
|
||||
|
||||
|
||||
def thumbnail_last_modified(request: Any, pk: int) -> datetime | None:
|
||||
def thumbnail_last_modified(request, pk: int) -> datetime | None:
|
||||
"""
|
||||
Returns the filesystem last modified either from cache or from filesystem.
|
||||
Cache should be (slightly?) faster than filesystem
|
||||
"""
|
||||
try:
|
||||
doc = resolve_effective_document_by_pk(pk, request).document
|
||||
if doc is None:
|
||||
return None
|
||||
doc = Document.objects.only("pk").get(pk=pk)
|
||||
if not doc.thumbnail_path.exists():
|
||||
return None
|
||||
# Use the effective document id for cache key
|
||||
doc_key = get_thumbnail_modified_key(doc.id)
|
||||
doc_key = get_thumbnail_modified_key(pk)
|
||||
|
||||
cache_hit = cache.get(doc_key)
|
||||
if cache_hit is not None:
|
||||
@@ -143,5 +145,5 @@ def thumbnail_last_modified(request: Any, pk: int) -> datetime | None:
|
||||
)
|
||||
cache.set(doc_key, last_modified, CACHE_50_MINUTES)
|
||||
return last_modified
|
||||
except (Document.DoesNotExist, OSError): # pragma: no cover
|
||||
except Document.DoesNotExist: # pragma: no cover
|
||||
return None
|
||||
|
||||
@@ -102,12 +102,6 @@ class ConsumerStatusShortMessage(str, Enum):
|
||||
|
||||
|
||||
class ConsumerPluginMixin:
|
||||
if TYPE_CHECKING:
|
||||
from logging import Logger
|
||||
from logging import LoggerAdapter
|
||||
|
||||
log: "LoggerAdapter" # type: ignore[type-arg]
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
input_doc: ConsumableDocument,
|
||||
@@ -122,22 +116,6 @@ class ConsumerPluginMixin:
|
||||
|
||||
self.filename = self.metadata.filename or self.input_doc.original_file.name
|
||||
|
||||
if input_doc.root_document_id:
|
||||
self.log.debug(
|
||||
f"Document root document id: {input_doc.root_document_id}",
|
||||
)
|
||||
root_document = Document.objects.get(pk=input_doc.root_document_id)
|
||||
version_index = Document.objects.filter(root_document=root_document).count()
|
||||
filename_path = Path(self.filename)
|
||||
if filename_path.suffix:
|
||||
self.filename = str(
|
||||
filename_path.with_name(
|
||||
f"{filename_path.stem}_v{version_index}{filename_path.suffix}",
|
||||
),
|
||||
)
|
||||
else:
|
||||
self.filename = f"{self.filename}_v{version_index}"
|
||||
|
||||
def _send_progress(
|
||||
self,
|
||||
current_progress: int,
|
||||
@@ -183,41 +161,6 @@ class ConsumerPlugin(
|
||||
):
|
||||
logging_name = LOGGING_NAME
|
||||
|
||||
def _clone_root_into_version(
|
||||
self,
|
||||
root_doc: Document,
|
||||
*,
|
||||
text: str | None,
|
||||
page_count: int | None,
|
||||
mime_type: str,
|
||||
) -> Document:
|
||||
self.log.debug("Saving record for updated version to database")
|
||||
version_doc = Document.objects.get(pk=root_doc.pk)
|
||||
setattr(version_doc, "pk", None)
|
||||
version_doc.root_document = root_doc
|
||||
file_for_checksum = (
|
||||
self.unmodified_original
|
||||
if self.unmodified_original is not None
|
||||
else self.working_copy
|
||||
)
|
||||
version_doc.checksum = hashlib.md5(
|
||||
file_for_checksum.read_bytes(),
|
||||
).hexdigest()
|
||||
version_doc.content = text or ""
|
||||
version_doc.page_count = page_count
|
||||
version_doc.mime_type = mime_type
|
||||
version_doc.original_filename = self.filename
|
||||
version_doc.storage_path = root_doc.storage_path
|
||||
# Clear unique file path fields so they can be generated uniquely later
|
||||
version_doc.filename = None
|
||||
version_doc.archive_filename = None
|
||||
version_doc.archive_checksum = None
|
||||
if self.metadata.version_label is not None:
|
||||
version_doc.version_label = self.metadata.version_label
|
||||
version_doc.added = timezone.now()
|
||||
version_doc.modified = timezone.now()
|
||||
return version_doc
|
||||
|
||||
def run_pre_consume_script(self) -> None:
|
||||
"""
|
||||
If one is configured and exists, run the pre-consume script and
|
||||
@@ -534,65 +477,12 @@ class ConsumerPlugin(
|
||||
try:
|
||||
with transaction.atomic():
|
||||
# store the document.
|
||||
if self.input_doc.root_document_id:
|
||||
# If this is a new version of an existing document, we need
|
||||
# to make sure we're not creating a new document, but updating
|
||||
# the existing one.
|
||||
root_doc = Document.objects.get(
|
||||
pk=self.input_doc.root_document_id,
|
||||
)
|
||||
original_document = self._clone_root_into_version(
|
||||
root_doc,
|
||||
text=text,
|
||||
page_count=page_count,
|
||||
mime_type=mime_type,
|
||||
)
|
||||
actor = None
|
||||
|
||||
# Save the new version, potentially creating an audit log entry for the version addition if enabled.
|
||||
if (
|
||||
settings.AUDIT_LOG_ENABLED
|
||||
and self.metadata.actor_id is not None
|
||||
):
|
||||
actor = User.objects.filter(pk=self.metadata.actor_id).first()
|
||||
if actor is not None:
|
||||
from auditlog.context import ( # type: ignore[import-untyped]
|
||||
set_actor,
|
||||
)
|
||||
|
||||
with set_actor(actor):
|
||||
original_document.save()
|
||||
else:
|
||||
original_document.save()
|
||||
else:
|
||||
original_document.save()
|
||||
|
||||
# Create a log entry for the version addition, if enabled
|
||||
if settings.AUDIT_LOG_ENABLED:
|
||||
from auditlog.models import ( # type: ignore[import-untyped]
|
||||
LogEntry,
|
||||
)
|
||||
|
||||
LogEntry.objects.log_create(
|
||||
instance=root_doc,
|
||||
changes={
|
||||
"Version Added": ["None", original_document.id],
|
||||
},
|
||||
action=LogEntry.Action.UPDATE,
|
||||
actor=actor,
|
||||
additional_data={
|
||||
"reason": "Version added",
|
||||
"version_id": original_document.id,
|
||||
},
|
||||
)
|
||||
document = original_document
|
||||
else:
|
||||
document = self._store(
|
||||
text=text,
|
||||
date=date,
|
||||
page_count=page_count,
|
||||
mime_type=mime_type,
|
||||
)
|
||||
document = self._store(
|
||||
text=text,
|
||||
date=date,
|
||||
page_count=page_count,
|
||||
mime_type=mime_type,
|
||||
)
|
||||
|
||||
# If we get here, it was successful. Proceed with post-consume
|
||||
# hooks. If they fail, nothing will get changed.
|
||||
@@ -810,9 +700,6 @@ class ConsumerPlugin(
|
||||
if self.metadata.asn is not None:
|
||||
document.archive_serial_number = self.metadata.asn
|
||||
|
||||
if self.metadata.version_label is not None:
|
||||
document.version_label = self.metadata.version_label
|
||||
|
||||
if self.metadata.owner_id:
|
||||
document.owner = User.objects.get(
|
||||
pk=self.metadata.owner_id,
|
||||
|
||||
@@ -31,8 +31,6 @@ class DocumentMetadataOverrides:
|
||||
change_groups: list[int] | None = None
|
||||
custom_fields: dict | None = None
|
||||
skip_asn_if_exists: bool = False
|
||||
version_label: str | None = None
|
||||
actor_id: int | None = None
|
||||
|
||||
def update(self, other: "DocumentMetadataOverrides") -> "DocumentMetadataOverrides":
|
||||
"""
|
||||
@@ -52,12 +50,8 @@ class DocumentMetadataOverrides:
|
||||
self.storage_path_id = other.storage_path_id
|
||||
if other.owner_id is not None:
|
||||
self.owner_id = other.owner_id
|
||||
if other.actor_id is not None:
|
||||
self.actor_id = other.actor_id
|
||||
if other.skip_asn_if_exists:
|
||||
self.skip_asn_if_exists = True
|
||||
if other.version_label is not None:
|
||||
self.version_label = other.version_label
|
||||
|
||||
# merge
|
||||
if self.tag_ids is None:
|
||||
@@ -166,7 +160,6 @@ class ConsumableDocument:
|
||||
|
||||
source: DocumentSource
|
||||
original_file: Path
|
||||
root_document_id: int | None = None
|
||||
original_path: Path | None = None
|
||||
mailrule_id: int | None = None
|
||||
mime_type: str = dataclasses.field(init=False, default=None)
|
||||
|
||||
@@ -6,10 +6,8 @@ import json
|
||||
import operator
|
||||
from contextlib import contextmanager
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import FieldError
|
||||
from django.db.models import Case
|
||||
from django.db.models import CharField
|
||||
from django.db.models import Count
|
||||
@@ -162,37 +160,14 @@ class InboxFilter(Filter):
|
||||
|
||||
@extend_schema_field(serializers.CharField)
|
||||
class TitleContentFilter(Filter):
|
||||
def filter(self, qs: Any, value: Any) -> Any:
|
||||
def filter(self, qs, value):
|
||||
value = value.strip() if isinstance(value, str) else value
|
||||
if value:
|
||||
try:
|
||||
return qs.filter(
|
||||
Q(title__icontains=value) | Q(effective_content__icontains=value),
|
||||
)
|
||||
except FieldError:
|
||||
return qs.filter(
|
||||
Q(title__icontains=value) | Q(content__icontains=value),
|
||||
)
|
||||
return qs.filter(Q(title__icontains=value) | Q(content__icontains=value))
|
||||
else:
|
||||
return qs
|
||||
|
||||
|
||||
@extend_schema_field(serializers.CharField)
|
||||
class EffectiveContentFilter(Filter):
|
||||
def filter(self, qs: Any, value: Any) -> Any:
|
||||
value = value.strip() if isinstance(value, str) else value
|
||||
if not value:
|
||||
return qs
|
||||
try:
|
||||
return qs.filter(
|
||||
**{f"effective_content__{self.lookup_expr}": value},
|
||||
)
|
||||
except FieldError:
|
||||
return qs.filter(
|
||||
**{f"content__{self.lookup_expr}": value},
|
||||
)
|
||||
|
||||
|
||||
@extend_schema_field(serializers.BooleanField)
|
||||
class SharedByUser(Filter):
|
||||
def filter(self, qs, value):
|
||||
@@ -749,11 +724,6 @@ class DocumentFilterSet(FilterSet):
|
||||
|
||||
title_content = TitleContentFilter()
|
||||
|
||||
content__istartswith = EffectiveContentFilter(lookup_expr="istartswith")
|
||||
content__iendswith = EffectiveContentFilter(lookup_expr="iendswith")
|
||||
content__icontains = EffectiveContentFilter(lookup_expr="icontains")
|
||||
content__iexact = EffectiveContentFilter(lookup_expr="iexact")
|
||||
|
||||
owner__id__none = ObjectFilter(field_name="owner", exclude=True)
|
||||
|
||||
custom_fields__icontains = CustomFieldsFilter()
|
||||
@@ -794,6 +764,7 @@ class DocumentFilterSet(FilterSet):
|
||||
fields = {
|
||||
"id": ID_KWARGS,
|
||||
"title": CHAR_KWARGS,
|
||||
"content": CHAR_KWARGS,
|
||||
"archive_serial_number": INT_KWARGS,
|
||||
"created": DATE_KWARGS,
|
||||
"added": DATETIME_KWARGS,
|
||||
|
||||
@@ -158,11 +158,7 @@ def open_index_searcher() -> Searcher:
|
||||
searcher.close()
|
||||
|
||||
|
||||
def update_document(
|
||||
writer: AsyncWriter,
|
||||
doc: Document,
|
||||
effective_content: str | None = None,
|
||||
) -> None:
|
||||
def update_document(writer: AsyncWriter, doc: Document) -> None:
|
||||
tags = ",".join([t.name for t in doc.tags.all()])
|
||||
tags_ids = ",".join([str(t.id) for t in doc.tags.all()])
|
||||
notes = ",".join([str(c.note) for c in Note.objects.filter(document=doc)])
|
||||
@@ -192,7 +188,7 @@ def update_document(
|
||||
writer.update_document(
|
||||
id=doc.pk,
|
||||
title=doc.title,
|
||||
content=effective_content or doc.content,
|
||||
content=doc.content,
|
||||
correspondent=doc.correspondent.name if doc.correspondent else None,
|
||||
correspondent_id=doc.correspondent.id if doc.correspondent else None,
|
||||
has_correspondent=doc.correspondent is not None,
|
||||
@@ -235,12 +231,9 @@ def remove_document_by_id(writer: AsyncWriter, doc_id) -> None:
|
||||
writer.delete_by_term("id", doc_id)
|
||||
|
||||
|
||||
def add_or_update_document(
|
||||
document: Document,
|
||||
effective_content: str | None = None,
|
||||
) -> None:
|
||||
def add_or_update_document(document: Document) -> None:
|
||||
with open_index_writer() as writer:
|
||||
update_document(writer, document, effective_content=effective_content)
|
||||
update_document(writer, document)
|
||||
|
||||
|
||||
def remove_document_from_index(document: Document) -> None:
|
||||
|
||||
@@ -1,320 +0,0 @@
|
||||
"""
|
||||
Base command class for Paperless-ngx management commands.
|
||||
|
||||
Provides automatic progress bar and multiprocessing support with minimal boilerplate.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from collections.abc import Iterable
|
||||
from collections.abc import Sized
|
||||
from concurrent.futures import ProcessPoolExecutor
|
||||
from concurrent.futures import as_completed
|
||||
from dataclasses import dataclass
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from typing import ClassVar
|
||||
from typing import Generic
|
||||
from typing import TypeVar
|
||||
|
||||
from django import db
|
||||
from django.core.management import CommandError
|
||||
from django.db.models import QuerySet
|
||||
from django_rich.management import RichCommand
|
||||
from rich.console import Console
|
||||
from rich.progress import BarColumn
|
||||
from rich.progress import MofNCompleteColumn
|
||||
from rich.progress import Progress
|
||||
from rich.progress import SpinnerColumn
|
||||
from rich.progress import TextColumn
|
||||
from rich.progress import TimeElapsedColumn
|
||||
from rich.progress import TimeRemainingColumn
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Generator
|
||||
from collections.abc import Iterable
|
||||
from collections.abc import Sequence
|
||||
|
||||
from django.core.management import CommandParser
|
||||
|
||||
T = TypeVar("T")
|
||||
R = TypeVar("R")
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class ProcessResult(Generic[T, R]):
|
||||
"""
|
||||
Result of processing a single item in parallel.
|
||||
|
||||
Attributes:
|
||||
item: The input item that was processed.
|
||||
result: The return value from the processing function, or None if an error occurred.
|
||||
error: The exception if processing failed, or None on success.
|
||||
"""
|
||||
|
||||
item: T
|
||||
result: R | None
|
||||
error: BaseException | None
|
||||
|
||||
@property
|
||||
def success(self) -> bool:
|
||||
"""Return True if the item was processed successfully."""
|
||||
return self.error is None
|
||||
|
||||
|
||||
class PaperlessCommand(RichCommand):
|
||||
"""
|
||||
Base command class with automatic progress bar and multiprocessing support.
|
||||
|
||||
Features are opt-in via class attributes:
|
||||
supports_progress_bar: Adds --no-progress-bar argument (default: True)
|
||||
supports_multiprocessing: Adds --processes argument (default: False)
|
||||
|
||||
Example usage:
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
help = "Process all documents"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
documents = Document.objects.all()
|
||||
for doc in self.track(documents, description="Processing..."):
|
||||
process_document(doc)
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
help = "Regenerate thumbnails"
|
||||
supports_multiprocessing = True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
ids = list(Document.objects.values_list("id", flat=True))
|
||||
for result in self.process_parallel(process_doc, ids):
|
||||
if result.error:
|
||||
self.console.print(f"[red]Failed: {result.error}[/red]")
|
||||
"""
|
||||
|
||||
supports_progress_bar: ClassVar[bool] = True
|
||||
supports_multiprocessing: ClassVar[bool] = False
|
||||
|
||||
# Instance attributes set by execute() before handle() runs
|
||||
no_progress_bar: bool
|
||||
process_count: int
|
||||
|
||||
def add_arguments(self, parser: CommandParser) -> None:
|
||||
"""Add arguments based on supported features."""
|
||||
super().add_arguments(parser)
|
||||
|
||||
if self.supports_progress_bar:
|
||||
parser.add_argument(
|
||||
"--no-progress-bar",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Disable the progress bar",
|
||||
)
|
||||
|
||||
if self.supports_multiprocessing:
|
||||
default_processes = max(1, (os.cpu_count() or 1) // 4)
|
||||
parser.add_argument(
|
||||
"--processes",
|
||||
default=default_processes,
|
||||
type=int,
|
||||
help=f"Number of processes to use (default: {default_processes})",
|
||||
)
|
||||
|
||||
def execute(self, *args: Any, **options: Any) -> str | None:
|
||||
"""
|
||||
Set up instance state before handle() is called.
|
||||
|
||||
This is called by Django's command infrastructure after argument parsing
|
||||
but before handle(). We use it to set instance attributes from options.
|
||||
"""
|
||||
# Set progress bar state
|
||||
if self.supports_progress_bar:
|
||||
self.no_progress_bar = options.get("no_progress_bar", False)
|
||||
else:
|
||||
self.no_progress_bar = True
|
||||
|
||||
# Set multiprocessing state
|
||||
if self.supports_multiprocessing:
|
||||
self.process_count = options.get("processes", 1)
|
||||
if self.process_count < 1:
|
||||
raise CommandError("--processes must be at least 1")
|
||||
else:
|
||||
self.process_count = 1
|
||||
|
||||
return super().execute(*args, **options)
|
||||
|
||||
def _create_progress(self, description: str) -> Progress:
|
||||
"""
|
||||
Create a configured Progress instance.
|
||||
|
||||
Progress output is directed to stderr to match the convention that
|
||||
progress bars are transient UI feedback, not command output. This
|
||||
mirrors tqdm's default behavior and prevents progress bar rendering
|
||||
from interfering with stdout-based assertions in tests or piped
|
||||
command output.
|
||||
|
||||
Args:
|
||||
description: Text to display alongside the progress bar.
|
||||
|
||||
Returns:
|
||||
A Progress instance configured with appropriate columns.
|
||||
"""
|
||||
return Progress(
|
||||
SpinnerColumn(),
|
||||
TextColumn("[progress.description]{task.description}"),
|
||||
BarColumn(),
|
||||
MofNCompleteColumn(),
|
||||
TimeElapsedColumn(),
|
||||
TimeRemainingColumn(),
|
||||
console=Console(stderr=True),
|
||||
transient=False,
|
||||
)
|
||||
|
||||
def _get_iterable_length(self, iterable: Iterable[object]) -> int | None:
|
||||
"""
|
||||
Attempt to determine the length of an iterable without consuming it.
|
||||
|
||||
Tries .count() first (for Django querysets - executes SELECT COUNT(*)),
|
||||
then falls back to len() for sequences.
|
||||
|
||||
Args:
|
||||
iterable: The iterable to measure.
|
||||
|
||||
Returns:
|
||||
The length if determinable, None otherwise.
|
||||
"""
|
||||
if isinstance(iterable, QuerySet):
|
||||
return iterable.count()
|
||||
|
||||
if isinstance(iterable, Sized):
|
||||
return len(iterable)
|
||||
|
||||
return None
|
||||
|
||||
def track(
|
||||
self,
|
||||
iterable: Iterable[T],
|
||||
*,
|
||||
description: str = "Processing...",
|
||||
total: int | None = None,
|
||||
) -> Generator[T, None, None]:
|
||||
"""
|
||||
Iterate over items with an optional progress bar.
|
||||
|
||||
Respects --no-progress-bar flag. When disabled, simply yields items
|
||||
without any progress display.
|
||||
|
||||
Args:
|
||||
iterable: The items to iterate over.
|
||||
description: Text to display alongside the progress bar.
|
||||
total: Total number of items. If None, attempts to determine
|
||||
automatically via .count() (for querysets) or len().
|
||||
|
||||
Yields:
|
||||
Items from the iterable.
|
||||
|
||||
Example:
|
||||
for doc in self.track(documents, description="Renaming..."):
|
||||
process(doc)
|
||||
"""
|
||||
if self.no_progress_bar:
|
||||
yield from iterable
|
||||
return
|
||||
|
||||
# Attempt to determine total if not provided
|
||||
if total is None:
|
||||
total = self._get_iterable_length(iterable)
|
||||
|
||||
with self._create_progress(description) as progress:
|
||||
task_id = progress.add_task(description, total=total)
|
||||
for item in iterable:
|
||||
yield item
|
||||
progress.advance(task_id)
|
||||
|
||||
def process_parallel(
|
||||
self,
|
||||
fn: Callable[[T], R],
|
||||
items: Sequence[T],
|
||||
*,
|
||||
description: str = "Processing...",
|
||||
) -> Generator[ProcessResult[T, R], None, None]:
|
||||
"""
|
||||
Process items in parallel with progress tracking.
|
||||
|
||||
When --processes=1, runs sequentially in the main process without
|
||||
spawning subprocesses. This is critical for testing, as multiprocessing
|
||||
breaks fixtures, mocks, and database transactions.
|
||||
|
||||
When --processes > 1, uses ProcessPoolExecutor and automatically closes
|
||||
database connections before spawning workers (required for PostgreSQL).
|
||||
|
||||
Args:
|
||||
fn: Function to apply to each item. Must be picklable for parallel
|
||||
execution (i.e., defined at module level, not a lambda or closure).
|
||||
items: Sequence of items to process.
|
||||
description: Text to display alongside the progress bar.
|
||||
|
||||
Yields:
|
||||
ProcessResult for each item, containing the item, result, and any error.
|
||||
|
||||
Example:
|
||||
def regenerate_thumbnail(doc_id: int) -> Path:
|
||||
...
|
||||
|
||||
for result in self.process_parallel(regenerate_thumbnail, doc_ids):
|
||||
if result.error:
|
||||
self.console.print(f"[red]Failed {result.item}[/red]")
|
||||
"""
|
||||
total = len(items)
|
||||
|
||||
if self.process_count == 1:
|
||||
# Sequential execution in main process - critical for testing
|
||||
yield from self._process_sequential(fn, items, description, total)
|
||||
else:
|
||||
# Parallel execution with ProcessPoolExecutor
|
||||
yield from self._process_parallel(fn, items, description, total)
|
||||
|
||||
def _process_sequential(
|
||||
self,
|
||||
fn: Callable[[T], R],
|
||||
items: Sequence[T],
|
||||
description: str,
|
||||
total: int,
|
||||
) -> Generator[ProcessResult[T, R], None, None]:
|
||||
"""Process items sequentially in the main process."""
|
||||
for item in self.track(items, description=description, total=total):
|
||||
try:
|
||||
result = fn(item)
|
||||
yield ProcessResult(item=item, result=result, error=None)
|
||||
except Exception as e:
|
||||
yield ProcessResult(item=item, result=None, error=e)
|
||||
|
||||
def _process_parallel(
|
||||
self,
|
||||
fn: Callable[[T], R],
|
||||
items: Sequence[T],
|
||||
description: str,
|
||||
total: int,
|
||||
) -> Generator[ProcessResult[T, R], None, None]:
|
||||
"""Process items in parallel using ProcessPoolExecutor."""
|
||||
# Close database connections before forking - required for PostgreSQL
|
||||
db.connections.close_all()
|
||||
|
||||
with self._create_progress(description) as progress:
|
||||
task_id = progress.add_task(description, total=total)
|
||||
|
||||
with ProcessPoolExecutor(max_workers=self.process_count) as executor:
|
||||
# Submit all tasks and map futures back to items
|
||||
future_to_item = {executor.submit(fn, item): item for item in items}
|
||||
|
||||
# Yield results as they complete
|
||||
for future in as_completed(future_to_item):
|
||||
item = future_to_item[future]
|
||||
try:
|
||||
result = future.result()
|
||||
yield ProcessResult(item=item, result=result, error=None)
|
||||
except Exception as e:
|
||||
yield ProcessResult(item=item, result=None, error=e)
|
||||
finally:
|
||||
progress.advance(task_id)
|
||||
@@ -1,15 +1,20 @@
|
||||
import logging
|
||||
import multiprocessing
|
||||
|
||||
import tqdm
|
||||
from django import db
|
||||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.mixins import MultiProcessMixin
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.models import Document
|
||||
from documents.tasks import update_document_content_maybe_archive_file
|
||||
|
||||
logger = logging.getLogger("paperless.management.archiver")
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||
help = (
|
||||
"Using the current classification model, assigns correspondents, tags "
|
||||
"and document types to all documents, effectively allowing you to "
|
||||
@@ -17,10 +22,7 @@ class Command(PaperlessCommand):
|
||||
"modified) after their initial import."
|
||||
)
|
||||
|
||||
supports_multiprocessing = True
|
||||
|
||||
def add_arguments(self, parser):
|
||||
super().add_arguments(parser)
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--overwrite",
|
||||
@@ -42,8 +44,13 @@ class Command(PaperlessCommand):
|
||||
"run on this specific document."
|
||||
),
|
||||
)
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
self.add_argument_processes_mixin(parser)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.handle_processes_mixin(**options)
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
|
||||
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
overwrite = options["overwrite"]
|
||||
@@ -53,21 +60,35 @@ class Command(PaperlessCommand):
|
||||
else:
|
||||
documents = Document.objects.all()
|
||||
|
||||
document_ids = [
|
||||
doc.id for doc in documents if overwrite or not doc.has_archive_version
|
||||
]
|
||||
document_ids = list(
|
||||
map(
|
||||
lambda doc: doc.id,
|
||||
filter(lambda d: overwrite or not d.has_archive_version, documents),
|
||||
),
|
||||
)
|
||||
|
||||
# Note to future self: this prevents django from reusing database
|
||||
# connections between processes, which is bad and does not work
|
||||
# with postgres.
|
||||
db.connections.close_all()
|
||||
|
||||
try:
|
||||
logging.getLogger().handlers[0].level = logging.ERROR
|
||||
|
||||
for result in self.process_parallel(
|
||||
update_document_content_maybe_archive_file,
|
||||
document_ids,
|
||||
description="Archiving...",
|
||||
):
|
||||
if result.error:
|
||||
self.console.print(
|
||||
f"[red]Failed document {result.item}: {result.error}[/red]",
|
||||
if self.process_count == 1:
|
||||
for doc_id in document_ids:
|
||||
update_document_content_maybe_archive_file(doc_id)
|
||||
else: # pragma: no cover
|
||||
with multiprocessing.Pool(self.process_count) as pool:
|
||||
list(
|
||||
tqdm.tqdm(
|
||||
pool.imap_unordered(
|
||||
update_document_content_maybe_archive_file,
|
||||
document_ids,
|
||||
),
|
||||
total=len(document_ids),
|
||||
disable=self.no_progress_bar,
|
||||
),
|
||||
)
|
||||
except KeyboardInterrupt: # pragma: no cover
|
||||
self.console.print("[yellow]Aborting...[/yellow]")
|
||||
except KeyboardInterrupt:
|
||||
self.stdout.write(self.style.NOTICE("Aborting..."))
|
||||
|
||||
@@ -1,20 +1,24 @@
|
||||
import dataclasses
|
||||
import multiprocessing
|
||||
from typing import Final
|
||||
|
||||
import rapidfuzz
|
||||
import tqdm
|
||||
from django.core.management import BaseCommand
|
||||
from django.core.management import CommandError
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.mixins import MultiProcessMixin
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.models import Document
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, slots=True)
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _WorkPackage:
|
||||
first_doc: Document
|
||||
second_doc: Document
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True, slots=True)
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class _WorkResult:
|
||||
doc_one_pk: int
|
||||
doc_two_pk: int
|
||||
@@ -27,23 +31,22 @@ class _WorkResult:
|
||||
def _process_and_match(work: _WorkPackage) -> _WorkResult:
|
||||
"""
|
||||
Does basic processing of document content, gets the basic ratio
|
||||
and returns the result package.
|
||||
and returns the result package
|
||||
"""
|
||||
# Normalize the string some, lower case, whitespace, etc
|
||||
first_string = rapidfuzz.utils.default_process(work.first_doc.content)
|
||||
second_string = rapidfuzz.utils.default_process(work.second_doc.content)
|
||||
|
||||
# Basic matching ratio
|
||||
match = rapidfuzz.fuzz.ratio(first_string, second_string)
|
||||
|
||||
return _WorkResult(work.first_doc.pk, work.second_doc.pk, match)
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||
help = "Searches for documents where the content almost matches"
|
||||
|
||||
supports_multiprocessing = True
|
||||
|
||||
def add_arguments(self, parser):
|
||||
super().add_arguments(parser)
|
||||
parser.add_argument(
|
||||
"--ratio",
|
||||
default=85.0,
|
||||
@@ -56,11 +59,16 @@ class Command(PaperlessCommand):
|
||||
action="store_true",
|
||||
help="If set, one document of matches above the ratio WILL BE DELETED",
|
||||
)
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
self.add_argument_processes_mixin(parser)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
RATIO_MIN: Final[float] = 0.0
|
||||
RATIO_MAX: Final[float] = 100.0
|
||||
|
||||
self.handle_processes_mixin(**options)
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
|
||||
if options["delete"]:
|
||||
self.stdout.write(
|
||||
self.style.WARNING(
|
||||
@@ -72,58 +80,66 @@ class Command(PaperlessCommand):
|
||||
checked_pairs: set[tuple[int, int]] = set()
|
||||
work_pkgs: list[_WorkPackage] = []
|
||||
|
||||
# Ratio is a float from 0.0 to 100.0
|
||||
if opt_ratio < RATIO_MIN or opt_ratio > RATIO_MAX:
|
||||
raise CommandError("The ratio must be between 0 and 100")
|
||||
|
||||
all_docs = Document.objects.all().order_by("id")
|
||||
|
||||
# Build work packages for processing
|
||||
for first_doc in all_docs:
|
||||
for second_doc in all_docs:
|
||||
# doc to doc is obviously not useful
|
||||
if first_doc.pk == second_doc.pk:
|
||||
continue
|
||||
# Skip empty documents (e.g. password-protected)
|
||||
if first_doc.content.strip() == "" or second_doc.content.strip() == "":
|
||||
continue
|
||||
# Skip matching which have already been matched together
|
||||
# doc 1 to doc 2 is the same as doc 2 to doc 1
|
||||
doc_1_to_doc_2 = (first_doc.pk, second_doc.pk)
|
||||
doc_2_to_doc_1 = doc_1_to_doc_2[::-1]
|
||||
if doc_1_to_doc_2 in checked_pairs or doc_2_to_doc_1 in checked_pairs:
|
||||
continue
|
||||
checked_pairs.update([doc_1_to_doc_2, doc_2_to_doc_1])
|
||||
# Actually something useful to work on now
|
||||
work_pkgs.append(_WorkPackage(first_doc, second_doc))
|
||||
|
||||
results: list[_WorkResult] = []
|
||||
# Don't spin up a pool of 1 process
|
||||
if self.process_count == 1:
|
||||
for work in self.track(work_pkgs, description="Matching..."):
|
||||
results = []
|
||||
for work in tqdm.tqdm(work_pkgs, disable=self.no_progress_bar):
|
||||
results.append(_process_and_match(work))
|
||||
else: # pragma: no cover
|
||||
for proc_result in self.process_parallel(
|
||||
_process_and_match,
|
||||
work_pkgs,
|
||||
description="Matching...",
|
||||
):
|
||||
if proc_result.error:
|
||||
self.console.print(
|
||||
f"[red]Failed: {proc_result.error}[/red]",
|
||||
)
|
||||
elif proc_result.result is not None:
|
||||
results.append(proc_result.result)
|
||||
|
||||
messages: list[str] = []
|
||||
maybe_delete_ids: list[int] = []
|
||||
for match_result in sorted(results):
|
||||
if match_result.ratio >= opt_ratio:
|
||||
messages.append(
|
||||
self.style.NOTICE(
|
||||
f"Document {match_result.doc_one_pk} fuzzy match"
|
||||
f" to {match_result.doc_two_pk}"
|
||||
f" (confidence {match_result.ratio:.3f})\n",
|
||||
with multiprocessing.Pool(processes=self.process_count) as pool:
|
||||
results = list(
|
||||
tqdm.tqdm(
|
||||
pool.imap_unordered(_process_and_match, work_pkgs),
|
||||
total=len(work_pkgs),
|
||||
disable=self.no_progress_bar,
|
||||
),
|
||||
)
|
||||
maybe_delete_ids.append(match_result.doc_two_pk)
|
||||
|
||||
# Check results
|
||||
messages = []
|
||||
maybe_delete_ids = []
|
||||
for result in sorted(results):
|
||||
if result.ratio >= opt_ratio:
|
||||
messages.append(
|
||||
self.style.NOTICE(
|
||||
f"Document {result.doc_one_pk} fuzzy match"
|
||||
f" to {result.doc_two_pk} (confidence {result.ratio:.3f})\n",
|
||||
),
|
||||
)
|
||||
maybe_delete_ids.append(result.doc_two_pk)
|
||||
|
||||
if len(messages) == 0:
|
||||
messages.append(self.style.SUCCESS("No matches found\n"))
|
||||
self.stdout.writelines(messages)
|
||||
|
||||
messages.append(
|
||||
self.style.SUCCESS("No matches found\n"),
|
||||
)
|
||||
self.stdout.writelines(
|
||||
messages,
|
||||
)
|
||||
if options["delete"]:
|
||||
self.stdout.write(
|
||||
self.style.NOTICE(
|
||||
|
||||
@@ -1,12 +1,25 @@
|
||||
import logging
|
||||
|
||||
import tqdm
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db.models.signals import post_save
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.models import Document
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
help = "Rename all documents"
|
||||
class Command(ProgressBarMixin, BaseCommand):
|
||||
help = "This will rename all documents to match the latest filename format."
|
||||
|
||||
def add_arguments(self, parser):
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
for document in self.track(Document.objects.all(), description="Renaming..."):
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
logging.getLogger().handlers[0].level = logging.ERROR
|
||||
|
||||
for document in tqdm.tqdm(
|
||||
Document.objects.all(),
|
||||
disable=self.no_progress_bar,
|
||||
):
|
||||
post_save.send(Document, instance=document, created=False)
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
import logging
|
||||
|
||||
import tqdm
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from documents.classifier import load_classifier
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.models import Document
|
||||
from documents.signals.handlers import set_correspondent
|
||||
from documents.signals.handlers import set_document_type
|
||||
@@ -11,7 +14,7 @@ from documents.signals.handlers import set_tags
|
||||
logger = logging.getLogger("paperless.management.retagger")
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
class Command(ProgressBarMixin, BaseCommand):
|
||||
help = (
|
||||
"Using the current classification model, assigns correspondents, tags "
|
||||
"and document types to all documents, effectively allowing you to "
|
||||
@@ -20,7 +23,6 @@ class Command(PaperlessCommand):
|
||||
)
|
||||
|
||||
def add_arguments(self, parser):
|
||||
super().add_arguments(parser)
|
||||
parser.add_argument("-c", "--correspondent", default=False, action="store_true")
|
||||
parser.add_argument("-T", "--tags", default=False, action="store_true")
|
||||
parser.add_argument("-t", "--document_type", default=False, action="store_true")
|
||||
@@ -32,7 +34,7 @@ class Command(PaperlessCommand):
|
||||
action="store_true",
|
||||
help=(
|
||||
"By default this command won't try to assign a correspondent "
|
||||
"if more than one matches the document. Use this flag if "
|
||||
"if more than one matches the document. Use this flag if "
|
||||
"you'd rather it just pick the first one it finds."
|
||||
),
|
||||
)
|
||||
@@ -47,6 +49,7 @@ class Command(PaperlessCommand):
|
||||
"and tags that do not match anymore due to changed rules."
|
||||
),
|
||||
)
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
parser.add_argument(
|
||||
"--suggest",
|
||||
default=False,
|
||||
@@ -65,6 +68,8 @@ class Command(PaperlessCommand):
|
||||
)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
|
||||
if options["inbox_only"]:
|
||||
queryset = Document.objects.filter(tags__is_inbox_tag=True)
|
||||
else:
|
||||
@@ -79,7 +84,7 @@ class Command(PaperlessCommand):
|
||||
|
||||
classifier = load_classifier()
|
||||
|
||||
for document in self.track(documents, description="Retagging..."):
|
||||
for document in tqdm.tqdm(documents, disable=self.no_progress_bar):
|
||||
if options["correspondent"]:
|
||||
set_correspondent(
|
||||
sender=None,
|
||||
@@ -117,7 +122,6 @@ class Command(PaperlessCommand):
|
||||
stdout=self.stdout,
|
||||
style_func=self.style,
|
||||
)
|
||||
|
||||
if options["storage_path"]:
|
||||
set_storage_path(
|
||||
sender=None,
|
||||
|
||||
@@ -1,45 +1,43 @@
|
||||
import logging
|
||||
import multiprocessing
|
||||
import shutil
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
import tqdm
|
||||
from django import db
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
from documents.management.commands.mixins import MultiProcessMixin
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
from documents.models import Document
|
||||
from documents.parsers import get_parser_class_for_mime_type
|
||||
|
||||
logger = logging.getLogger("paperless.management.thumbnails")
|
||||
|
||||
|
||||
def _process_document(doc_id: int) -> None:
|
||||
def _process_document(doc_id) -> None:
|
||||
document: Document = Document.objects.get(id=doc_id)
|
||||
parser_class = get_parser_class_for_mime_type(document.mime_type)
|
||||
|
||||
if parser_class is None:
|
||||
logger.warning(
|
||||
"%s: No parser for mime type %s",
|
||||
document,
|
||||
document.mime_type,
|
||||
)
|
||||
if parser_class:
|
||||
parser = parser_class(logging_group=None)
|
||||
else:
|
||||
print(f"{document} No parser for mime type {document.mime_type}") # noqa: T201
|
||||
return
|
||||
|
||||
parser = parser_class(logging_group=None)
|
||||
|
||||
try:
|
||||
thumb = parser.get_thumbnail(
|
||||
document.source_path,
|
||||
document.mime_type,
|
||||
document.get_public_filename(),
|
||||
)
|
||||
|
||||
shutil.move(thumb, document.thumbnail_path)
|
||||
finally:
|
||||
parser.cleanup()
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
class Command(MultiProcessMixin, ProgressBarMixin, BaseCommand):
|
||||
help = "This will regenerate the thumbnails for all documents."
|
||||
|
||||
supports_multiprocessing = True
|
||||
|
||||
def add_arguments(self, parser) -> None:
|
||||
super().add_arguments(parser)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--document",
|
||||
@@ -51,23 +49,36 @@ class Command(PaperlessCommand):
|
||||
"run on this specific document."
|
||||
),
|
||||
)
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
self.add_argument_processes_mixin(parser)
|
||||
|
||||
def handle(self, *args, **options):
|
||||
logging.getLogger().handlers[0].level = logging.ERROR
|
||||
|
||||
self.handle_processes_mixin(**options)
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
|
||||
if options["document"]:
|
||||
documents = Document.objects.filter(pk=options["document"])
|
||||
else:
|
||||
documents = Document.objects.all()
|
||||
|
||||
ids = list(documents.values_list("id", flat=True))
|
||||
ids = [doc.id for doc in documents]
|
||||
|
||||
for result in self.process_parallel(
|
||||
_process_document,
|
||||
ids,
|
||||
description="Regenerating thumbnails...",
|
||||
):
|
||||
if result.error: # pragma: no cover
|
||||
self.console.print(
|
||||
f"[red]Failed document {result.item}: {result.error}[/red]",
|
||||
# Note to future self: this prevents django from reusing database
|
||||
# connections between processes, which is bad and does not work
|
||||
# with postgres.
|
||||
db.connections.close_all()
|
||||
|
||||
if self.process_count == 1:
|
||||
for doc_id in ids:
|
||||
_process_document(doc_id)
|
||||
else: # pragma: no cover
|
||||
with multiprocessing.Pool(processes=self.process_count) as pool:
|
||||
list(
|
||||
tqdm.tqdm(
|
||||
pool.imap_unordered(_process_document, ids),
|
||||
total=len(ids),
|
||||
disable=self.no_progress_bar,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -21,6 +21,26 @@ class CryptFields(TypedDict):
|
||||
fields: list[str]
|
||||
|
||||
|
||||
class MultiProcessMixin:
|
||||
"""
|
||||
Small class to handle adding an argument and validating it
|
||||
for the use of multiple processes
|
||||
"""
|
||||
|
||||
def add_argument_processes_mixin(self, parser: ArgumentParser) -> None:
|
||||
parser.add_argument(
|
||||
"--processes",
|
||||
default=max(1, os.cpu_count() // 4),
|
||||
type=int,
|
||||
help="Number of processes to distribute work amongst",
|
||||
)
|
||||
|
||||
def handle_processes_mixin(self, *args, **options) -> None:
|
||||
self.process_count = options["processes"]
|
||||
if self.process_count < 1:
|
||||
raise CommandError("There must be at least 1 process")
|
||||
|
||||
|
||||
class ProgressBarMixin:
|
||||
"""
|
||||
Many commands use a progress bar, which can be disabled
|
||||
|
||||
@@ -1,21 +1,27 @@
|
||||
from auditlog.models import LogEntry
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.db import transaction
|
||||
from tqdm import tqdm
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.mixins import ProgressBarMixin
|
||||
|
||||
|
||||
class Command(PaperlessCommand):
|
||||
"""Prune the audit logs of objects that no longer exist."""
|
||||
class Command(BaseCommand, ProgressBarMixin):
|
||||
"""
|
||||
Prune the audit logs of objects that no longer exist.
|
||||
"""
|
||||
|
||||
help = "Prunes the audit logs of objects that no longer exist."
|
||||
|
||||
def handle(self, *args, **options):
|
||||
def add_arguments(self, parser):
|
||||
self.add_argument_progress_bar_mixin(parser)
|
||||
|
||||
def handle(self, **options):
|
||||
self.handle_progress_bar_mixin(**options)
|
||||
with transaction.atomic():
|
||||
for log_entry in self.track(
|
||||
LogEntry.objects.all(),
|
||||
description="Pruning audit logs...",
|
||||
):
|
||||
for log_entry in tqdm(LogEntry.objects.all(), disable=self.no_progress_bar):
|
||||
model_class = log_entry.content_type.model_class()
|
||||
# use global_objects for SoftDeleteModel
|
||||
objects = (
|
||||
model_class.global_objects
|
||||
if hasattr(model_class, "global_objects")
|
||||
@@ -26,8 +32,8 @@ class Command(PaperlessCommand):
|
||||
and not objects.filter(pk=log_entry.object_id).exists()
|
||||
):
|
||||
log_entry.delete()
|
||||
self.console.print(
|
||||
f"Deleted audit log entry for "
|
||||
f"{model_class.__name__} #{log_entry.object_id}",
|
||||
style="yellow",
|
||||
tqdm.write(
|
||||
self.style.NOTICE(
|
||||
f"Deleted audit log entry for {model_class.__name__} #{log_entry.object_id}",
|
||||
),
|
||||
)
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
# Generated by Django 5.2.11 on 2026-02-14 19:19
|
||||
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0011_optimize_integer_field_sizes"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AlterField(
|
||||
model_name="workflowaction",
|
||||
name="type",
|
||||
field=models.PositiveSmallIntegerField(
|
||||
choices=[
|
||||
(1, "Assignment"),
|
||||
(2, "Removal"),
|
||||
(3, "Email"),
|
||||
(4, "Webhook"),
|
||||
(5, "Password removal"),
|
||||
(6, "Move to trash"),
|
||||
],
|
||||
default=1,
|
||||
verbose_name="Workflow Action Type",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -1,37 +0,0 @@
|
||||
# Generated by Django 5.1.6 on 2025-02-26 17:08
|
||||
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations
|
||||
from django.db import models
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
("documents", "0012_alter_workflowaction_type"),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="root_document",
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.CASCADE,
|
||||
related_name="versions",
|
||||
to="documents.document",
|
||||
verbose_name="root document for this version",
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name="document",
|
||||
name="version_label",
|
||||
field=models.CharField(
|
||||
blank=True,
|
||||
help_text="Optional short label for a document version.",
|
||||
max_length=64,
|
||||
null=True,
|
||||
verbose_name="version label",
|
||||
),
|
||||
),
|
||||
]
|
||||
@@ -155,7 +155,7 @@ class StoragePath(MatchingModel):
|
||||
verbose_name_plural = _("storage paths")
|
||||
|
||||
|
||||
class Document(SoftDeleteModel, ModelWithOwner): # type: ignore[django-manager-missing]
|
||||
class Document(SoftDeleteModel, ModelWithOwner):
|
||||
correspondent = models.ForeignKey(
|
||||
Correspondent,
|
||||
blank=True,
|
||||
@@ -308,23 +308,6 @@ class Document(SoftDeleteModel, ModelWithOwner): # type: ignore[django-manager-
|
||||
),
|
||||
)
|
||||
|
||||
root_document = models.ForeignKey(
|
||||
"self",
|
||||
blank=True,
|
||||
null=True,
|
||||
related_name="versions",
|
||||
on_delete=models.CASCADE,
|
||||
verbose_name=_("root document for this version"),
|
||||
)
|
||||
|
||||
version_label = models.CharField(
|
||||
_("version label"),
|
||||
max_length=64,
|
||||
blank=True,
|
||||
null=True,
|
||||
help_text=_("Optional short label for a document version."),
|
||||
)
|
||||
|
||||
class Meta:
|
||||
ordering = ("-created",)
|
||||
verbose_name = _("document")
|
||||
@@ -436,19 +419,6 @@ class Document(SoftDeleteModel, ModelWithOwner): # type: ignore[django-manager-
|
||||
tags_to_add = self.tags.model.objects.filter(id__in=tag_ids)
|
||||
self.tags.add(*tags_to_add)
|
||||
|
||||
def delete(
|
||||
self,
|
||||
*args,
|
||||
**kwargs,
|
||||
):
|
||||
# If deleting a root document, move all its versions to trash as well.
|
||||
if self.root_document_id is None:
|
||||
Document.objects.filter(root_document=self).delete()
|
||||
return super().delete(
|
||||
*args,
|
||||
**kwargs,
|
||||
)
|
||||
|
||||
|
||||
class SavedView(ModelWithOwner):
|
||||
class DisplayMode(models.TextChoices):
|
||||
@@ -1439,10 +1409,6 @@ class WorkflowAction(models.Model):
|
||||
5,
|
||||
_("Password removal"),
|
||||
)
|
||||
MOVE_TO_TRASH = (
|
||||
6,
|
||||
_("Move to trash"),
|
||||
)
|
||||
|
||||
type = models.PositiveSmallIntegerField(
|
||||
_("Workflow Action Type"),
|
||||
@@ -1746,5 +1712,5 @@ class WorkflowRun(SoftDeleteModel):
|
||||
verbose_name = _("workflow run")
|
||||
verbose_name_plural = _("workflow runs")
|
||||
|
||||
def __str__(self) -> str:
|
||||
def __str__(self):
|
||||
return f"WorkflowRun of {self.workflow} at {self.run_at} on {self.document}"
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import enum
|
||||
from collections.abc import Mapping
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from asgiref.sync import async_to_sync
|
||||
@@ -47,7 +48,7 @@ class BaseStatusManager:
|
||||
async_to_sync(self._channel.flush)
|
||||
self._channel = None
|
||||
|
||||
def send(self, payload: dict[str, str | int | None]) -> None:
|
||||
def send(self, payload: Mapping[str, object]) -> None:
|
||||
# Ensure the layer is open
|
||||
self.open()
|
||||
|
||||
@@ -73,26 +74,28 @@ class ProgressManager(BaseStatusManager):
|
||||
max_progress: int,
|
||||
extra_args: dict[str, str | int | None] | None = None,
|
||||
) -> None:
|
||||
payload = {
|
||||
"type": "status_update",
|
||||
"data": {
|
||||
"filename": self.filename,
|
||||
"task_id": self.task_id,
|
||||
"current_progress": current_progress,
|
||||
"max_progress": max_progress,
|
||||
"status": status,
|
||||
"message": message,
|
||||
},
|
||||
data: dict[str, object] = {
|
||||
"filename": self.filename,
|
||||
"task_id": self.task_id,
|
||||
"current_progress": current_progress,
|
||||
"max_progress": max_progress,
|
||||
"status": status,
|
||||
"message": message,
|
||||
}
|
||||
if extra_args is not None:
|
||||
payload["data"].update(extra_args)
|
||||
data.update(extra_args)
|
||||
|
||||
payload: dict[str, object] = {
|
||||
"type": "status_update",
|
||||
"data": data,
|
||||
}
|
||||
|
||||
self.send(payload)
|
||||
|
||||
|
||||
class DocumentsStatusManager(BaseStatusManager):
|
||||
def send_documents_deleted(self, documents: list[int]) -> None:
|
||||
payload = {
|
||||
payload: dict[str, object] = {
|
||||
"type": "documents_deleted",
|
||||
"data": {
|
||||
"documents": documents,
|
||||
@@ -100,3 +103,25 @@ class DocumentsStatusManager(BaseStatusManager):
|
||||
}
|
||||
|
||||
self.send(payload)
|
||||
|
||||
def send_document_updated(
|
||||
self,
|
||||
*,
|
||||
document_id: int,
|
||||
modified: str,
|
||||
owner_id: int | None = None,
|
||||
users_can_view: list[int] | None = None,
|
||||
groups_can_view: list[int] | None = None,
|
||||
) -> None:
|
||||
payload: dict[str, object] = {
|
||||
"type": "document_updated",
|
||||
"data": {
|
||||
"document_id": document_id,
|
||||
"modified": modified,
|
||||
"owner_id": owner_id,
|
||||
"users_can_view": users_can_view or [],
|
||||
"groups_can_view": groups_can_view or [],
|
||||
},
|
||||
}
|
||||
|
||||
self.send(payload)
|
||||
|
||||
@@ -9,7 +9,6 @@ from decimal import Decimal
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from typing import Literal
|
||||
from typing import TypedDict
|
||||
|
||||
import magic
|
||||
from celery import states
|
||||
@@ -91,8 +90,6 @@ if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
from django.db.models.query import QuerySet
|
||||
from rest_framework.relations import ManyRelatedField
|
||||
from rest_framework.relations import RelatedField
|
||||
|
||||
|
||||
logger = logging.getLogger("paperless.serializers")
|
||||
@@ -552,6 +549,28 @@ class ColorField(serializers.Field):
|
||||
return 1
|
||||
|
||||
|
||||
class TagSerializerVersion1(MatchingModelSerializer, OwnedObjectSerializer):
|
||||
colour = ColorField(source="color", default="#a6cee3")
|
||||
|
||||
class Meta:
|
||||
model = Tag
|
||||
fields = (
|
||||
"id",
|
||||
"slug",
|
||||
"name",
|
||||
"colour",
|
||||
"match",
|
||||
"matching_algorithm",
|
||||
"is_insensitive",
|
||||
"is_inbox_tag",
|
||||
"document_count",
|
||||
"owner",
|
||||
"permissions",
|
||||
"user_can_change",
|
||||
"set_permissions",
|
||||
)
|
||||
|
||||
|
||||
class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
|
||||
def get_text_color(self, obj) -> str:
|
||||
try:
|
||||
@@ -1028,7 +1047,6 @@ def _get_viewable_duplicates(
|
||||
duplicates = Document.global_objects.filter(
|
||||
Q(checksum__in=checksums) | Q(archive_checksum__in=checksums),
|
||||
).exclude(pk=document.pk)
|
||||
duplicates = duplicates.filter(root_document__isnull=True)
|
||||
duplicates = duplicates.order_by("-created")
|
||||
allowed = get_objects_for_user_owner_aware(
|
||||
user,
|
||||
@@ -1045,22 +1063,6 @@ class DuplicateDocumentSummarySerializer(serializers.Serializer):
|
||||
deleted_at = serializers.DateTimeField(allow_null=True)
|
||||
|
||||
|
||||
class DocumentVersionInfoSerializer(serializers.Serializer):
|
||||
id = serializers.IntegerField()
|
||||
added = serializers.DateTimeField()
|
||||
version_label = serializers.CharField(required=False, allow_null=True)
|
||||
checksum = serializers.CharField(required=False, allow_null=True)
|
||||
is_root = serializers.BooleanField()
|
||||
|
||||
|
||||
class _DocumentVersionInfo(TypedDict):
|
||||
id: int
|
||||
added: datetime
|
||||
version_label: str | None
|
||||
checksum: str | None
|
||||
is_root: bool
|
||||
|
||||
|
||||
@extend_schema_serializer(
|
||||
deprecate_fields=["created_date"],
|
||||
)
|
||||
@@ -1081,10 +1083,6 @@ class DocumentSerializer(
|
||||
duplicate_documents = SerializerMethodField()
|
||||
|
||||
notes = NotesSerializer(many=True, required=False, read_only=True)
|
||||
root_document: RelatedField[Document, Document, Any] | ManyRelatedField = (
|
||||
serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
)
|
||||
versions = SerializerMethodField()
|
||||
|
||||
custom_fields = CustomFieldInstanceSerializer(
|
||||
many=True,
|
||||
@@ -1118,44 +1116,6 @@ class DocumentSerializer(
|
||||
duplicates = _get_viewable_duplicates(obj, user)
|
||||
return list(duplicates.values("id", "title", "deleted_at"))
|
||||
|
||||
@extend_schema_field(DocumentVersionInfoSerializer(many=True))
|
||||
def get_versions(self, obj):
|
||||
root_doc = obj if obj.root_document_id is None else obj.root_document
|
||||
if root_doc is None:
|
||||
return []
|
||||
|
||||
prefetched_cache = getattr(obj, "_prefetched_objects_cache", None)
|
||||
prefetched_versions = (
|
||||
prefetched_cache.get("versions")
|
||||
if isinstance(prefetched_cache, dict)
|
||||
else None
|
||||
)
|
||||
|
||||
versions: list[Document]
|
||||
if prefetched_versions is not None:
|
||||
versions = [*prefetched_versions, root_doc]
|
||||
else:
|
||||
versions_qs = Document.objects.filter(root_document=root_doc).only(
|
||||
"id",
|
||||
"added",
|
||||
"checksum",
|
||||
"version_label",
|
||||
)
|
||||
versions = [*versions_qs, root_doc]
|
||||
|
||||
def build_info(doc: Document) -> _DocumentVersionInfo:
|
||||
return {
|
||||
"id": doc.id,
|
||||
"added": doc.added,
|
||||
"version_label": doc.version_label,
|
||||
"checksum": doc.checksum,
|
||||
"is_root": doc.id == root_doc.id,
|
||||
}
|
||||
|
||||
info = [build_info(doc) for doc in versions]
|
||||
info.sort(key=lambda item: item["id"], reverse=True)
|
||||
return info
|
||||
|
||||
def get_original_file_name(self, obj) -> str | None:
|
||||
return obj.original_filename
|
||||
|
||||
@@ -1167,8 +1127,6 @@ class DocumentSerializer(
|
||||
|
||||
def to_representation(self, instance):
|
||||
doc = super().to_representation(instance)
|
||||
if "content" in self.fields and hasattr(instance, "effective_content"):
|
||||
doc["content"] = getattr(instance, "effective_content") or ""
|
||||
if self.truncate_content and "content" in self.fields:
|
||||
doc["content"] = doc.get("content")[0:550]
|
||||
|
||||
@@ -1346,8 +1304,6 @@ class DocumentSerializer(
|
||||
"remove_inbox_tags",
|
||||
"page_count",
|
||||
"mime_type",
|
||||
"root_document",
|
||||
"versions",
|
||||
)
|
||||
list_serializer_class = OwnedObjectListSerializer
|
||||
|
||||
@@ -2042,38 +1998,6 @@ class PostDocumentSerializer(serializers.Serializer):
|
||||
return created.date()
|
||||
|
||||
|
||||
class DocumentVersionSerializer(serializers.Serializer):
|
||||
document = serializers.FileField(
|
||||
label="Document",
|
||||
write_only=True,
|
||||
)
|
||||
version_label = serializers.CharField(
|
||||
label="Version label",
|
||||
required=False,
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
max_length=64,
|
||||
)
|
||||
|
||||
validate_document = PostDocumentSerializer().validate_document
|
||||
|
||||
|
||||
class DocumentVersionLabelSerializer(serializers.Serializer):
|
||||
version_label = serializers.CharField(
|
||||
label="Version label",
|
||||
required=True,
|
||||
allow_blank=True,
|
||||
allow_null=True,
|
||||
max_length=64,
|
||||
)
|
||||
|
||||
def validate_version_label(self, value):
|
||||
if value is None:
|
||||
return None
|
||||
normalized = value.strip()
|
||||
return normalized or None
|
||||
|
||||
|
||||
class BulkDownloadSerializer(DocumentListSerializer):
|
||||
content = serializers.ChoiceField(
|
||||
choices=["archive", "originals", "both"],
|
||||
@@ -2273,7 +2197,7 @@ class TasksViewSerializer(OwnedObjectSerializer):
|
||||
return list(duplicates.values("id", "title", "deleted_at"))
|
||||
|
||||
|
||||
class RunTaskViewSerializer(serializers.Serializer[dict[str, Any]]):
|
||||
class RunTaskViewSerializer(serializers.Serializer):
|
||||
task_name = serializers.ChoiceField(
|
||||
choices=PaperlessTask.TaskName.choices,
|
||||
label="Task Name",
|
||||
@@ -2281,7 +2205,7 @@ class RunTaskViewSerializer(serializers.Serializer[dict[str, Any]]):
|
||||
)
|
||||
|
||||
|
||||
class AcknowledgeTasksViewSerializer(serializers.Serializer[dict[str, Any]]):
|
||||
class AcknowledgeTasksViewSerializer(serializers.Serializer):
|
||||
tasks = serializers.ListField(
|
||||
required=True,
|
||||
label="Tasks",
|
||||
|
||||
@@ -4,6 +4,7 @@ import logging
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
|
||||
from celery import shared_task
|
||||
from celery import states
|
||||
@@ -23,6 +24,7 @@ from django.db.models import Q
|
||||
from django.dispatch import receiver
|
||||
from django.utils import timezone
|
||||
from filelock import FileLock
|
||||
from rest_framework import serializers
|
||||
|
||||
from documents import matching
|
||||
from documents.caching import clear_document_caches
|
||||
@@ -45,10 +47,10 @@ from documents.models import WorkflowAction
|
||||
from documents.models import WorkflowRun
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.plugins.helpers import DocumentsStatusManager
|
||||
from documents.templating.utils import convert_format_str_to_template_format
|
||||
from documents.workflows.actions import build_workflow_action_context
|
||||
from documents.workflows.actions import execute_email_action
|
||||
from documents.workflows.actions import execute_move_to_trash_action
|
||||
from documents.workflows.actions import execute_password_removal_action
|
||||
from documents.workflows.actions import execute_webhook_action
|
||||
from documents.workflows.mutations import apply_assignment_to_document
|
||||
@@ -59,13 +61,12 @@ from documents.workflows.utils import get_workflows_for_trigger
|
||||
from paperless.config import AIConfig
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import uuid
|
||||
|
||||
from documents.classifier import DocumentClassifier
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
|
||||
logger = logging.getLogger("paperless.handlers")
|
||||
DRF_DATETIME_FIELD = serializers.DateTimeField()
|
||||
|
||||
|
||||
def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs) -> None:
|
||||
@@ -725,18 +726,12 @@ def add_to_index(sender, document, **kwargs) -> None:
|
||||
from documents import index
|
||||
|
||||
index.add_or_update_document(document)
|
||||
if document.root_document_id is not None and document.root_document is not None:
|
||||
# keep in sync when a new version is consumed.
|
||||
index.add_or_update_document(
|
||||
document.root_document,
|
||||
effective_content=document.content,
|
||||
)
|
||||
|
||||
|
||||
def run_workflows_added(
|
||||
sender,
|
||||
document: Document,
|
||||
logging_group: uuid.UUID | None = None,
|
||||
logging_group=None,
|
||||
original_file=None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
@@ -752,7 +747,7 @@ def run_workflows_added(
|
||||
def run_workflows_updated(
|
||||
sender,
|
||||
document: Document,
|
||||
logging_group: uuid.UUID | None = None,
|
||||
logging_group=None,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
run_workflows(
|
||||
@@ -762,11 +757,33 @@ def run_workflows_updated(
|
||||
)
|
||||
|
||||
|
||||
def send_websocket_document_updated(
|
||||
sender,
|
||||
document: Document,
|
||||
**kwargs,
|
||||
) -> None:
|
||||
# At this point, workflows may already have applied additional changes.
|
||||
document.refresh_from_db()
|
||||
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
|
||||
doc_overrides = DocumentMetadataOverrides.from_document(document)
|
||||
|
||||
with DocumentsStatusManager() as status_mgr:
|
||||
status_mgr.send_document_updated(
|
||||
document_id=document.id,
|
||||
modified=DRF_DATETIME_FIELD.to_representation(document.modified),
|
||||
owner_id=doc_overrides.owner_id,
|
||||
users_can_view=doc_overrides.view_users,
|
||||
groups_can_view=doc_overrides.view_groups,
|
||||
)
|
||||
|
||||
|
||||
def run_workflows(
|
||||
trigger_type: WorkflowTrigger.WorkflowTriggerType,
|
||||
document: Document | ConsumableDocument,
|
||||
workflow_to_run: Workflow | None = None,
|
||||
logging_group: uuid.UUID | None = None,
|
||||
logging_group=None,
|
||||
overrides: DocumentMetadataOverrides | None = None,
|
||||
original_file: Path | None = None,
|
||||
) -> tuple[DocumentMetadataOverrides, str] | None:
|
||||
@@ -792,33 +809,14 @@ def run_workflows(
|
||||
|
||||
for workflow in workflows:
|
||||
if not use_overrides:
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(document, Document)
|
||||
try:
|
||||
# This can be called from bulk_update_documents, which may be running multiple times
|
||||
# Refresh this so the matching data is fresh and instance fields are re-freshed
|
||||
# Otherwise, this instance might be behind and overwrite the work another process did
|
||||
document.refresh_from_db()
|
||||
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
|
||||
except Document.DoesNotExist:
|
||||
# Document was hard deleted by a previous workflow or another process
|
||||
logger.info(
|
||||
"Document no longer exists, skipping remaining workflows",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
break
|
||||
|
||||
# Check if document was soft deleted (moved to trash)
|
||||
if document.is_deleted:
|
||||
logger.info(
|
||||
"Document was moved to trash, skipping remaining workflows",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
break
|
||||
# This can be called from bulk_update_documents, which may be running multiple times
|
||||
# Refresh this so the matching data is fresh and instance fields are re-freshed
|
||||
# Otherwise, this instance might be behind and overwrite the work another process did
|
||||
document.refresh_from_db()
|
||||
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
|
||||
|
||||
if matching.document_matches_workflow(document, workflow, trigger_type):
|
||||
action: WorkflowAction
|
||||
has_move_to_trash_action = False
|
||||
for action in workflow.actions.order_by("order", "pk"):
|
||||
message = f"Applying {action} from {workflow}"
|
||||
if not use_overrides:
|
||||
@@ -862,8 +860,6 @@ def run_workflows(
|
||||
)
|
||||
elif action.type == WorkflowAction.WorkflowActionType.PASSWORD_REMOVAL:
|
||||
execute_password_removal_action(action, document, logging_group)
|
||||
elif action.type == WorkflowAction.WorkflowActionType.MOVE_TO_TRASH:
|
||||
has_move_to_trash_action = True
|
||||
|
||||
if not use_overrides:
|
||||
# limit title to 128 characters
|
||||
@@ -878,12 +874,7 @@ def run_workflows(
|
||||
document=document if not use_overrides else None,
|
||||
)
|
||||
|
||||
if has_move_to_trash_action:
|
||||
execute_move_to_trash_action(action, document, logging_group)
|
||||
|
||||
if use_overrides:
|
||||
if TYPE_CHECKING:
|
||||
assert overrides is not None
|
||||
return overrides, "\n".join(messages)
|
||||
|
||||
|
||||
@@ -1035,7 +1026,11 @@ def add_or_update_document_in_llm_index(sender, document, **kwargs):
|
||||
|
||||
|
||||
@receiver(models.signals.post_delete, sender=Document)
|
||||
def delete_document_from_llm_index(sender, instance: Document, **kwargs):
|
||||
def delete_document_from_llm_index(
|
||||
sender: Any,
|
||||
instance: Document,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
"""
|
||||
Delete a document from the LLM index when it is deleted.
|
||||
"""
|
||||
|
||||
@@ -60,6 +60,7 @@ from documents.sanity_checker import SanityCheckFailedException
|
||||
from documents.signals import document_updated
|
||||
from documents.signals.handlers import cleanup_document_deletion
|
||||
from documents.signals.handlers import run_workflows
|
||||
from documents.signals.handlers import send_websocket_document_updated
|
||||
from documents.workflows.utils import get_workflows_for_trigger
|
||||
from paperless.config import AIConfig
|
||||
from paperless_ai.indexing import llm_index_add_or_update_document
|
||||
@@ -156,22 +157,15 @@ def consume_file(
|
||||
if overrides is None:
|
||||
overrides = DocumentMetadataOverrides()
|
||||
|
||||
plugins: list[type[ConsumeTaskPlugin]] = (
|
||||
[
|
||||
ConsumerPreflightPlugin,
|
||||
ConsumerPlugin,
|
||||
]
|
||||
if input_doc.root_document_id is not None
|
||||
else [
|
||||
ConsumerPreflightPlugin,
|
||||
AsnCheckPlugin,
|
||||
CollatePlugin,
|
||||
BarcodePlugin,
|
||||
AsnCheckPlugin, # Re-run ASN check after barcode reading
|
||||
WorkflowTriggerPlugin,
|
||||
ConsumerPlugin,
|
||||
]
|
||||
)
|
||||
plugins: list[type[ConsumeTaskPlugin]] = [
|
||||
ConsumerPreflightPlugin,
|
||||
AsnCheckPlugin,
|
||||
CollatePlugin,
|
||||
BarcodePlugin,
|
||||
AsnCheckPlugin, # Re-run ASN check after barcode reading
|
||||
WorkflowTriggerPlugin,
|
||||
ConsumerPlugin,
|
||||
]
|
||||
|
||||
with (
|
||||
ProgressManager(
|
||||
@@ -541,6 +535,11 @@ def check_scheduled_workflows() -> None:
|
||||
workflow_to_run=workflow,
|
||||
document=document,
|
||||
)
|
||||
# Scheduled workflows dont send document_updated signal, so send a websocket update here to ensure clients are updated
|
||||
send_websocket_document_updated(
|
||||
sender=None,
|
||||
document=document,
|
||||
)
|
||||
|
||||
|
||||
def update_document_parent_tags(tag: Tag, new_parent: Tag) -> None:
|
||||
|
||||
@@ -1,518 +0,0 @@
|
||||
"""Tests for PaperlessCommand base class."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
from django.core.management import CommandError
|
||||
from django.db.models import QuerySet
|
||||
from rich.console import Console
|
||||
|
||||
from documents.management.commands.base import PaperlessCommand
|
||||
from documents.management.commands.base import ProcessResult
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pytest_mock import MockerFixture
|
||||
|
||||
|
||||
# --- Test Commands ---
|
||||
# These simulate real command implementations for testing
|
||||
|
||||
|
||||
class SimpleCommand(PaperlessCommand):
|
||||
"""Command with default settings (progress bar, no multiprocessing)."""
|
||||
|
||||
help = "Simple test command"
|
||||
|
||||
def handle(self, *args, **options):
|
||||
items = list(range(5))
|
||||
results = []
|
||||
for item in self.track(items, description="Processing..."):
|
||||
results.append(item * 2)
|
||||
self.stdout.write(f"Results: {results}")
|
||||
|
||||
|
||||
class NoProgressBarCommand(PaperlessCommand):
|
||||
"""Command with progress bar disabled."""
|
||||
|
||||
help = "No progress bar command"
|
||||
supports_progress_bar = False
|
||||
|
||||
def handle(self, *args, **options):
|
||||
items = list(range(3))
|
||||
for _ in self.track(items):
|
||||
# We don't need to actually work
|
||||
pass
|
||||
self.stdout.write("Done")
|
||||
|
||||
|
||||
class MultiprocessCommand(PaperlessCommand):
|
||||
"""Command with multiprocessing support."""
|
||||
|
||||
help = "Multiprocess test command"
|
||||
supports_multiprocessing = True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
items = list(range(5))
|
||||
results = []
|
||||
for result in self.process_parallel(
|
||||
_double_value,
|
||||
items,
|
||||
description="Processing...",
|
||||
):
|
||||
results.append(result)
|
||||
successes = sum(1 for r in results if r.success)
|
||||
self.stdout.write(f"Successes: {successes}")
|
||||
|
||||
|
||||
# --- Helper Functions for Multiprocessing ---
|
||||
# Must be at module level to be picklable
|
||||
|
||||
|
||||
def _double_value(x: int) -> int:
|
||||
"""Double the input value."""
|
||||
return x * 2
|
||||
|
||||
|
||||
def _divide_ten_by(x: int) -> float:
|
||||
"""Divide 10 by x. Raises ZeroDivisionError if x is 0."""
|
||||
return 10 / x
|
||||
|
||||
|
||||
# --- Fixtures ---
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def console() -> Console:
|
||||
"""Create a non-interactive console for testing."""
|
||||
return Console(force_terminal=False, force_interactive=False)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def simple_command(console: Console) -> SimpleCommand:
|
||||
"""Create a SimpleCommand instance configured for testing."""
|
||||
command = SimpleCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
command.console = console
|
||||
command.no_progress_bar = True
|
||||
command.process_count = 1
|
||||
return command
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def multiprocess_command(console: Console) -> MultiprocessCommand:
|
||||
"""Create a MultiprocessCommand instance configured for testing."""
|
||||
command = MultiprocessCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
command.console = console
|
||||
command.no_progress_bar = True
|
||||
command.process_count = 1
|
||||
return command
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_queryset():
|
||||
"""
|
||||
Create a mock Django QuerySet that tracks method calls.
|
||||
|
||||
This verifies we use .count() instead of len() for querysets.
|
||||
"""
|
||||
|
||||
class MockQuerySet(QuerySet):
|
||||
def __init__(self, items: list):
|
||||
self._items = items
|
||||
self.count_called = False
|
||||
|
||||
def count(self) -> int:
|
||||
self.count_called = True
|
||||
return len(self._items)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._items)
|
||||
|
||||
def __len__(self):
|
||||
raise AssertionError("len() should not be called on querysets")
|
||||
|
||||
return MockQuerySet
|
||||
|
||||
|
||||
# --- Test Classes ---
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestProcessResult:
|
||||
"""Tests for the ProcessResult dataclass."""
|
||||
|
||||
def test_success_result(self):
|
||||
result = ProcessResult(item=1, result=2, error=None)
|
||||
|
||||
assert result.item == 1
|
||||
assert result.result == 2
|
||||
assert result.error is None
|
||||
assert result.success is True
|
||||
|
||||
def test_error_result(self):
|
||||
error = ValueError("test error")
|
||||
result = ProcessResult(item=1, result=None, error=error)
|
||||
|
||||
assert result.item == 1
|
||||
assert result.result is None
|
||||
assert result.error is error
|
||||
assert result.success is False
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestPaperlessCommandArguments:
|
||||
"""Tests for argument parsing behavior."""
|
||||
|
||||
def test_progress_bar_argument_added_by_default(self):
|
||||
command = SimpleCommand()
|
||||
parser = command.create_parser("manage.py", "simple")
|
||||
|
||||
options = parser.parse_args(["--no-progress-bar"])
|
||||
assert options.no_progress_bar is True
|
||||
|
||||
options = parser.parse_args([])
|
||||
assert options.no_progress_bar is False
|
||||
|
||||
def test_progress_bar_argument_not_added_when_disabled(self):
|
||||
command = NoProgressBarCommand()
|
||||
parser = command.create_parser("manage.py", "noprogress")
|
||||
|
||||
options = parser.parse_args([])
|
||||
assert not hasattr(options, "no_progress_bar")
|
||||
|
||||
def test_processes_argument_added_when_multiprocessing_enabled(self):
|
||||
command = MultiprocessCommand()
|
||||
parser = command.create_parser("manage.py", "multiprocess")
|
||||
|
||||
options = parser.parse_args(["--processes", "4"])
|
||||
assert options.processes == 4
|
||||
|
||||
options = parser.parse_args([])
|
||||
assert options.processes >= 1
|
||||
|
||||
def test_processes_argument_not_added_when_multiprocessing_disabled(self):
|
||||
command = SimpleCommand()
|
||||
parser = command.create_parser("manage.py", "simple")
|
||||
|
||||
options = parser.parse_args([])
|
||||
assert not hasattr(options, "processes")
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestPaperlessCommandExecute:
|
||||
"""Tests for the execute() setup behavior."""
|
||||
|
||||
@pytest.fixture
|
||||
def base_options(self) -> dict:
|
||||
"""Base options required for execute()."""
|
||||
return {
|
||||
"verbosity": 1,
|
||||
"no_color": True,
|
||||
"force_color": False,
|
||||
"skip_checks": True,
|
||||
}
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("no_progress_bar_flag", "expected"),
|
||||
[
|
||||
pytest.param(False, False, id="progress-bar-enabled"),
|
||||
pytest.param(True, True, id="progress-bar-disabled"),
|
||||
],
|
||||
)
|
||||
def test_no_progress_bar_state_set(
|
||||
self,
|
||||
base_options: dict,
|
||||
*,
|
||||
no_progress_bar_flag: bool,
|
||||
expected: bool,
|
||||
):
|
||||
command = SimpleCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
|
||||
options = {**base_options, "no_progress_bar": no_progress_bar_flag}
|
||||
command.execute(**options)
|
||||
|
||||
assert command.no_progress_bar is expected
|
||||
|
||||
def test_no_progress_bar_always_true_when_not_supported(self, base_options: dict):
|
||||
command = NoProgressBarCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
|
||||
command.execute(**base_options)
|
||||
|
||||
assert command.no_progress_bar is True
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("processes", "expected"),
|
||||
[
|
||||
pytest.param(1, 1, id="single-process"),
|
||||
pytest.param(4, 4, id="four-processes"),
|
||||
],
|
||||
)
|
||||
def test_process_count_set(
|
||||
self,
|
||||
base_options: dict,
|
||||
processes: int,
|
||||
expected: int,
|
||||
):
|
||||
command = MultiprocessCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
|
||||
options = {**base_options, "processes": processes, "no_progress_bar": True}
|
||||
command.execute(**options)
|
||||
|
||||
assert command.process_count == expected
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"invalid_count",
|
||||
[
|
||||
pytest.param(0, id="zero"),
|
||||
pytest.param(-1, id="negative"),
|
||||
],
|
||||
)
|
||||
def test_process_count_validation_rejects_invalid(
|
||||
self,
|
||||
base_options: dict,
|
||||
invalid_count: int,
|
||||
):
|
||||
command = MultiprocessCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
|
||||
options = {**base_options, "processes": invalid_count, "no_progress_bar": True}
|
||||
|
||||
with pytest.raises(CommandError, match="--processes must be at least 1"):
|
||||
command.execute(**options)
|
||||
|
||||
def test_process_count_defaults_to_one_when_not_supported(self, base_options: dict):
|
||||
command = SimpleCommand()
|
||||
command.stdout = io.StringIO()
|
||||
command.stderr = io.StringIO()
|
||||
|
||||
options = {**base_options, "no_progress_bar": True}
|
||||
command.execute(**options)
|
||||
|
||||
assert command.process_count == 1
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestGetIterableLength:
|
||||
"""Tests for the _get_iterable_length() method."""
|
||||
|
||||
def test_uses_count_for_querysets(
|
||||
self,
|
||||
simple_command: SimpleCommand,
|
||||
mock_queryset,
|
||||
):
|
||||
"""Should call .count() on Django querysets rather than len()."""
|
||||
queryset = mock_queryset([1, 2, 3, 4, 5])
|
||||
|
||||
result = simple_command._get_iterable_length(queryset)
|
||||
|
||||
assert result == 5
|
||||
assert queryset.count_called is True
|
||||
|
||||
def test_uses_len_for_sized(self, simple_command: SimpleCommand):
|
||||
"""Should use len() for sequences and other Sized types."""
|
||||
result = simple_command._get_iterable_length([1, 2, 3, 4])
|
||||
|
||||
assert result == 4
|
||||
|
||||
def test_returns_none_for_unsized_iterables(self, simple_command: SimpleCommand):
|
||||
"""Should return None for generators and other iterables without len()."""
|
||||
result = simple_command._get_iterable_length(x for x in [1, 2, 3])
|
||||
|
||||
assert result is None
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestTrack:
|
||||
"""Tests for the track() method."""
|
||||
|
||||
def test_with_progress_bar_disabled(self, simple_command: SimpleCommand):
|
||||
simple_command.no_progress_bar = True
|
||||
items = ["a", "b", "c"]
|
||||
|
||||
result = list(simple_command.track(items, description="Test..."))
|
||||
|
||||
assert result == items
|
||||
|
||||
def test_with_progress_bar_enabled(self, simple_command: SimpleCommand):
|
||||
simple_command.no_progress_bar = False
|
||||
items = [1, 2, 3]
|
||||
|
||||
result = list(simple_command.track(items, description="Processing..."))
|
||||
|
||||
assert result == items
|
||||
|
||||
def test_with_explicit_total(self, simple_command: SimpleCommand):
|
||||
simple_command.no_progress_bar = False
|
||||
|
||||
def gen():
|
||||
yield from [1, 2, 3]
|
||||
|
||||
result = list(simple_command.track(gen(), total=3))
|
||||
|
||||
assert result == [1, 2, 3]
|
||||
|
||||
def test_with_generator_no_total(self, simple_command: SimpleCommand):
|
||||
def gen():
|
||||
yield from [1, 2, 3]
|
||||
|
||||
result = list(simple_command.track(gen()))
|
||||
|
||||
assert result == [1, 2, 3]
|
||||
|
||||
def test_empty_iterable(self, simple_command: SimpleCommand):
|
||||
result = list(simple_command.track([]))
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_uses_queryset_count(
|
||||
self,
|
||||
simple_command: SimpleCommand,
|
||||
mock_queryset,
|
||||
mocker: MockerFixture,
|
||||
):
|
||||
"""Verify track() uses .count() for querysets."""
|
||||
simple_command.no_progress_bar = False
|
||||
queryset = mock_queryset([1, 2, 3])
|
||||
|
||||
spy = mocker.spy(simple_command, "_get_iterable_length")
|
||||
|
||||
result = list(simple_command.track(queryset))
|
||||
|
||||
assert result == [1, 2, 3]
|
||||
spy.assert_called_once_with(queryset)
|
||||
assert queryset.count_called is True
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestProcessParallel:
|
||||
"""Tests for the process_parallel() method."""
|
||||
|
||||
def test_sequential_processing_single_process(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
):
|
||||
multiprocess_command.process_count = 1
|
||||
items = [1, 2, 3, 4, 5]
|
||||
|
||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
||||
|
||||
assert len(results) == 5
|
||||
assert all(r.success for r in results)
|
||||
|
||||
result_map = {r.item: r.result for r in results}
|
||||
assert result_map == {1: 2, 2: 4, 3: 6, 4: 8, 5: 10}
|
||||
|
||||
def test_sequential_processing_handles_errors(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
):
|
||||
multiprocess_command.process_count = 1
|
||||
items = [1, 2, 0, 4] # 0 causes ZeroDivisionError
|
||||
|
||||
results = list(multiprocess_command.process_parallel(_divide_ten_by, items))
|
||||
|
||||
assert len(results) == 4
|
||||
|
||||
successes = [r for r in results if r.success]
|
||||
failures = [r for r in results if not r.success]
|
||||
|
||||
assert len(successes) == 3
|
||||
assert len(failures) == 1
|
||||
assert failures[0].item == 0
|
||||
assert isinstance(failures[0].error, ZeroDivisionError)
|
||||
|
||||
def test_parallel_closes_db_connections(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
mocker: MockerFixture,
|
||||
):
|
||||
multiprocess_command.process_count = 2
|
||||
items = [1, 2, 3]
|
||||
|
||||
mock_connections = mocker.patch(
|
||||
"documents.management.commands.base.db.connections",
|
||||
)
|
||||
|
||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
||||
|
||||
mock_connections.close_all.assert_called_once()
|
||||
assert len(results) == 3
|
||||
|
||||
def test_parallel_processing_handles_errors(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
mocker: MockerFixture,
|
||||
):
|
||||
multiprocess_command.process_count = 2
|
||||
items = [1, 2, 0, 4]
|
||||
|
||||
mocker.patch("documents.management.commands.base.db.connections")
|
||||
|
||||
results = list(multiprocess_command.process_parallel(_divide_ten_by, items))
|
||||
|
||||
failures = [r for r in results if not r.success]
|
||||
assert len(failures) == 1
|
||||
assert failures[0].item == 0
|
||||
|
||||
def test_empty_items(self, multiprocess_command: MultiprocessCommand):
|
||||
results = list(multiprocess_command.process_parallel(_double_value, []))
|
||||
|
||||
assert results == []
|
||||
|
||||
def test_result_contains_original_item(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
):
|
||||
items = [10, 20, 30]
|
||||
|
||||
results = list(multiprocess_command.process_parallel(_double_value, items))
|
||||
|
||||
for result in results:
|
||||
assert result.item in items
|
||||
assert result.result == result.item * 2
|
||||
|
||||
def test_sequential_path_used_for_single_process(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
mocker: MockerFixture,
|
||||
):
|
||||
"""Verify single process uses sequential path (important for testing)."""
|
||||
multiprocess_command.process_count = 1
|
||||
|
||||
spy_sequential = mocker.spy(multiprocess_command, "_process_sequential")
|
||||
spy_parallel = mocker.spy(multiprocess_command, "_process_parallel")
|
||||
|
||||
list(multiprocess_command.process_parallel(_double_value, [1, 2, 3]))
|
||||
|
||||
spy_sequential.assert_called_once()
|
||||
spy_parallel.assert_not_called()
|
||||
|
||||
def test_parallel_path_used_for_multiple_processes(
|
||||
self,
|
||||
multiprocess_command: MultiprocessCommand,
|
||||
mocker: MockerFixture,
|
||||
):
|
||||
"""Verify multiple processes uses parallel path."""
|
||||
multiprocess_command.process_count = 2
|
||||
|
||||
mocker.patch("documents.management.commands.base.db.connections")
|
||||
spy_sequential = mocker.spy(multiprocess_command, "_process_sequential")
|
||||
spy_parallel = mocker.spy(multiprocess_command, "_process_parallel")
|
||||
|
||||
list(multiprocess_command.process_parallel(_double_value, [1, 2, 3]))
|
||||
|
||||
spy_parallel.assert_called_once()
|
||||
spy_sequential.assert_not_called()
|
||||
@@ -1,811 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
from unittest import TestCase
|
||||
from unittest import mock
|
||||
|
||||
from auditlog.models import LogEntry # type: ignore[import-untyped]
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.auth.models import User
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.exceptions import FieldError
|
||||
from django.core.files.uploadedfile import SimpleUploadedFile
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from documents.data_models import DocumentSource
|
||||
from documents.filters import EffectiveContentFilter
|
||||
from documents.filters import TitleContentFilter
|
||||
from documents.models import Document
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class TestDocumentVersioningApi(DirectoriesMixin, APITestCase):
|
||||
def setUp(self) -> None:
|
||||
super().setUp()
|
||||
|
||||
self.user = User.objects.create_superuser(username="temp_admin")
|
||||
self.client.force_authenticate(user=self.user)
|
||||
|
||||
def _make_pdf_upload(self, name: str = "version.pdf") -> SimpleUploadedFile:
|
||||
return SimpleUploadedFile(
|
||||
name,
|
||||
b"%PDF-1.4\n1 0 obj\n<<>>\nendobj\n%%EOF",
|
||||
content_type="application/pdf",
|
||||
)
|
||||
|
||||
def _write_file(self, path: Path, content: bytes = b"data") -> None:
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
path.write_bytes(content)
|
||||
|
||||
def _create_pdf(
|
||||
self,
|
||||
*,
|
||||
title: str,
|
||||
checksum: str,
|
||||
root_document: Document | None = None,
|
||||
) -> Document:
|
||||
doc = Document.objects.create(
|
||||
title=title,
|
||||
checksum=checksum,
|
||||
mime_type="application/pdf",
|
||||
root_document=root_document,
|
||||
)
|
||||
self._write_file(doc.source_path, b"pdf")
|
||||
self._write_file(doc.thumbnail_path, b"thumb")
|
||||
return doc
|
||||
|
||||
def test_root_endpoint_returns_root_for_version_and_root(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
|
||||
resp_root = self.client.get(f"/api/documents/{root.id}/root/")
|
||||
self.assertEqual(resp_root.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp_root.data["root_id"], root.id)
|
||||
|
||||
resp_version = self.client.get(f"/api/documents/{version.id}/root/")
|
||||
self.assertEqual(resp_version.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp_version.data["root_id"], root.id)
|
||||
|
||||
def test_root_endpoint_returns_404_for_missing_document(self) -> None:
|
||||
resp = self.client.get("/api/documents/9999/root/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_root_endpoint_returns_403_when_user_lacks_permission(self) -> None:
|
||||
owner = User.objects.create_user(username="owner")
|
||||
viewer = User.objects.create_user(username="viewer")
|
||||
viewer.user_permissions.add(
|
||||
Permission.objects.get(codename="view_document"),
|
||||
)
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
owner=owner,
|
||||
)
|
||||
self.client.force_authenticate(user=viewer)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{root.id}/root/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_delete_version_disallows_deleting_root(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
with mock.patch("documents.index.remove_document_from_index"):
|
||||
resp = self.client.delete(f"/api/documents/{root.id}/versions/{root.id}/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertTrue(Document.objects.filter(id=root.id).exists())
|
||||
|
||||
def test_delete_version_deletes_version_and_returns_current_version(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
v1 = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v1-content",
|
||||
)
|
||||
v2 = Document.objects.create(
|
||||
title="v2",
|
||||
checksum="v2",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v2-content",
|
||||
)
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.remove_document_from_index"),
|
||||
mock.patch("documents.index.add_or_update_document"),
|
||||
):
|
||||
resp = self.client.delete(f"/api/documents/{root.id}/versions/{v2.id}/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertFalse(Document.objects.filter(id=v2.id).exists())
|
||||
self.assertEqual(resp.data["current_version_id"], v1.id)
|
||||
root.refresh_from_db()
|
||||
self.assertEqual(root.content, "root-content")
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.remove_document_from_index"),
|
||||
mock.patch("documents.index.add_or_update_document"),
|
||||
):
|
||||
resp = self.client.delete(f"/api/documents/{root.id}/versions/{v1.id}/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertFalse(Document.objects.filter(id=v1.id).exists())
|
||||
self.assertEqual(resp.data["current_version_id"], root.id)
|
||||
root.refresh_from_db()
|
||||
self.assertEqual(root.content, "root-content")
|
||||
|
||||
def test_delete_version_writes_audit_log_entry(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
version_id = version.id
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.remove_document_from_index"),
|
||||
mock.patch("documents.index.add_or_update_document"),
|
||||
):
|
||||
resp = self.client.delete(
|
||||
f"/api/documents/{root.id}/versions/{version_id}/",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
|
||||
# Audit log entry is created against the root document.
|
||||
entry = (
|
||||
LogEntry.objects.filter(
|
||||
content_type=ContentType.objects.get_for_model(Document),
|
||||
object_id=root.id,
|
||||
)
|
||||
.order_by("-timestamp")
|
||||
.first()
|
||||
)
|
||||
self.assertIsNotNone(entry)
|
||||
assert entry is not None
|
||||
self.assertIsNotNone(entry.actor)
|
||||
assert entry.actor is not None
|
||||
self.assertEqual(entry.actor.id, self.user.id)
|
||||
self.assertEqual(entry.action, LogEntry.Action.UPDATE)
|
||||
self.assertEqual(
|
||||
entry.changes,
|
||||
{"Version Deleted": ["None", version_id]},
|
||||
)
|
||||
additional_data = entry.additional_data or {}
|
||||
self.assertEqual(additional_data.get("version_id"), version_id)
|
||||
|
||||
def test_delete_version_returns_404_when_version_not_related(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_root = Document.objects.create(
|
||||
title="other",
|
||||
checksum="other",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_version = Document.objects.create(
|
||||
title="other-v1",
|
||||
checksum="other-v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=other_root,
|
||||
)
|
||||
|
||||
with mock.patch("documents.index.remove_document_from_index"):
|
||||
resp = self.client.delete(
|
||||
f"/api/documents/{root.id}/versions/{other_version.id}/",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_delete_version_accepts_version_id_as_root_parameter(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.remove_document_from_index"),
|
||||
mock.patch("documents.index.add_or_update_document"),
|
||||
):
|
||||
resp = self.client.delete(
|
||||
f"/api/documents/{version.id}/versions/{version.id}/",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertFalse(Document.objects.filter(id=version.id).exists())
|
||||
self.assertEqual(resp.data["current_version_id"], root.id)
|
||||
|
||||
def test_delete_version_returns_404_when_root_missing(self) -> None:
|
||||
resp = self.client.delete("/api/documents/9999/versions/123/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_delete_version_reindexes_root_document(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
|
||||
with (
|
||||
mock.patch("documents.index.remove_document_from_index") as remove_index,
|
||||
mock.patch("documents.index.add_or_update_document") as add_or_update,
|
||||
):
|
||||
resp = self.client.delete(
|
||||
f"/api/documents/{root.id}/versions/{version.id}/",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
remove_index.assert_called_once_with(version)
|
||||
add_or_update.assert_called_once()
|
||||
self.assertEqual(add_or_update.call_args[0][0].id, root.id)
|
||||
|
||||
def test_delete_version_returns_403_without_permission(self) -> None:
|
||||
owner = User.objects.create_user(username="owner")
|
||||
other = User.objects.create_user(username="other")
|
||||
other.user_permissions.add(
|
||||
Permission.objects.get(codename="delete_document"),
|
||||
)
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
owner=owner,
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
self.client.force_authenticate(user=other)
|
||||
|
||||
resp = self.client.delete(
|
||||
f"/api/documents/{root.id}/versions/{version.id}/",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_delete_version_returns_404_when_version_missing(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
resp = self.client.delete(f"/api/documents/{root.id}/versions/9999/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_update_version_label_updates_and_trims(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
version_label="old",
|
||||
)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/versions/{version.id}/",
|
||||
{"version_label": " Label 1 "},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
version.refresh_from_db()
|
||||
self.assertEqual(version.version_label, "Label 1")
|
||||
self.assertEqual(resp.data["version_label"], "Label 1")
|
||||
self.assertEqual(resp.data["id"], version.id)
|
||||
self.assertFalse(resp.data["is_root"])
|
||||
|
||||
def test_update_version_label_clears_on_blank(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
version_label="Root Label",
|
||||
)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/versions/{root.id}/",
|
||||
{"version_label": " "},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
root.refresh_from_db()
|
||||
self.assertIsNone(root.version_label)
|
||||
self.assertIsNone(resp.data["version_label"])
|
||||
self.assertTrue(resp.data["is_root"])
|
||||
|
||||
def test_update_version_label_returns_403_without_permission(self) -> None:
|
||||
owner = User.objects.create_user(username="owner")
|
||||
other = User.objects.create_user(username="other")
|
||||
other.user_permissions.add(
|
||||
Permission.objects.get(codename="change_document"),
|
||||
)
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
owner=owner,
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
self.client.force_authenticate(user=other)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/versions/{version.id}/",
|
||||
{"version_label": "Blocked"},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_update_version_label_returns_404_for_unrelated_version(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_root = Document.objects.create(
|
||||
title="other",
|
||||
checksum="other",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_version = Document.objects.create(
|
||||
title="other-v1",
|
||||
checksum="other-v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=other_root,
|
||||
)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/versions/{other_version.id}/",
|
||||
{"version_label": "Nope"},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_download_version_param_errors(self) -> None:
|
||||
root = self._create_pdf(title="root", checksum="root")
|
||||
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/download/?version=not-a-number",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{root.id}/download/?version=9999")
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
other_root = self._create_pdf(title="other", checksum="other")
|
||||
other_version = self._create_pdf(
|
||||
title="other-v1",
|
||||
checksum="other-v1",
|
||||
root_document=other_root,
|
||||
)
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/download/?version={other_version.id}",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_download_preview_thumb_with_version_param(self) -> None:
|
||||
root = self._create_pdf(title="root", checksum="root")
|
||||
version = self._create_pdf(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
root_document=root,
|
||||
)
|
||||
self._write_file(version.source_path, b"version")
|
||||
self._write_file(version.thumbnail_path, b"thumb")
|
||||
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/download/?version={version.id}",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.content, b"version")
|
||||
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/preview/?version={version.id}",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.content, b"version")
|
||||
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/thumb/?version={version.id}",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.content, b"thumb")
|
||||
|
||||
def test_metadata_version_param_uses_version(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
|
||||
with mock.patch("documents.views.DocumentViewSet.get_metadata") as metadata:
|
||||
metadata.return_value = []
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/metadata/?version={version.id}",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertTrue(metadata.called)
|
||||
|
||||
def test_metadata_version_param_errors(self) -> None:
|
||||
root = self._create_pdf(title="root", checksum="root")
|
||||
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/metadata/?version=not-a-number",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{root.id}/metadata/?version=9999")
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
other_root = self._create_pdf(title="other", checksum="other")
|
||||
other_version = self._create_pdf(
|
||||
title="other-v1",
|
||||
checksum="other-v1",
|
||||
root_document=other_root,
|
||||
)
|
||||
resp = self.client.get(
|
||||
f"/api/documents/{root.id}/metadata/?version={other_version.id}",
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_metadata_returns_403_when_user_lacks_permission(self) -> None:
|
||||
owner = User.objects.create_user(username="owner")
|
||||
other = User.objects.create_user(username="other")
|
||||
other.user_permissions.add(
|
||||
Permission.objects.get(codename="view_document"),
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
owner=owner,
|
||||
)
|
||||
self.client.force_authenticate(user=other)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{doc.id}/metadata/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_update_version_enqueues_consume_with_overrides(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
upload = self._make_pdf_upload()
|
||||
|
||||
async_task = mock.Mock()
|
||||
async_task.id = "task-123"
|
||||
|
||||
with mock.patch("documents.views.consume_file") as consume_mock:
|
||||
consume_mock.delay.return_value = async_task
|
||||
resp = self.client.post(
|
||||
f"/api/documents/{root.id}/update_version/",
|
||||
{"document": upload, "version_label": " New Version "},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data, "task-123")
|
||||
consume_mock.delay.assert_called_once()
|
||||
input_doc, overrides = consume_mock.delay.call_args[0]
|
||||
self.assertEqual(input_doc.root_document_id, root.id)
|
||||
self.assertEqual(input_doc.source, DocumentSource.ApiUpload)
|
||||
self.assertEqual(overrides.version_label, "New Version")
|
||||
self.assertEqual(overrides.actor_id, self.user.id)
|
||||
|
||||
def test_update_version_with_version_pk_normalizes_to_root(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
upload = self._make_pdf_upload()
|
||||
|
||||
async_task = mock.Mock()
|
||||
async_task.id = "task-123"
|
||||
|
||||
with mock.patch("documents.views.consume_file") as consume_mock:
|
||||
consume_mock.delay.return_value = async_task
|
||||
resp = self.client.post(
|
||||
f"/api/documents/{version.id}/update_version/",
|
||||
{"document": upload, "version_label": " New Version "},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data, "task-123")
|
||||
consume_mock.delay.assert_called_once()
|
||||
input_doc, overrides = consume_mock.delay.call_args[0]
|
||||
self.assertEqual(input_doc.root_document_id, root.id)
|
||||
self.assertEqual(overrides.version_label, "New Version")
|
||||
self.assertEqual(overrides.actor_id, self.user.id)
|
||||
|
||||
def test_update_version_returns_500_on_consume_failure(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
upload = self._make_pdf_upload()
|
||||
|
||||
with mock.patch("documents.views.consume_file") as consume_mock:
|
||||
consume_mock.delay.side_effect = Exception("boom")
|
||||
resp = self.client.post(
|
||||
f"/api/documents/{root.id}/update_version/",
|
||||
{"document": upload},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR)
|
||||
|
||||
def test_update_version_returns_403_without_permission(self) -> None:
|
||||
owner = User.objects.create_user(username="owner")
|
||||
other = User.objects.create_user(username="other")
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
owner=owner,
|
||||
)
|
||||
self.client.force_authenticate(user=other)
|
||||
|
||||
resp = self.client.post(
|
||||
f"/api/documents/{root.id}/update_version/",
|
||||
{"document": self._make_pdf_upload()},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_update_version_returns_404_for_missing_document(self) -> None:
|
||||
resp = self.client.post(
|
||||
"/api/documents/9999/update_version/",
|
||||
{"document": self._make_pdf_upload()},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_update_version_requires_document(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
resp = self.client.post(
|
||||
f"/api/documents/{root.id}/update_version/",
|
||||
{"version_label": "label"},
|
||||
format="multipart",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def test_patch_content_updates_latest_version_content(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
v1 = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v1-content",
|
||||
)
|
||||
v2 = Document.objects.create(
|
||||
title="v2",
|
||||
checksum="v2",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v2-content",
|
||||
)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/",
|
||||
{"content": "edited-content"},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data["content"], "edited-content")
|
||||
root.refresh_from_db()
|
||||
v1.refresh_from_db()
|
||||
v2.refresh_from_db()
|
||||
self.assertEqual(v2.content, "edited-content")
|
||||
self.assertEqual(root.content, "root-content")
|
||||
self.assertEqual(v1.content, "v1-content")
|
||||
|
||||
def test_patch_content_updates_selected_version_content(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
v1 = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v1-content",
|
||||
)
|
||||
v2 = Document.objects.create(
|
||||
title="v2",
|
||||
checksum="v2",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v2-content",
|
||||
)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{root.id}/?version={v1.id}",
|
||||
{"content": "edited-v1"},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data["content"], "edited-v1")
|
||||
root.refresh_from_db()
|
||||
v1.refresh_from_db()
|
||||
v2.refresh_from_db()
|
||||
self.assertEqual(v1.content, "edited-v1")
|
||||
self.assertEqual(v2.content, "v2-content")
|
||||
self.assertEqual(root.content, "root-content")
|
||||
|
||||
def test_retrieve_returns_latest_version_content(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v1-content",
|
||||
)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{root.id}/")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data["content"], "v1-content")
|
||||
|
||||
def test_retrieve_with_version_param_returns_selected_version_content(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
v1 = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="v1-content",
|
||||
)
|
||||
|
||||
resp = self.client.get(f"/api/documents/{root.id}/?version={v1.id}")
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(resp.data["content"], "v1-content")
|
||||
|
||||
|
||||
class TestVersionAwareFilters(TestCase):
|
||||
def test_title_content_filter_falls_back_to_content(self) -> None:
|
||||
queryset = mock.Mock()
|
||||
fallback_queryset = mock.Mock()
|
||||
queryset.filter.side_effect = [FieldError("missing field"), fallback_queryset]
|
||||
|
||||
result = TitleContentFilter().filter(queryset, " latest ")
|
||||
|
||||
self.assertIs(result, fallback_queryset)
|
||||
self.assertEqual(queryset.filter.call_count, 2)
|
||||
|
||||
def test_effective_content_filter_falls_back_to_content_lookup(self) -> None:
|
||||
queryset = mock.Mock()
|
||||
fallback_queryset = mock.Mock()
|
||||
queryset.filter.side_effect = [FieldError("missing field"), fallback_queryset]
|
||||
|
||||
result = EffectiveContentFilter(lookup_expr="icontains").filter(
|
||||
queryset,
|
||||
" latest ",
|
||||
)
|
||||
|
||||
self.assertIs(result, fallback_queryset)
|
||||
first_kwargs = queryset.filter.call_args_list[0].kwargs
|
||||
second_kwargs = queryset.filter.call_args_list[1].kwargs
|
||||
self.assertEqual(first_kwargs, {"effective_content__icontains": "latest"})
|
||||
self.assertEqual(second_kwargs, {"content__icontains": "latest"})
|
||||
|
||||
def test_effective_content_filter_returns_input_for_empty_values(self) -> None:
|
||||
queryset = mock.Mock()
|
||||
|
||||
result = EffectiveContentFilter(lookup_expr="icontains").filter(queryset, " ")
|
||||
|
||||
self.assertIs(result, queryset)
|
||||
queryset.filter.assert_not_called()
|
||||
@@ -445,40 +445,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.content, content)
|
||||
|
||||
@override_settings(FILENAME_FORMAT="")
|
||||
def test_download_follow_formatting(self) -> None:
|
||||
content = b"This is a test"
|
||||
content_archive = b"This is the same test but archived"
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="none",
|
||||
filename="my_document.pdf",
|
||||
archive_filename="archived.pdf",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
with Path(doc.source_path).open("wb") as f:
|
||||
f.write(content)
|
||||
|
||||
with Path(doc.archive_path).open("wb") as f:
|
||||
f.write(content_archive)
|
||||
|
||||
# Without follow_formatting, should use public filename
|
||||
response = self.client.get(f"/api/documents/{doc.pk}/download/")
|
||||
self.assertIn("none.pdf", response["Content-Disposition"])
|
||||
|
||||
# With follow_formatting, should use actual filename on disk
|
||||
response = self.client.get(
|
||||
f"/api/documents/{doc.pk}/download/?follow_formatting=true",
|
||||
)
|
||||
self.assertIn("archived.pdf", response["Content-Disposition"])
|
||||
|
||||
# With follow_formatting and original, should use source filename
|
||||
response = self.client.get(
|
||||
f"/api/documents/{doc.pk}/download/?original=true&follow_formatting=true",
|
||||
)
|
||||
self.assertIn("my_document.pdf", response["Content-Disposition"])
|
||||
|
||||
def test_document_actions_not_existing_file(self) -> None:
|
||||
doc = Document.objects.create(
|
||||
title="none",
|
||||
@@ -588,36 +554,6 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
self.assertIsNone(response.data[1]["actor"])
|
||||
self.assertEqual(response.data[1]["action"], "create")
|
||||
|
||||
def test_document_history_logs_version_deletion(self) -> None:
|
||||
root_doc = Document.objects.create(
|
||||
title="Root",
|
||||
checksum="123",
|
||||
mime_type="application/pdf",
|
||||
owner=self.user,
|
||||
)
|
||||
version_doc = Document.objects.create(
|
||||
title="Version",
|
||||
checksum="456",
|
||||
mime_type="application/pdf",
|
||||
root_document=root_doc,
|
||||
owner=self.user,
|
||||
)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{root_doc.pk}/versions/{version_doc.pk}/",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
response = self.client.get(f"/api/documents/{root_doc.pk}/history/")
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(len(response.data), 2)
|
||||
self.assertEqual(response.data[0]["actor"]["id"], self.user.id)
|
||||
self.assertEqual(response.data[0]["action"], "update")
|
||||
self.assertEqual(
|
||||
response.data[0]["changes"],
|
||||
{"Version Deleted": ["None", version_doc.pk]},
|
||||
)
|
||||
|
||||
@override_settings(AUDIT_LOG_ENABLED=False)
|
||||
def test_document_history_action_disabled(self) -> None:
|
||||
"""
|
||||
@@ -1270,44 +1206,16 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
input_doc, overrides = self.get_last_consume_delay_call_args()
|
||||
|
||||
self.assertEqual(input_doc.original_file.name, "simple.pdf")
|
||||
self.assertIn(Path(settings.SCRATCH_DIR), input_doc.original_file.parents)
|
||||
self.assertTrue(
|
||||
input_doc.original_file.resolve(strict=False).is_relative_to(
|
||||
Path(settings.SCRATCH_DIR).resolve(strict=False),
|
||||
),
|
||||
)
|
||||
self.assertIsNone(overrides.title)
|
||||
self.assertIsNone(overrides.correspondent_id)
|
||||
self.assertIsNone(overrides.document_type_id)
|
||||
self.assertIsNone(overrides.tag_ids)
|
||||
|
||||
def test_document_filters_use_latest_version_content(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="versioned root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
content="root-content",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="versioned root",
|
||||
checksum="v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
content="latest-version-content",
|
||||
)
|
||||
|
||||
response = self.client.get(
|
||||
"/api/documents/?content__icontains=latest-version-content",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
results = response.data["results"]
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]["id"], root.id)
|
||||
self.assertEqual(results[0]["content"], version.content)
|
||||
|
||||
response = self.client.get(
|
||||
"/api/documents/?title_content=latest-version-content",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
results = response.data["results"]
|
||||
self.assertEqual(len(results), 1)
|
||||
self.assertEqual(results[0]["id"], root.id)
|
||||
|
||||
def test_create_wrong_endpoint(self) -> None:
|
||||
response = self.client.post(
|
||||
"/api/documents/",
|
||||
@@ -1351,7 +1259,11 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
input_doc, overrides = self.get_last_consume_delay_call_args()
|
||||
|
||||
self.assertEqual(input_doc.original_file.name, "simple.pdf")
|
||||
self.assertIn(Path(settings.SCRATCH_DIR), input_doc.original_file.parents)
|
||||
self.assertTrue(
|
||||
input_doc.original_file.resolve(strict=False).is_relative_to(
|
||||
Path(settings.SCRATCH_DIR).resolve(strict=False),
|
||||
),
|
||||
)
|
||||
self.assertIsNone(overrides.title)
|
||||
self.assertIsNone(overrides.correspondent_id)
|
||||
self.assertIsNone(overrides.document_type_id)
|
||||
@@ -2515,6 +2427,57 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED, endpoint)
|
||||
|
||||
def test_tag_color_default(self) -> None:
|
||||
response = self.client.post("/api/tags/", {"name": "tag"}, format="json")
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
self.assertEqual(Tag.objects.get(id=response.data["id"]).color, "#a6cee3")
|
||||
self.assertEqual(
|
||||
self.client.get(
|
||||
f"/api/tags/{response.data['id']}/",
|
||||
headers={"Accept": "application/json; version=1"},
|
||||
format="json",
|
||||
).data["colour"],
|
||||
1,
|
||||
)
|
||||
|
||||
def test_tag_color(self) -> None:
|
||||
response = self.client.post(
|
||||
"/api/tags/",
|
||||
data={"name": "tag", "colour": 3},
|
||||
headers={"Accept": "application/json; version=1"},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
self.assertEqual(Tag.objects.get(id=response.data["id"]).color, "#b2df8a")
|
||||
self.assertEqual(
|
||||
self.client.get(
|
||||
f"/api/tags/{response.data['id']}/",
|
||||
headers={"Accept": "application/json; version=1"},
|
||||
format="json",
|
||||
).data["colour"],
|
||||
3,
|
||||
)
|
||||
|
||||
def test_tag_color_invalid(self) -> None:
|
||||
response = self.client.post(
|
||||
"/api/tags/",
|
||||
data={"name": "tag", "colour": 34},
|
||||
headers={"Accept": "application/json; version=1"},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def test_tag_color_custom(self) -> None:
|
||||
tag = Tag.objects.create(name="test", color="#abcdef")
|
||||
self.assertEqual(
|
||||
self.client.get(
|
||||
f"/api/tags/{tag.id}/",
|
||||
headers={"Accept": "application/json; version=1"},
|
||||
format="json",
|
||||
).data["colour"],
|
||||
1,
|
||||
)
|
||||
|
||||
def test_get_existing_notes(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
|
||||
@@ -896,210 +896,3 @@ class TestApiWorkflows(DirectoriesMixin, APITestCase):
|
||||
"Passwords are required",
|
||||
str(response.data["non_field_errors"][0]),
|
||||
)
|
||||
|
||||
def test_trash_action_validation(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to create a workflow with a trash action
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Correct HTTP response
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow 2",
|
||||
"order": 1,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
"sources": [DocumentSource.ApiUpload],
|
||||
"filter_filename": "*",
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow 3",
|
||||
"order": 2,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
"sources": [DocumentSource.ApiUpload],
|
||||
"filter_filename": "*",
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
|
||||
def test_trash_action_as_last_action_valid(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- API request to create a workflow with multiple actions
|
||||
- Move to trash action is the last action
|
||||
WHEN:
|
||||
- API is called
|
||||
THEN:
|
||||
- Workflow is created successfully
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow with Move to Trash Last",
|
||||
"order": 1,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
"sources": [DocumentSource.ApiUpload],
|
||||
"filter_filename": "*",
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
"assign_title": "Assigned Title",
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.REMOVAL,
|
||||
"remove_all_tags": True,
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
|
||||
def test_update_workflow_add_trash_at_end_valid(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing workflow without trash action
|
||||
WHEN:
|
||||
- PATCH to add trash action at end
|
||||
THEN:
|
||||
- HTTP 200 success
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow to Add Move to Trash",
|
||||
"order": 1,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
"sources": [DocumentSource.ApiUpload],
|
||||
"filter_filename": "*",
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
"assign_title": "First Action",
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
workflow_id = response.data["id"]
|
||||
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}{workflow_id}/",
|
||||
json.dumps(
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
"assign_title": "First Action",
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
def test_update_workflow_remove_trash_action_valid(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing workflow with trash action
|
||||
WHEN:
|
||||
- PATCH to remove trash action
|
||||
THEN:
|
||||
- HTTP 200 success
|
||||
"""
|
||||
response = self.client.post(
|
||||
self.ENDPOINT,
|
||||
json.dumps(
|
||||
{
|
||||
"name": "Workflow to Remove move to trash",
|
||||
"order": 1,
|
||||
"triggers": [
|
||||
{
|
||||
"type": WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
"sources": [DocumentSource.ApiUpload],
|
||||
"filter_filename": "*",
|
||||
},
|
||||
],
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
"assign_title": "First Action",
|
||||
},
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
|
||||
workflow_id = response.data["id"]
|
||||
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}{workflow_id}/",
|
||||
json.dumps(
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"type": WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
"assign_title": "Only Action",
|
||||
},
|
||||
],
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import hashlib
|
||||
import shutil
|
||||
from datetime import date
|
||||
from pathlib import Path
|
||||
@@ -381,55 +382,6 @@ class TestBulkEdit(DirectoriesMixin, TestCase):
|
||||
[self.doc3.id, self.doc4.id, self.doc5.id],
|
||||
)
|
||||
|
||||
def test_delete_root_document_deletes_all_versions(self) -> None:
|
||||
version = Document.objects.create(
|
||||
checksum="A-v1",
|
||||
title="A version",
|
||||
root_document=self.doc1,
|
||||
)
|
||||
|
||||
bulk_edit.delete([self.doc1.id])
|
||||
|
||||
self.assertFalse(Document.objects.filter(id=self.doc1.id).exists())
|
||||
self.assertFalse(Document.objects.filter(id=version.id).exists())
|
||||
|
||||
def test_delete_version_document_keeps_root(self) -> None:
|
||||
version = Document.objects.create(
|
||||
checksum="A-v1",
|
||||
title="A version",
|
||||
root_document=self.doc1,
|
||||
)
|
||||
|
||||
bulk_edit.delete([version.id])
|
||||
|
||||
self.assertTrue(Document.objects.filter(id=self.doc1.id).exists())
|
||||
self.assertFalse(Document.objects.filter(id=version.id).exists())
|
||||
|
||||
def test_get_root_and_current_doc_mapping(self) -> None:
|
||||
version1 = Document.objects.create(
|
||||
checksum="B-v1",
|
||||
title="B version 1",
|
||||
root_document=self.doc2,
|
||||
)
|
||||
version2 = Document.objects.create(
|
||||
checksum="B-v2",
|
||||
title="B version 2",
|
||||
root_document=self.doc2,
|
||||
)
|
||||
|
||||
root_ids_by_doc_id = bulk_edit._get_root_ids_by_doc_id(
|
||||
[self.doc2.id, version1.id, version2.id],
|
||||
)
|
||||
self.assertEqual(root_ids_by_doc_id[self.doc2.id], self.doc2.id)
|
||||
self.assertEqual(root_ids_by_doc_id[version1.id], self.doc2.id)
|
||||
self.assertEqual(root_ids_by_doc_id[version2.id], self.doc2.id)
|
||||
|
||||
root_docs, current_docs = bulk_edit._get_root_and_current_docs_by_root_id(
|
||||
{self.doc2.id},
|
||||
)
|
||||
self.assertEqual(root_docs[self.doc2.id].id, self.doc2.id)
|
||||
self.assertEqual(current_docs[self.doc2.id].id, version2.id)
|
||||
|
||||
@mock.patch("documents.tasks.bulk_update_documents.delay")
|
||||
def test_set_permissions(self, m) -> None:
|
||||
doc_ids = [self.doc1.id, self.doc2.id, self.doc3.id]
|
||||
@@ -970,8 +922,15 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
|
||||
mock_consume_file.assert_not_called()
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
def test_rotate(self, mock_consume_delay):
|
||||
@mock.patch("documents.tasks.bulk_update_documents.si")
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
|
||||
@mock.patch("celery.chord.delay")
|
||||
def test_rotate(
|
||||
self,
|
||||
mock_chord,
|
||||
mock_update_document,
|
||||
mock_update_documents,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents
|
||||
@@ -982,22 +941,19 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
"""
|
||||
doc_ids = [self.doc1.id, self.doc2.id]
|
||||
result = bulk_edit.rotate(doc_ids, 90)
|
||||
self.assertEqual(mock_consume_delay.call_count, 2)
|
||||
for call, expected_id in zip(
|
||||
mock_consume_delay.call_args_list,
|
||||
doc_ids,
|
||||
):
|
||||
consumable, overrides = call.args
|
||||
self.assertEqual(consumable.root_document_id, expected_id)
|
||||
self.assertIsNotNone(overrides)
|
||||
self.assertEqual(mock_update_document.call_count, 2)
|
||||
mock_update_documents.assert_called_once()
|
||||
mock_chord.assert_called_once()
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
@mock.patch("documents.tasks.bulk_update_documents.si")
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
|
||||
@mock.patch("pikepdf.Pdf.save")
|
||||
def test_rotate_with_error(
|
||||
self,
|
||||
mock_pdf_save,
|
||||
mock_consume_delay,
|
||||
mock_update_archive_file,
|
||||
mock_update_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -1016,12 +972,16 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
error_str = cm.output[0]
|
||||
expected_str = "Error rotating document"
|
||||
self.assertIn(expected_str, error_str)
|
||||
mock_consume_delay.assert_not_called()
|
||||
mock_update_archive_file.assert_not_called()
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
@mock.patch("documents.tasks.bulk_update_documents.si")
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.s")
|
||||
@mock.patch("celery.chord.delay")
|
||||
def test_rotate_non_pdf(
|
||||
self,
|
||||
mock_consume_delay,
|
||||
mock_chord,
|
||||
mock_update_document,
|
||||
mock_update_documents,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -1033,18 +993,17 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
"""
|
||||
with self.assertLogs("paperless.bulk_edit", level="INFO") as cm:
|
||||
result = bulk_edit.rotate([self.doc2.id, self.img_doc.id], 90)
|
||||
expected_str = f"Document {self.img_doc.id} is not a PDF, skipping rotation"
|
||||
self.assertTrue(any(expected_str in line for line in cm.output))
|
||||
self.assertEqual(mock_consume_delay.call_count, 1)
|
||||
consumable, overrides = mock_consume_delay.call_args[0]
|
||||
self.assertEqual(consumable.root_document_id, self.doc2.id)
|
||||
self.assertIsNotNone(overrides)
|
||||
output_str = cm.output[1]
|
||||
expected_str = "Document 4 is not a PDF, skipping rotation"
|
||||
self.assertIn(expected_str, output_str)
|
||||
self.assertEqual(mock_update_document.call_count, 1)
|
||||
mock_update_documents.assert_called_once()
|
||||
mock_chord.assert_called_once()
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
|
||||
@mock.patch("pikepdf.Pdf.save")
|
||||
@mock.patch("documents.data_models.magic.from_file", return_value="application/pdf")
|
||||
def test_delete_pages(self, mock_magic, mock_pdf_save, mock_consume_delay):
|
||||
def test_delete_pages(self, mock_pdf_save, mock_update_archive_file) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents
|
||||
@@ -1052,22 +1011,28 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
- Delete pages action is called with 1 document and 2 pages
|
||||
THEN:
|
||||
- Save should be called once
|
||||
- A new version should be enqueued via consume_file
|
||||
- Archive file should be updated once
|
||||
- The document's page_count should be reduced by the number of deleted pages
|
||||
"""
|
||||
doc_ids = [self.doc2.id]
|
||||
initial_page_count = self.doc2.page_count
|
||||
pages = [1, 3]
|
||||
result = bulk_edit.delete_pages(doc_ids, pages)
|
||||
mock_pdf_save.assert_called_once()
|
||||
mock_consume_delay.assert_called_once()
|
||||
consumable, overrides = mock_consume_delay.call_args[0]
|
||||
self.assertEqual(consumable.root_document_id, self.doc2.id)
|
||||
self.assertTrue(str(consumable.original_file).endswith("_pages_deleted.pdf"))
|
||||
self.assertIsNotNone(overrides)
|
||||
mock_update_archive_file.assert_called_once()
|
||||
self.assertEqual(result, "OK")
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
expected_page_count = initial_page_count - len(pages)
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertEqual(self.doc2.page_count, expected_page_count)
|
||||
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
|
||||
@mock.patch("pikepdf.Pdf.save")
|
||||
def test_delete_pages_with_error(self, mock_pdf_save, mock_consume_delay):
|
||||
def test_delete_pages_with_error(
|
||||
self,
|
||||
mock_pdf_save,
|
||||
mock_update_archive_file,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing documents
|
||||
@@ -1076,7 +1041,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
- PikePDF raises an error
|
||||
THEN:
|
||||
- Save should be called once
|
||||
- No new version should be enqueued
|
||||
- Archive file should not be updated
|
||||
"""
|
||||
mock_pdf_save.side_effect = Exception("Error saving PDF")
|
||||
doc_ids = [self.doc2.id]
|
||||
@@ -1087,7 +1052,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
error_str = cm.output[0]
|
||||
expected_str = "Error deleting pages from document"
|
||||
self.assertIn(expected_str, error_str)
|
||||
mock_consume_delay.assert_not_called()
|
||||
mock_update_archive_file.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.group")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@@ -1186,18 +1151,24 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertEqual(self.doc2.archive_serial_number, 333)
|
||||
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
def test_edit_pdf_with_update_document(self, mock_consume_delay):
|
||||
@mock.patch("documents.tasks.update_document_content_maybe_archive_file.delay")
|
||||
def test_edit_pdf_with_update_document(
|
||||
self,
|
||||
mock_update_document: mock.Mock,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- A single existing PDF document
|
||||
WHEN:
|
||||
- edit_pdf is called with update_document=True and a single output
|
||||
THEN:
|
||||
- A version update is enqueued targeting the existing document
|
||||
- The original document is updated in-place
|
||||
- The update_document_content_maybe_archive_file task is triggered
|
||||
"""
|
||||
doc_ids = [self.doc2.id]
|
||||
operations = [{"page": 1}, {"page": 2}]
|
||||
original_checksum = self.doc2.checksum
|
||||
original_page_count = self.doc2.page_count
|
||||
|
||||
result = bulk_edit.edit_pdf(
|
||||
doc_ids,
|
||||
@@ -1207,11 +1178,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
)
|
||||
|
||||
self.assertEqual(result, "OK")
|
||||
mock_consume_delay.assert_called_once()
|
||||
consumable, overrides = mock_consume_delay.call_args[0]
|
||||
self.assertEqual(consumable.root_document_id, self.doc2.id)
|
||||
self.assertTrue(str(consumable.original_file).endswith("_edited.pdf"))
|
||||
self.assertIsNotNone(overrides)
|
||||
self.doc2.refresh_from_db()
|
||||
self.assertNotEqual(self.doc2.checksum, original_checksum)
|
||||
self.assertNotEqual(self.doc2.page_count, original_page_count)
|
||||
mock_update_document.assert_called_once_with(document_id=self.doc2.id)
|
||||
|
||||
@mock.patch("documents.bulk_edit.group")
|
||||
@mock.patch("documents.tasks.consume_file.s")
|
||||
@@ -1288,20 +1258,10 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
mock_consume_file.assert_not_called()
|
||||
|
||||
@mock.patch("documents.bulk_edit.update_document_content_maybe_archive_file.delay")
|
||||
@mock.patch("documents.tasks.consume_file.delay")
|
||||
@mock.patch("documents.bulk_edit.tempfile.mkdtemp")
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_update_document(
|
||||
self,
|
||||
mock_open,
|
||||
mock_mkdtemp,
|
||||
mock_consume_delay,
|
||||
mock_update_document,
|
||||
):
|
||||
def test_remove_password_update_document(self, mock_open, mock_update_document):
|
||||
doc = self.doc1
|
||||
temp_dir = self.dirs.scratch_dir / "remove-password-update"
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
mock_mkdtemp.return_value = str(temp_dir)
|
||||
original_checksum = doc.checksum
|
||||
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock(), mock.Mock()]
|
||||
@@ -1321,17 +1281,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
self.assertEqual(result, "OK")
|
||||
mock_open.assert_called_once_with(doc.source_path, password="secret")
|
||||
fake_pdf.remove_unreferenced_resources.assert_called_once()
|
||||
mock_update_document.assert_not_called()
|
||||
mock_consume_delay.assert_called_once()
|
||||
consumable, overrides = mock_consume_delay.call_args[0]
|
||||
expected_path = temp_dir / f"{doc.id}_unprotected.pdf"
|
||||
self.assertTrue(expected_path.exists())
|
||||
self.assertEqual(
|
||||
Path(consumable.original_file).resolve(),
|
||||
expected_path.resolve(),
|
||||
)
|
||||
self.assertEqual(consumable.root_document_id, doc.id)
|
||||
self.assertIsNotNone(overrides)
|
||||
doc.refresh_from_db()
|
||||
self.assertNotEqual(doc.checksum, original_checksum)
|
||||
expected_checksum = hashlib.md5(doc.source_path.read_bytes()).hexdigest()
|
||||
self.assertEqual(doc.checksum, expected_checksum)
|
||||
self.assertEqual(doc.page_count, len(fake_pdf.pages))
|
||||
mock_update_document.assert_called_once_with(document_id=doc.id)
|
||||
|
||||
@mock.patch("documents.bulk_edit.chord")
|
||||
@mock.patch("documents.bulk_edit.group")
|
||||
@@ -1340,12 +1295,12 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_creates_consumable_document(
|
||||
self,
|
||||
mock_open: mock.Mock,
|
||||
mock_mkdtemp: mock.Mock,
|
||||
mock_consume_file: mock.Mock,
|
||||
mock_group: mock.Mock,
|
||||
mock_chord: mock.Mock,
|
||||
) -> None:
|
||||
mock_open,
|
||||
mock_mkdtemp,
|
||||
mock_consume_file,
|
||||
mock_group,
|
||||
mock_chord,
|
||||
):
|
||||
doc = self.doc2
|
||||
temp_dir = self.dirs.scratch_dir / "remove-password"
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -1354,8 +1309,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock()]
|
||||
|
||||
def save_side_effect(target_path: Path) -> None:
|
||||
target_path.write_bytes(b"password removed")
|
||||
def save_side_effect(target_path):
|
||||
Path(target_path).write_bytes(b"password removed")
|
||||
|
||||
fake_pdf.save.side_effect = save_side_effect
|
||||
mock_open.return_value.__enter__.return_value = fake_pdf
|
||||
@@ -1397,13 +1352,13 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_deletes_original(
|
||||
self,
|
||||
mock_open: mock.Mock,
|
||||
mock_mkdtemp: mock.Mock,
|
||||
mock_consume_file: mock.Mock,
|
||||
mock_group: mock.Mock,
|
||||
mock_chord: mock.Mock,
|
||||
mock_delete: mock.Mock,
|
||||
) -> None:
|
||||
mock_open,
|
||||
mock_mkdtemp,
|
||||
mock_consume_file,
|
||||
mock_group,
|
||||
mock_chord,
|
||||
mock_delete,
|
||||
):
|
||||
doc = self.doc2
|
||||
temp_dir = self.dirs.scratch_dir / "remove-password-delete"
|
||||
temp_dir.mkdir(parents=True, exist_ok=True)
|
||||
@@ -1412,8 +1367,8 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
fake_pdf = mock.MagicMock()
|
||||
fake_pdf.pages = [mock.Mock(), mock.Mock()]
|
||||
|
||||
def save_side_effect(target_path: Path) -> None:
|
||||
target_path.write_bytes(b"password removed")
|
||||
def save_side_effect(target_path):
|
||||
Path(target_path).write_bytes(b"password removed")
|
||||
|
||||
fake_pdf.save.side_effect = save_side_effect
|
||||
mock_open.return_value.__enter__.return_value = fake_pdf
|
||||
@@ -1436,7 +1391,7 @@ class TestPDFActions(DirectoriesMixin, TestCase):
|
||||
mock_delete.si.assert_called_once_with([doc.id])
|
||||
|
||||
@mock.patch("pikepdf.open")
|
||||
def test_remove_password_open_failure(self, mock_open: mock.Mock) -> None:
|
||||
def test_remove_password_open_failure(self, mock_open):
|
||||
mock_open.side_effect = RuntimeError("wrong password")
|
||||
|
||||
with self.assertLogs("paperless.bulk_edit", level="ERROR") as cm:
|
||||
|
||||
@@ -16,9 +16,6 @@ from guardian.core import ObjectPermissionChecker
|
||||
|
||||
from documents.barcodes import BarcodePlugin
|
||||
from documents.consumer import ConsumerError
|
||||
from documents.consumer import ConsumerPlugin
|
||||
from documents.consumer import ConsumerPreflightPlugin
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.data_models import DocumentSource
|
||||
from documents.models import Correspondent
|
||||
@@ -32,7 +29,6 @@ from documents.parsers import ParseError
|
||||
from documents.plugins.helpers import ProgressStatusOptions
|
||||
from documents.tasks import sanity_check
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from documents.tests.utils import DummyProgressManager
|
||||
from documents.tests.utils import FileSystemAssertsMixin
|
||||
from documents.tests.utils import GetConsumerMixin
|
||||
from paperless_mail.models import MailRule
|
||||
@@ -98,13 +94,11 @@ class FaultyGenericExceptionParser(_BaseTestParser):
|
||||
raise Exception("Generic exception.")
|
||||
|
||||
|
||||
def fake_magic_from_file(file, *, mime=False): # NOSONAR
|
||||
def fake_magic_from_file(file, *, mime=False):
|
||||
if mime:
|
||||
filepath = Path(file)
|
||||
if filepath.name.startswith("invalid_pdf"):
|
||||
return "application/octet-stream"
|
||||
if filepath.name.startswith("valid_pdf"):
|
||||
return "application/pdf"
|
||||
if filepath.suffix == ".pdf":
|
||||
return "application/pdf"
|
||||
elif filepath.suffix == ".png":
|
||||
@@ -670,144 +664,6 @@ class TestConsumer(
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@mock.patch("documents.consumer.load_classifier")
|
||||
def test_version_label_override_applies(self, m) -> None:
|
||||
m.return_value = MagicMock()
|
||||
|
||||
with self.get_consumer(
|
||||
self.get_test_file(),
|
||||
DocumentMetadataOverrides(version_label="v1"),
|
||||
) as consumer:
|
||||
consumer.run()
|
||||
|
||||
document = Document.objects.first()
|
||||
assert document is not None
|
||||
|
||||
self.assertEqual(document.version_label, "v1")
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||
@mock.patch("documents.consumer.load_classifier")
|
||||
def test_consume_version_creates_new_version(self, m) -> None:
|
||||
m.return_value = MagicMock()
|
||||
|
||||
with self.get_consumer(self.get_test_file()) as consumer:
|
||||
consumer.run()
|
||||
|
||||
root_doc = Document.objects.first()
|
||||
self.assertIsNotNone(root_doc)
|
||||
assert root_doc is not None
|
||||
|
||||
actor = User.objects.create_user(
|
||||
username="actor",
|
||||
email="actor@example.com",
|
||||
password="password",
|
||||
)
|
||||
|
||||
version_file = self.get_test_file2()
|
||||
status = DummyProgressManager(version_file.name, None)
|
||||
overrides = DocumentMetadataOverrides(
|
||||
version_label="v2",
|
||||
actor_id=actor.pk,
|
||||
)
|
||||
doc = ConsumableDocument(
|
||||
DocumentSource.ApiUpload,
|
||||
original_file=version_file,
|
||||
root_document_id=root_doc.pk,
|
||||
)
|
||||
preflight = ConsumerPreflightPlugin(
|
||||
doc,
|
||||
overrides,
|
||||
status, # type: ignore[arg-type]
|
||||
self.dirs.scratch_dir,
|
||||
"task-id",
|
||||
)
|
||||
preflight.setup()
|
||||
preflight.run()
|
||||
|
||||
consumer = ConsumerPlugin(
|
||||
doc,
|
||||
overrides,
|
||||
status, # type: ignore[arg-type]
|
||||
self.dirs.scratch_dir,
|
||||
"task-id",
|
||||
)
|
||||
consumer.setup()
|
||||
try:
|
||||
self.assertTrue(consumer.filename.endswith("_v0.pdf"))
|
||||
consumer.run()
|
||||
finally:
|
||||
consumer.cleanup()
|
||||
|
||||
versions = Document.objects.filter(root_document=root_doc)
|
||||
self.assertEqual(versions.count(), 1)
|
||||
version = versions.first()
|
||||
assert version is not None
|
||||
assert version.original_filename is not None
|
||||
self.assertEqual(version.version_label, "v2")
|
||||
self.assertTrue(version.original_filename.endswith("_v0.pdf"))
|
||||
self.assertTrue(bool(version.content))
|
||||
|
||||
@override_settings(AUDIT_LOG_ENABLED=True)
|
||||
@mock.patch("documents.consumer.load_classifier")
|
||||
def test_consume_version_with_missing_actor_and_filename_without_suffix(
|
||||
self,
|
||||
m: mock.Mock,
|
||||
) -> None:
|
||||
m.return_value = MagicMock()
|
||||
|
||||
with self.get_consumer(self.get_test_file()) as consumer:
|
||||
consumer.run()
|
||||
|
||||
root_doc = Document.objects.first()
|
||||
self.assertIsNotNone(root_doc)
|
||||
assert root_doc is not None
|
||||
|
||||
version_file = self.get_test_file2()
|
||||
status = DummyProgressManager(version_file.name, None)
|
||||
overrides = DocumentMetadataOverrides(
|
||||
filename="valid_pdf_version-upload",
|
||||
actor_id=999999,
|
||||
)
|
||||
doc = ConsumableDocument(
|
||||
DocumentSource.ApiUpload,
|
||||
original_file=version_file,
|
||||
root_document_id=root_doc.pk,
|
||||
)
|
||||
|
||||
preflight = ConsumerPreflightPlugin(
|
||||
doc,
|
||||
overrides,
|
||||
status, # type: ignore[arg-type]
|
||||
self.dirs.scratch_dir,
|
||||
"task-id",
|
||||
)
|
||||
preflight.setup()
|
||||
preflight.run()
|
||||
|
||||
consumer = ConsumerPlugin(
|
||||
doc,
|
||||
overrides,
|
||||
status, # type: ignore[arg-type]
|
||||
self.dirs.scratch_dir,
|
||||
"task-id",
|
||||
)
|
||||
consumer.setup()
|
||||
try:
|
||||
self.assertEqual(consumer.filename, "valid_pdf_version-upload_v0")
|
||||
consumer.run()
|
||||
finally:
|
||||
consumer.cleanup()
|
||||
|
||||
version = (
|
||||
Document.objects.filter(root_document=root_doc).order_by("-id").first()
|
||||
)
|
||||
self.assertIsNotNone(version)
|
||||
assert version is not None
|
||||
self.assertEqual(version.original_filename, "valid_pdf_version-upload_v0")
|
||||
self.assertTrue(bool(version.content))
|
||||
|
||||
@mock.patch("documents.consumer.load_classifier")
|
||||
def testClassifyDocument(self, m) -> None:
|
||||
correspondent = Correspondent.objects.create(
|
||||
@@ -1323,7 +1179,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
|
||||
consumer.run_post_consume_script(doc)
|
||||
|
||||
@mock.patch("documents.consumer.run_subprocess")
|
||||
def test_post_consume_script_simple(self, m: mock.MagicMock) -> None:
|
||||
def test_post_consume_script_simple(self, m) -> None:
|
||||
with tempfile.NamedTemporaryFile() as script:
|
||||
with override_settings(POST_CONSUME_SCRIPT=script.name):
|
||||
doc = Document.objects.create(title="Test", mime_type="application/pdf")
|
||||
@@ -1334,10 +1190,7 @@ class PostConsumeTestCase(DirectoriesMixin, GetConsumerMixin, TestCase):
|
||||
m.assert_called_once()
|
||||
|
||||
@mock.patch("documents.consumer.run_subprocess")
|
||||
def test_post_consume_script_with_correspondent_and_type(
|
||||
self,
|
||||
m: mock.MagicMock,
|
||||
) -> None:
|
||||
def test_post_consume_script_with_correspondent_and_type(self, m) -> None:
|
||||
with tempfile.NamedTemporaryFile() as script:
|
||||
with override_settings(POST_CONSUME_SCRIPT=script.name):
|
||||
c = Correspondent.objects.create(name="my_bank")
|
||||
@@ -1420,19 +1273,6 @@ class TestMetadataOverrides(TestCase):
|
||||
base.update(incoming)
|
||||
self.assertTrue(base.skip_asn_if_exists)
|
||||
|
||||
def test_update_actor_and_version_label(self) -> None:
|
||||
base = DocumentMetadataOverrides(
|
||||
actor_id=1,
|
||||
version_label="root",
|
||||
)
|
||||
incoming = DocumentMetadataOverrides(
|
||||
actor_id=2,
|
||||
version_label="v2",
|
||||
)
|
||||
base.update(incoming)
|
||||
self.assertEqual(base.actor_id, 2)
|
||||
self.assertEqual(base.version_label, "v2")
|
||||
|
||||
|
||||
class TestBarcodeApplyDetectedASN(TestCase):
|
||||
"""
|
||||
|
||||
@@ -45,22 +45,10 @@ class TestDocument(TestCase):
|
||||
Path(file_path).touch()
|
||||
Path(thumb_path).touch()
|
||||
|
||||
with mock.patch(
|
||||
"documents.signals.handlers.Path.unlink",
|
||||
autospec=True,
|
||||
) as mock_unlink:
|
||||
with mock.patch("documents.signals.handlers.Path.unlink") as mock_unlink:
|
||||
document.delete()
|
||||
empty_trash([document.pk])
|
||||
|
||||
target_paths: set[str] = {str(file_path), str(thumb_path)}
|
||||
|
||||
actual_deletions = [
|
||||
call
|
||||
for call in mock_unlink.call_args_list
|
||||
if str(call.args[0]) in target_paths
|
||||
]
|
||||
|
||||
self.assertEqual(len(actual_deletions), 2)
|
||||
self.assertEqual(mock_unlink.call_count, 2)
|
||||
|
||||
def test_document_soft_delete(self) -> None:
|
||||
document = Document.objects.create(
|
||||
@@ -77,12 +65,7 @@ class TestDocument(TestCase):
|
||||
Path(file_path).touch()
|
||||
Path(thumb_path).touch()
|
||||
|
||||
target_paths: set[str] = {str(file_path), str(thumb_path)}
|
||||
|
||||
with mock.patch(
|
||||
"documents.signals.handlers.Path.unlink",
|
||||
autospec=True,
|
||||
) as mock_unlink:
|
||||
with mock.patch("documents.signals.handlers.Path.unlink") as mock_unlink:
|
||||
document.delete()
|
||||
self.assertEqual(mock_unlink.call_count, 0)
|
||||
|
||||
@@ -93,36 +76,7 @@ class TestDocument(TestCase):
|
||||
|
||||
document.delete()
|
||||
empty_trash([document.pk])
|
||||
|
||||
actual_deletions = [
|
||||
call
|
||||
for call in mock_unlink.call_args_list
|
||||
if str(call.args[0]) in target_paths
|
||||
]
|
||||
|
||||
self.assertEqual(len(actual_deletions), 2)
|
||||
|
||||
def test_delete_root_deletes_versions(self) -> None:
|
||||
root = Document.objects.create(
|
||||
correspondent=Correspondent.objects.create(name="Test0"),
|
||||
title="Head",
|
||||
content="content",
|
||||
checksum="checksum",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
Document.objects.create(
|
||||
root_document=root,
|
||||
correspondent=root.correspondent,
|
||||
title="Version",
|
||||
content="content",
|
||||
checksum="checksum2",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
root.delete()
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 2)
|
||||
self.assertEqual(mock_unlink.call_count, 2)
|
||||
|
||||
def test_file_name(self) -> None:
|
||||
doc = Document(
|
||||
|
||||
@@ -4,7 +4,6 @@ from io import StringIO
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from auditlog.models import LogEntry
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.management import call_command
|
||||
@@ -20,7 +19,6 @@ from documents.tests.utils import FileSystemAssertsMixin
|
||||
sample_file: Path = Path(__file__).parent / "samples" / "simple.pdf"
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
||||
class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
def make_models(self):
|
||||
@@ -96,7 +94,6 @@ class TestArchiver(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
self.assertEqual(doc2.archive_filename, "document_01.pdf")
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestMakeIndex(TestCase):
|
||||
@mock.patch("documents.management.commands.document_index.index_reindex")
|
||||
def test_reindex(self, m) -> None:
|
||||
@@ -109,7 +106,6 @@ class TestMakeIndex(TestCase):
|
||||
m.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestRenamer(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
@override_settings(FILENAME_FORMAT="")
|
||||
def test_rename(self) -> None:
|
||||
@@ -144,7 +140,6 @@ class TestCreateClassifier(TestCase):
|
||||
m.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestSanityChecker(DirectoriesMixin, TestCase):
|
||||
def test_no_issues(self) -> None:
|
||||
with self.assertLogs() as capture:
|
||||
@@ -170,7 +165,6 @@ class TestSanityChecker(DirectoriesMixin, TestCase):
|
||||
self.assertIn("Checksum mismatch. Stored: abc, actual:", capture.output[1])
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestConvertMariaDBUUID(TestCase):
|
||||
@mock.patch("django.db.connection.schema_editor")
|
||||
def test_convert(self, m) -> None:
|
||||
@@ -184,7 +178,6 @@ class TestConvertMariaDBUUID(TestCase):
|
||||
self.assertIn("Successfully converted", stdout.getvalue())
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestPruneAuditLogs(TestCase):
|
||||
def test_prune_audit_logs(self) -> None:
|
||||
LogEntry.objects.create(
|
||||
|
||||
@@ -577,7 +577,6 @@ class TestTagsFromPath:
|
||||
assert len(tag_ids) == 0
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestCommandValidation:
|
||||
"""Tests for command argument validation."""
|
||||
|
||||
@@ -606,7 +605,6 @@ class TestCommandValidation:
|
||||
cmd.handle(directory=str(sample_pdf), oneshot=True, testing=False)
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@pytest.mark.usefixtures("mock_supported_extensions")
|
||||
class TestCommandOneshot:
|
||||
"""Tests for oneshot mode."""
|
||||
@@ -777,7 +775,6 @@ def start_consumer(
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@pytest.mark.django_db
|
||||
class TestCommandWatch:
|
||||
"""Integration tests for the watch loop."""
|
||||
@@ -899,7 +896,6 @@ class TestCommandWatch:
|
||||
assert not thread.is_alive()
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@pytest.mark.django_db
|
||||
class TestCommandWatchPolling:
|
||||
"""Tests for polling mode."""
|
||||
@@ -932,7 +928,6 @@ class TestCommandWatchPolling:
|
||||
mock_consume_file_delay.delay.assert_called()
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@pytest.mark.django_db
|
||||
class TestCommandWatchRecursive:
|
||||
"""Tests for recursive watching."""
|
||||
@@ -996,7 +991,6 @@ class TestCommandWatchRecursive:
|
||||
assert len(overrides.tag_ids) == 2
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
@pytest.mark.django_db
|
||||
class TestCommandWatchEdgeCases:
|
||||
"""Tests for edge cases and error handling."""
|
||||
|
||||
@@ -7,7 +7,6 @@ from pathlib import Path
|
||||
from unittest import mock
|
||||
from zipfile import ZipFile
|
||||
|
||||
import pytest
|
||||
from allauth.socialaccount.models import SocialAccount
|
||||
from allauth.socialaccount.models import SocialApp
|
||||
from allauth.socialaccount.models import SocialToken
|
||||
@@ -46,7 +45,6 @@ from documents.tests.utils import paperless_environment
|
||||
from paperless_mail.models import MailAccount
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestExportImport(
|
||||
DirectoriesMixin,
|
||||
FileSystemAssertsMixin,
|
||||
@@ -848,7 +846,6 @@ class TestExportImport(
|
||||
self.assertEqual(Document.objects.all().count(), 4)
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestCryptExportImport(
|
||||
DirectoriesMixin,
|
||||
FileSystemAssertsMixin,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
from io import StringIO
|
||||
|
||||
import pytest
|
||||
from django.core.management import CommandError
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
@@ -8,7 +7,6 @@ from django.test import TestCase
|
||||
from documents.models import Document
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestFuzzyMatchCommand(TestCase):
|
||||
MSG_REGEX = r"Document \d fuzzy match to \d \(confidence \d\d\.\d\d\d\)"
|
||||
|
||||
@@ -51,6 +49,19 @@ class TestFuzzyMatchCommand(TestCase):
|
||||
self.call_command("--ratio", "101")
|
||||
self.assertIn("The ratio must be between 0 and 100", str(e.exception))
|
||||
|
||||
def test_invalid_process_count(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Invalid process count less than 0 above upper
|
||||
WHEN:
|
||||
- Command is called
|
||||
THEN:
|
||||
- Error is raised indicating issue
|
||||
"""
|
||||
with self.assertRaises(CommandError) as e:
|
||||
self.call_command("--processes", "0")
|
||||
self.assertIn("There must be at least 1 process", str(e.exception))
|
||||
|
||||
def test_no_matches(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -140,7 +151,7 @@ class TestFuzzyMatchCommand(TestCase):
|
||||
mime_type="application/pdf",
|
||||
filename="final_test.pdf",
|
||||
)
|
||||
stdout, _ = self.call_command("--no-progress-bar")
|
||||
stdout, _ = self.call_command()
|
||||
lines = [x.strip() for x in stdout.splitlines() if x.strip()]
|
||||
self.assertEqual(len(lines), 3)
|
||||
for line in lines:
|
||||
@@ -183,7 +194,7 @@ class TestFuzzyMatchCommand(TestCase):
|
||||
|
||||
self.assertEqual(Document.objects.count(), 3)
|
||||
|
||||
stdout, _ = self.call_command("--delete", "--no-progress-bar")
|
||||
stdout, _ = self.call_command("--delete")
|
||||
|
||||
self.assertIn(
|
||||
"The command is configured to delete documents. Use with caution",
|
||||
|
||||
@@ -4,7 +4,6 @@ from io import StringIO
|
||||
from pathlib import Path
|
||||
from zipfile import ZipFile
|
||||
|
||||
import pytest
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
@@ -19,7 +18,6 @@ from documents.tests.utils import FileSystemAssertsMixin
|
||||
from documents.tests.utils import SampleDirMixin
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestCommandImport(
|
||||
DirectoriesMixin,
|
||||
FileSystemAssertsMixin,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import pytest
|
||||
from django.core.management import call_command
|
||||
from django.core.management.base import CommandError
|
||||
from django.test import TestCase
|
||||
@@ -11,7 +10,6 @@ from documents.models import Tag
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestRetagger(DirectoriesMixin, TestCase):
|
||||
def make_models(self) -> None:
|
||||
self.sp1 = StoragePath.objects.create(
|
||||
|
||||
@@ -2,7 +2,6 @@ import os
|
||||
from io import StringIO
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from django.contrib.auth.models import User
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
@@ -10,7 +9,6 @@ from django.test import TestCase
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestManageSuperUser(DirectoriesMixin, TestCase):
|
||||
def call_command(self, environ):
|
||||
out = StringIO()
|
||||
|
||||
@@ -2,7 +2,6 @@ import shutil
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
from django.core.management import call_command
|
||||
from django.test import TestCase
|
||||
|
||||
@@ -13,7 +12,6 @@ from documents.tests.utils import DirectoriesMixin
|
||||
from documents.tests.utils import FileSystemAssertsMixin
|
||||
|
||||
|
||||
@pytest.mark.management
|
||||
class TestMakeThumbnails(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
def make_models(self) -> None:
|
||||
self.d1 = Document.objects.create(
|
||||
|
||||
@@ -7,9 +7,7 @@ from django.test import TestCase
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.data_models import DocumentSource
|
||||
from documents.models import Document
|
||||
from documents.models import PaperlessTask
|
||||
from documents.signals.handlers import add_to_index
|
||||
from documents.signals.handlers import before_task_publish_handler
|
||||
from documents.signals.handlers import task_failure_handler
|
||||
from documents.signals.handlers import task_postrun_handler
|
||||
@@ -200,39 +198,3 @@ class TestTaskSignalHandler(DirectoriesMixin, TestCase):
|
||||
task = PaperlessTask.objects.get()
|
||||
|
||||
self.assertEqual(celery.states.FAILURE, task.status)
|
||||
|
||||
def test_add_to_index_indexes_root_once_for_root_documents(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
with mock.patch("documents.index.add_or_update_document") as add:
|
||||
add_to_index(sender=None, document=root)
|
||||
|
||||
add.assert_called_once_with(root)
|
||||
|
||||
def test_add_to_index_reindexes_root_for_version_documents(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
version = Document.objects.create(
|
||||
title="version",
|
||||
checksum="version",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
|
||||
with mock.patch("documents.index.add_or_update_document") as add:
|
||||
add_to_index(sender=None, document=version)
|
||||
|
||||
self.assertEqual(add.call_count, 2)
|
||||
self.assertEqual(add.call_args_list[0].args[0].id, version.id)
|
||||
self.assertEqual(add.call_args_list[1].args[0].id, root.id)
|
||||
self.assertEqual(
|
||||
add.call_args_list[1].kwargs,
|
||||
{"effective_content": version.content},
|
||||
)
|
||||
|
||||
@@ -1,91 +0,0 @@
|
||||
from types import SimpleNamespace
|
||||
from unittest import mock
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
from documents.conditionals import metadata_etag
|
||||
from documents.conditionals import preview_etag
|
||||
from documents.conditionals import thumbnail_last_modified
|
||||
from documents.models import Document
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
from documents.versioning import resolve_effective_document_by_pk
|
||||
|
||||
|
||||
class TestConditionals(DirectoriesMixin, TestCase):
|
||||
def test_metadata_etag_uses_latest_version_for_root_request(self) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root-checksum",
|
||||
archive_checksum="root-archive",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
latest = Document.objects.create(
|
||||
title="v1",
|
||||
checksum="version-checksum",
|
||||
archive_checksum="version-archive",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
request = SimpleNamespace(query_params={})
|
||||
|
||||
self.assertEqual(metadata_etag(request, root.id), latest.checksum)
|
||||
self.assertEqual(preview_etag(request, root.id), latest.archive_checksum)
|
||||
|
||||
def test_resolve_effective_doc_returns_none_for_invalid_or_unrelated_version(
|
||||
self,
|
||||
) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_root = Document.objects.create(
|
||||
title="other",
|
||||
checksum="other",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
other_version = Document.objects.create(
|
||||
title="other-v1",
|
||||
checksum="other-v1",
|
||||
mime_type="application/pdf",
|
||||
root_document=other_root,
|
||||
)
|
||||
|
||||
invalid_request = SimpleNamespace(query_params={"version": "not-a-number"})
|
||||
unrelated_request = SimpleNamespace(
|
||||
query_params={"version": str(other_version.id)},
|
||||
)
|
||||
|
||||
self.assertIsNone(
|
||||
resolve_effective_document_by_pk(root.id, invalid_request).document,
|
||||
)
|
||||
self.assertIsNone(
|
||||
resolve_effective_document_by_pk(root.id, unrelated_request).document,
|
||||
)
|
||||
|
||||
def test_thumbnail_last_modified_uses_effective_document_for_cache_key(
|
||||
self,
|
||||
) -> None:
|
||||
root = Document.objects.create(
|
||||
title="root",
|
||||
checksum="root",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
latest = Document.objects.create(
|
||||
title="v2",
|
||||
checksum="v2",
|
||||
mime_type="application/pdf",
|
||||
root_document=root,
|
||||
)
|
||||
latest.thumbnail_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
latest.thumbnail_path.write_bytes(b"thumb")
|
||||
|
||||
request = SimpleNamespace(query_params={})
|
||||
with mock.patch(
|
||||
"documents.conditionals.get_thumbnail_modified_key",
|
||||
return_value="thumb-modified-key",
|
||||
) as get_thumb_key:
|
||||
result = thumbnail_last_modified(request, root.id)
|
||||
|
||||
self.assertIsNotNone(result)
|
||||
get_thumb_key.assert_called_once_with(latest.id)
|
||||
@@ -7,7 +7,6 @@ from collections.abc import Callable
|
||||
from datetime import timedelta
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from unittest import mock
|
||||
|
||||
import pytest
|
||||
@@ -57,7 +56,6 @@ from documents.models import WorkflowActionEmail
|
||||
from documents.models import WorkflowActionWebhook
|
||||
from documents.models import WorkflowRun
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.plugins.base import StopConsumeTaskError
|
||||
from documents.serialisers import WorkflowTriggerSerializer
|
||||
from documents.signals import document_consumption_finished
|
||||
from documents.tests.utils import DirectoriesMixin
|
||||
@@ -643,7 +641,9 @@ class TestWorkflows(
|
||||
|
||||
expected_str = f"Document did not match {w}"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
expected_str = f"Document path {test_file} does not match"
|
||||
expected_str = (
|
||||
f"Document path {Path(test_file).resolve(strict=False)} does not match"
|
||||
)
|
||||
self.assertIn(expected_str, cm.output[1])
|
||||
|
||||
def test_workflow_no_match_mail_rule(self) -> None:
|
||||
@@ -1968,6 +1968,36 @@ class TestWorkflows(
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.owner, self.user2)
|
||||
|
||||
@mock.patch("documents.tasks.send_websocket_document_updated")
|
||||
def test_workflow_scheduled_trigger_sends_websocket_update(
|
||||
self,
|
||||
mock_send_websocket_document_updated,
|
||||
) -> None:
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.SCHEDULED,
|
||||
schedule_offset_days=1,
|
||||
schedule_date_field=WorkflowTrigger.ScheduleDateField.CREATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(assign_owner=self.user2)
|
||||
workflow = Workflow.objects.create(name="Workflow 1", order=0)
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
created=timezone.now() - timedelta(days=2),
|
||||
)
|
||||
|
||||
tasks.check_scheduled_workflows()
|
||||
|
||||
self.assertEqual(mock_send_websocket_document_updated.call_count, 1)
|
||||
self.assertEqual(
|
||||
mock_send_websocket_document_updated.call_args.kwargs["document"].pk,
|
||||
doc.pk,
|
||||
)
|
||||
|
||||
def test_workflow_scheduled_trigger_added(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -3917,427 +3947,6 @@ class TestWorkflows(
|
||||
)
|
||||
assert mock_remove_password.call_count == 2
|
||||
|
||||
def test_workflow_trash_action_soft_delete(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Document updated workflow with delete action
|
||||
WHEN:
|
||||
- Document that matches is updated
|
||||
THEN:
|
||||
- Document is moved to trash (soft deleted)
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow 1",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 1)
|
||||
self.assertEqual(Document.deleted_objects.count(), 0)
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 1)
|
||||
|
||||
@override_settings(
|
||||
PAPERLESS_EMAIL_HOST="localhost",
|
||||
EMAIL_ENABLED=True,
|
||||
PAPERLESS_URL="http://localhost:8000",
|
||||
)
|
||||
@mock.patch("django.core.mail.message.EmailMessage.send")
|
||||
def test_workflow_trash_with_email_action(self, mock_email_send):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow with email action, then move to trash action
|
||||
WHEN:
|
||||
- Document matches and workflow runs
|
||||
THEN:
|
||||
- Email is sent first
|
||||
- Document is moved to trash (soft deleted)
|
||||
"""
|
||||
mock_email_send.return_value = 1
|
||||
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
email_action = WorkflowActionEmail.objects.create(
|
||||
subject="Document deleted: {doc_title}",
|
||||
body="Document {doc_title} will be deleted",
|
||||
to="user@example.com",
|
||||
include_document=False,
|
||||
)
|
||||
email_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.EMAIL,
|
||||
email=email_action,
|
||||
)
|
||||
trash_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow with email then move to trash",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(email_workflow_action, trash_workflow_action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 1)
|
||||
self.assertEqual(Document.deleted_objects.count(), 0)
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
mock_email_send.assert_called_once()
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 1)
|
||||
|
||||
@override_settings(
|
||||
PAPERLESS_URL="http://localhost:8000",
|
||||
)
|
||||
@mock.patch("documents.workflows.webhooks.send_webhook.delay")
|
||||
def test_workflow_trash_with_webhook_action(self, mock_webhook_delay):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow with webhook action (include_document=True), then move to trash action
|
||||
WHEN:
|
||||
- Document matches and workflow runs
|
||||
THEN:
|
||||
- Webhook .delay() is called with complete data including file bytes
|
||||
- Document is moved to trash (soft deleted)
|
||||
- Webhook task has all necessary data and doesn't rely on document existence
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
webhook_action = WorkflowActionWebhook.objects.create(
|
||||
use_params=True,
|
||||
params={
|
||||
"title": "{{doc_title}}",
|
||||
"message": "Document being deleted",
|
||||
},
|
||||
url="https://paperless-ngx.com/webhook",
|
||||
include_document=True,
|
||||
)
|
||||
webhook_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.WEBHOOK,
|
||||
webhook=webhook_action,
|
||||
)
|
||||
trash_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow with webhook then move to trash",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(webhook_workflow_action, trash_workflow_action)
|
||||
w.save()
|
||||
|
||||
test_file = shutil.copy(
|
||||
self.SAMPLE_DIR / "simple.pdf",
|
||||
self.dirs.scratch_dir / "simple.pdf",
|
||||
)
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="simple.pdf",
|
||||
filename=test_file,
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 1)
|
||||
self.assertEqual(Document.deleted_objects.count(), 0)
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
mock_webhook_delay.assert_called_once()
|
||||
call_kwargs = mock_webhook_delay.call_args[1]
|
||||
self.assertEqual(call_kwargs["url"], "https://paperless-ngx.com/webhook")
|
||||
self.assertEqual(
|
||||
call_kwargs["data"],
|
||||
{"title": "sample test", "message": "Document being deleted"},
|
||||
)
|
||||
self.assertIsNotNone(call_kwargs["files"])
|
||||
self.assertIn("file", call_kwargs["files"])
|
||||
self.assertEqual(call_kwargs["files"]["file"][0], "simple.pdf")
|
||||
self.assertEqual(call_kwargs["files"]["file"][2], "application/pdf")
|
||||
self.assertIsInstance(call_kwargs["files"]["file"][1], bytes)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 1)
|
||||
|
||||
@override_settings(
|
||||
PAPERLESS_EMAIL_HOST="localhost",
|
||||
EMAIL_ENABLED=True,
|
||||
PAPERLESS_URL="http://localhost:8000",
|
||||
)
|
||||
@mock.patch("django.core.mail.message.EmailMessage.send")
|
||||
def test_workflow_trash_after_email_failure(self, mock_email_send) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow with email action (that fails), then move to trash action
|
||||
WHEN:
|
||||
- Document matches and workflow runs
|
||||
- Email action raises exception
|
||||
THEN:
|
||||
- Email failure is logged
|
||||
- Move to Trash still executes successfully (soft delete)
|
||||
"""
|
||||
mock_email_send.side_effect = Exception("Email server error")
|
||||
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
email_action = WorkflowActionEmail.objects.create(
|
||||
subject="Document deleted: {doc_title}",
|
||||
body="Document {doc_title} will be deleted",
|
||||
to="user@example.com",
|
||||
include_document=False,
|
||||
)
|
||||
email_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.EMAIL,
|
||||
email=email_action,
|
||||
)
|
||||
trash_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow with failing email then move to trash",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(email_workflow_action, trash_workflow_action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 1)
|
||||
self.assertEqual(Document.deleted_objects.count(), 0)
|
||||
|
||||
with self.assertLogs("paperless.workflows.actions", level="ERROR") as cm:
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
expected_str = "Error occurred sending notification email"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 1)
|
||||
|
||||
def test_multiple_workflows_trash_then_assignment(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow 1 (order=0) with move to trash action
|
||||
- Workflow 2 (order=1) with assignment action
|
||||
- Both workflows match the same document
|
||||
WHEN:
|
||||
- Workflows run sequentially
|
||||
THEN:
|
||||
- First workflow runs and deletes document (soft delete)
|
||||
- Second workflow does not trigger (document no longer exists)
|
||||
- Logs confirm move to trash and skipping of remaining workflows
|
||||
"""
|
||||
trigger1 = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
trash_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w1 = Workflow.objects.create(
|
||||
name="Workflow 1 - Move to Trash",
|
||||
order=0,
|
||||
)
|
||||
w1.triggers.add(trigger1)
|
||||
w1.actions.add(trash_workflow_action)
|
||||
w1.save()
|
||||
|
||||
trigger2 = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
assignment_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
assign_correspondent=self.c2,
|
||||
)
|
||||
w2 = Workflow.objects.create(
|
||||
name="Workflow 2 - Assignment",
|
||||
order=1,
|
||||
)
|
||||
w2.triggers.add(trigger2)
|
||||
w2.actions.add(assignment_action)
|
||||
w2.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 1)
|
||||
self.assertEqual(Document.deleted_objects.count(), 0)
|
||||
|
||||
with self.assertLogs("paperless", level="DEBUG") as cm:
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
self.assertEqual(Document.deleted_objects.count(), 1)
|
||||
|
||||
# We check logs instead of WorkflowRun.objects.count() because when the document
|
||||
# is soft-deleted, the WorkflowRun is cascade-deleted (hard delete) since it does
|
||||
# not inherit from the SoftDeleteModel. The logs confirm that the first workflow
|
||||
# executed the move to trash and remaining workflows were skipped.
|
||||
log_output = "\n".join(cm.output)
|
||||
self.assertIn("Moved document", log_output)
|
||||
self.assertIn("to trash", log_output)
|
||||
self.assertIn(
|
||||
"Document was moved to trash, skipping remaining workflows",
|
||||
log_output,
|
||||
)
|
||||
|
||||
def test_workflow_delete_action_during_consumption(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow with consumption trigger and delete action
|
||||
WHEN:
|
||||
- Document is being consumed and workflow runs
|
||||
THEN:
|
||||
- StopConsumeTaskError is raised to halt consumption
|
||||
- Original file is deleted
|
||||
- No document is created
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
sources=f"{DocumentSource.ConsumeFolder}",
|
||||
filter_filename="*",
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow Delete During Consumption",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
# Create a test file to be consumed
|
||||
test_file = shutil.copy(
|
||||
self.SAMPLE_DIR / "simple.pdf",
|
||||
self.dirs.scratch_dir / "simple.pdf",
|
||||
)
|
||||
test_file_path = Path(test_file)
|
||||
self.assertTrue(test_file_path.exists())
|
||||
|
||||
# Create a ConsumableDocument
|
||||
consumable_doc = ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=test_file_path,
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
|
||||
# Run workflows with overrides (consumption flow)
|
||||
with self.assertRaises(StopConsumeTaskError) as context:
|
||||
run_workflows(
|
||||
WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
consumable_doc,
|
||||
overrides=DocumentMetadataOverrides(),
|
||||
)
|
||||
|
||||
self.assertIn("deleted by workflow action", str(context.exception))
|
||||
|
||||
# File should be deleted
|
||||
self.assertFalse(test_file_path.exists())
|
||||
|
||||
# No document should be created
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
|
||||
def test_workflow_delete_action_during_consumption_with_assignment(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Workflow with consumption trigger, assignment action, then delete action
|
||||
WHEN:
|
||||
- Document is being consumed and workflow runs
|
||||
THEN:
|
||||
- StopConsumeTaskError is raised to halt consumption
|
||||
- Original file is deleted
|
||||
- No document is created (even though assignment would have worked)
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
sources=f"{DocumentSource.ConsumeFolder}",
|
||||
filter_filename="*",
|
||||
)
|
||||
assignment_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
assign_title="This should not be applied",
|
||||
assign_correspondent=self.c,
|
||||
)
|
||||
trash_workflow_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.MOVE_TO_TRASH,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow Assignment then Delete During Consumption",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(assignment_action, trash_workflow_action)
|
||||
w.save()
|
||||
|
||||
# Create a test file to be consumed
|
||||
test_file = shutil.copy(
|
||||
self.SAMPLE_DIR / "simple.pdf",
|
||||
self.dirs.scratch_dir / "simple2.pdf",
|
||||
)
|
||||
test_file_path = Path(test_file)
|
||||
self.assertTrue(test_file_path.exists())
|
||||
|
||||
# Create a ConsumableDocument
|
||||
consumable_doc = ConsumableDocument(
|
||||
source=DocumentSource.ConsumeFolder,
|
||||
original_file=test_file_path,
|
||||
)
|
||||
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
|
||||
# Run workflows with overrides (consumption flow)
|
||||
with self.assertRaises(StopConsumeTaskError):
|
||||
run_workflows(
|
||||
WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
consumable_doc,
|
||||
overrides=DocumentMetadataOverrides(),
|
||||
)
|
||||
|
||||
# File should be deleted
|
||||
self.assertFalse(test_file_path.exists())
|
||||
|
||||
# No document should be created
|
||||
self.assertEqual(Document.objects.count(), 0)
|
||||
|
||||
|
||||
class TestWebhookSend:
|
||||
def test_send_webhook_data_or_json(
|
||||
@@ -4380,17 +3989,13 @@ class TestWebhookSend:
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def resolve_to(monkeypatch: pytest.MonkeyPatch) -> Callable[[str], None]:
|
||||
def resolve_to(monkeypatch):
|
||||
"""
|
||||
Force DNS resolution to a specific IP for any hostname.
|
||||
"""
|
||||
|
||||
def _set(ip: str) -> None:
|
||||
def fake_getaddrinfo(
|
||||
host: str,
|
||||
*_args: object,
|
||||
**_kwargs: object,
|
||||
) -> list[tuple[Any, ...]]:
|
||||
def _set(ip: str):
|
||||
def fake_getaddrinfo(host, *_args, **_kwargs):
|
||||
return [(socket.AF_INET, None, None, "", (ip, 0))]
|
||||
|
||||
monkeypatch.setattr(socket, "getaddrinfo", fake_getaddrinfo)
|
||||
@@ -4597,7 +4202,7 @@ class TestDateWorkflowLocalization(
|
||||
self,
|
||||
title_template: str,
|
||||
expected_title: str,
|
||||
) -> None:
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Document added workflow with title template using localize_date filter
|
||||
@@ -4662,7 +4267,7 @@ class TestDateWorkflowLocalization(
|
||||
self,
|
||||
title_template: str,
|
||||
expected_title: str,
|
||||
) -> None:
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- Document updated workflow with title template using localize_date filter
|
||||
@@ -4738,7 +4343,7 @@ class TestDateWorkflowLocalization(
|
||||
settings: SettingsWrapper,
|
||||
title_template: str,
|
||||
expected_title: str,
|
||||
) -> None:
|
||||
):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.CONSUMPTION,
|
||||
sources=f"{DocumentSource.ApiUpload}",
|
||||
|
||||
@@ -1,124 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
|
||||
from documents.models import Document
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from django.http import HttpRequest
|
||||
|
||||
|
||||
class VersionResolutionError(str, Enum):
|
||||
INVALID = "invalid"
|
||||
NOT_FOUND = "not_found"
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class VersionResolution:
|
||||
document: Document | None
|
||||
error: VersionResolutionError | None = None
|
||||
|
||||
|
||||
def _document_manager(*, include_deleted: bool) -> Any:
|
||||
return Document.global_objects if include_deleted else Document.objects
|
||||
|
||||
|
||||
def get_request_version_param(request: HttpRequest) -> str | None:
|
||||
if hasattr(request, "query_params"):
|
||||
return request.query_params.get("version")
|
||||
return None
|
||||
|
||||
|
||||
def get_root_document(doc: Document, *, include_deleted: bool = False) -> Document:
|
||||
# Use root_document_id to avoid a query when this is already a root.
|
||||
# If root_document isn't available, fall back to the document itself.
|
||||
if doc.root_document_id is None:
|
||||
return doc
|
||||
if doc.root_document is not None:
|
||||
return doc.root_document
|
||||
|
||||
manager = _document_manager(include_deleted=include_deleted)
|
||||
root_doc = manager.only("id").filter(id=doc.root_document_id).first()
|
||||
return root_doc or doc
|
||||
|
||||
|
||||
def get_latest_version_for_root(
|
||||
root_doc: Document,
|
||||
*,
|
||||
include_deleted: bool = False,
|
||||
) -> Document:
|
||||
manager = _document_manager(include_deleted=include_deleted)
|
||||
latest = manager.filter(root_document=root_doc).order_by("-id").first()
|
||||
return latest or root_doc
|
||||
|
||||
|
||||
def resolve_requested_version_for_root(
|
||||
root_doc: Document,
|
||||
request: Any,
|
||||
*,
|
||||
include_deleted: bool = False,
|
||||
) -> VersionResolution:
|
||||
version_param = get_request_version_param(request)
|
||||
if not version_param:
|
||||
return VersionResolution(
|
||||
document=get_latest_version_for_root(
|
||||
root_doc,
|
||||
include_deleted=include_deleted,
|
||||
),
|
||||
)
|
||||
|
||||
try:
|
||||
version_id = int(version_param)
|
||||
except (TypeError, ValueError):
|
||||
return VersionResolution(document=None, error=VersionResolutionError.INVALID)
|
||||
|
||||
manager = _document_manager(include_deleted=include_deleted)
|
||||
candidate = manager.only("id", "root_document_id").filter(id=version_id).first()
|
||||
if candidate is None:
|
||||
return VersionResolution(document=None, error=VersionResolutionError.NOT_FOUND)
|
||||
if candidate.id != root_doc.id and candidate.root_document_id != root_doc.id:
|
||||
return VersionResolution(document=None, error=VersionResolutionError.NOT_FOUND)
|
||||
return VersionResolution(document=candidate)
|
||||
|
||||
|
||||
def resolve_effective_document(
|
||||
request_doc: Document,
|
||||
request: Any,
|
||||
*,
|
||||
include_deleted: bool = False,
|
||||
) -> VersionResolution:
|
||||
root_doc = get_root_document(request_doc, include_deleted=include_deleted)
|
||||
if get_request_version_param(request) is not None:
|
||||
return resolve_requested_version_for_root(
|
||||
root_doc,
|
||||
request,
|
||||
include_deleted=include_deleted,
|
||||
)
|
||||
if request_doc.root_document_id is None:
|
||||
return VersionResolution(
|
||||
document=get_latest_version_for_root(
|
||||
root_doc,
|
||||
include_deleted=include_deleted,
|
||||
),
|
||||
)
|
||||
return VersionResolution(document=request_doc)
|
||||
|
||||
|
||||
def resolve_effective_document_by_pk(
|
||||
pk: int,
|
||||
request: Any,
|
||||
*,
|
||||
include_deleted: bool = False,
|
||||
) -> VersionResolution:
|
||||
manager = _document_manager(include_deleted=include_deleted)
|
||||
request_doc = manager.only("id", "root_document_id").filter(pk=pk).first()
|
||||
if request_doc is None:
|
||||
return VersionResolution(document=None, error=VersionResolutionError.NOT_FOUND)
|
||||
return resolve_effective_document(
|
||||
request_doc,
|
||||
request,
|
||||
include_deleted=include_deleted,
|
||||
)
|
||||
@@ -10,8 +10,6 @@ from collections import deque
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from time import mktime
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Any
|
||||
from typing import Literal
|
||||
from unicodedata import normalize
|
||||
from urllib.parse import quote
|
||||
@@ -31,22 +29,15 @@ from django.db.migrations.loader import MigrationLoader
|
||||
from django.db.migrations.recorder import MigrationRecorder
|
||||
from django.db.models import Case
|
||||
from django.db.models import Count
|
||||
from django.db.models import F
|
||||
from django.db.models import IntegerField
|
||||
from django.db.models import Max
|
||||
from django.db.models import Model
|
||||
from django.db.models import OuterRef
|
||||
from django.db.models import Prefetch
|
||||
from django.db.models import Q
|
||||
from django.db.models import Subquery
|
||||
from django.db.models import Sum
|
||||
from django.db.models import When
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.db.models.functions import Lower
|
||||
from django.db.models.manager import Manager
|
||||
from django.http import FileResponse
|
||||
from django.http import Http404
|
||||
from django.http import HttpRequest
|
||||
from django.http import HttpResponse
|
||||
from django.http import HttpResponseBadRequest
|
||||
from django.http import HttpResponseForbidden
|
||||
@@ -91,7 +82,6 @@ from rest_framework.mixins import ListModelMixin
|
||||
from rest_framework.mixins import RetrieveModelMixin
|
||||
from rest_framework.mixins import UpdateModelMixin
|
||||
from rest_framework.permissions import IsAuthenticated
|
||||
from rest_framework.request import Request
|
||||
from rest_framework.response import Response
|
||||
from rest_framework.viewsets import GenericViewSet
|
||||
from rest_framework.viewsets import ModelViewSet
|
||||
@@ -179,8 +169,6 @@ from documents.serialisers import CustomFieldSerializer
|
||||
from documents.serialisers import DocumentListSerializer
|
||||
from documents.serialisers import DocumentSerializer
|
||||
from documents.serialisers import DocumentTypeSerializer
|
||||
from documents.serialisers import DocumentVersionLabelSerializer
|
||||
from documents.serialisers import DocumentVersionSerializer
|
||||
from documents.serialisers import EmailSerializer
|
||||
from documents.serialisers import NotesSerializer
|
||||
from documents.serialisers import PostDocumentSerializer
|
||||
@@ -193,6 +181,7 @@ from documents.serialisers import ShareLinkSerializer
|
||||
from documents.serialisers import StoragePathSerializer
|
||||
from documents.serialisers import StoragePathTestSerializer
|
||||
from documents.serialisers import TagSerializer
|
||||
from documents.serialisers import TagSerializerVersion1
|
||||
from documents.serialisers import TasksViewSerializer
|
||||
from documents.serialisers import TrashSerializer
|
||||
from documents.serialisers import UiSettingsViewSerializer
|
||||
@@ -209,11 +198,6 @@ from documents.tasks import sanity_check
|
||||
from documents.tasks import train_classifier
|
||||
from documents.tasks import update_document_parent_tags
|
||||
from documents.utils import get_boolean
|
||||
from documents.versioning import VersionResolutionError
|
||||
from documents.versioning import get_latest_version_for_root
|
||||
from documents.versioning import get_request_version_param
|
||||
from documents.versioning import get_root_document
|
||||
from documents.versioning import resolve_requested_version_for_root
|
||||
from paperless import version
|
||||
from paperless.celery import app as celery_app
|
||||
from paperless.config import AIConfig
|
||||
@@ -488,7 +472,6 @@ class CorrespondentViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
|
||||
@extend_schema_view(**generate_object_with_permissions_schema(TagSerializer))
|
||||
class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
|
||||
model = Tag
|
||||
serializer_class = TagSerializer
|
||||
document_count_through = Document.tags.through
|
||||
document_count_source_field = "tag_id"
|
||||
|
||||
@@ -496,6 +479,12 @@ class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
|
||||
Lower("name"),
|
||||
)
|
||||
|
||||
def get_serializer_class(self, *args, **kwargs):
|
||||
if int(self.request.version) == 1:
|
||||
return TagSerializerVersion1
|
||||
else:
|
||||
return TagSerializer
|
||||
|
||||
pagination_class = StandardPagination
|
||||
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
|
||||
filter_backends = (
|
||||
@@ -617,12 +606,6 @@ class EmailDocumentDetailSchema(EmailSerializer):
|
||||
type=OpenApiTypes.BOOL,
|
||||
location=OpenApiParameter.QUERY,
|
||||
),
|
||||
OpenApiParameter(
|
||||
name="follow_formatting",
|
||||
description="Whether or not to use the filename on disk",
|
||||
type=OpenApiTypes.BOOL,
|
||||
location=OpenApiParameter.QUERY,
|
||||
),
|
||||
],
|
||||
responses={200: OpenApiTypes.BINARY},
|
||||
),
|
||||
@@ -801,7 +784,7 @@ class DocumentViewSet(
|
||||
GenericViewSet,
|
||||
):
|
||||
model = Document
|
||||
queryset = Document.objects.all()
|
||||
queryset = Document.objects.annotate(num_notes=Count("notes"))
|
||||
serializer_class = DocumentSerializer
|
||||
pagination_class = StandardPagination
|
||||
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
|
||||
@@ -812,7 +795,7 @@ class DocumentViewSet(
|
||||
ObjectOwnedOrGrantedPermissionsFilter,
|
||||
)
|
||||
filterset_class = DocumentFilterSet
|
||||
search_fields = ("title", "correspondent__name", "effective_content")
|
||||
search_fields = ("title", "correspondent__name", "content")
|
||||
ordering_fields = (
|
||||
"id",
|
||||
"title",
|
||||
@@ -830,33 +813,12 @@ class DocumentViewSet(
|
||||
)
|
||||
|
||||
def get_queryset(self):
|
||||
latest_version_content = Subquery(
|
||||
Document.objects.filter(root_document=OuterRef("pk"))
|
||||
.order_by("-id")
|
||||
.values("content")[:1],
|
||||
)
|
||||
return (
|
||||
Document.objects.filter(root_document__isnull=True)
|
||||
.distinct()
|
||||
Document.objects.distinct()
|
||||
.order_by("-created")
|
||||
.annotate(effective_content=Coalesce(latest_version_content, F("content")))
|
||||
.annotate(num_notes=Count("notes"))
|
||||
.select_related("correspondent", "storage_path", "document_type", "owner")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"versions",
|
||||
queryset=Document.objects.only(
|
||||
"id",
|
||||
"added",
|
||||
"checksum",
|
||||
"version_label",
|
||||
"root_document_id",
|
||||
),
|
||||
),
|
||||
"tags",
|
||||
"custom_fields",
|
||||
"notes",
|
||||
)
|
||||
.prefetch_related("tags", "custom_fields", "notes")
|
||||
)
|
||||
|
||||
def get_serializer(self, *args, **kwargs):
|
||||
@@ -878,100 +840,15 @@ class DocumentViewSet(
|
||||
)
|
||||
return super().get_serializer(*args, **kwargs)
|
||||
|
||||
@extend_schema(
|
||||
operation_id="documents_root",
|
||||
responses=inline_serializer(
|
||||
name="DocumentRootResponse",
|
||||
fields={
|
||||
"root_id": serializers.IntegerField(),
|
||||
},
|
||||
),
|
||||
)
|
||||
@action(methods=["get"], detail=True, url_path="root")
|
||||
def root(self, request, pk=None):
|
||||
try:
|
||||
doc = Document.global_objects.select_related(
|
||||
"owner",
|
||||
"root_document",
|
||||
).get(pk=pk)
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
|
||||
root_doc = get_root_document(doc)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"view_document",
|
||||
root_doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
|
||||
return Response({"root_id": root_doc.id})
|
||||
|
||||
def retrieve(
|
||||
self,
|
||||
request: Request,
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> Response:
|
||||
response = super().retrieve(request, *args, **kwargs)
|
||||
if (
|
||||
"version" not in request.query_params
|
||||
or not isinstance(response.data, dict)
|
||||
or "content" not in response.data
|
||||
):
|
||||
return response
|
||||
|
||||
root_doc = self.get_object()
|
||||
content_doc = self._resolve_file_doc(root_doc, request)
|
||||
response.data["content"] = content_doc.content or ""
|
||||
return response
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
partial = kwargs.pop("partial", False)
|
||||
root_doc = self.get_object()
|
||||
content_doc = (
|
||||
self._resolve_file_doc(root_doc, request)
|
||||
if "version" in request.query_params
|
||||
else get_latest_version_for_root(root_doc)
|
||||
)
|
||||
content_updated = "content" in request.data
|
||||
updated_content = request.data.get("content") if content_updated else None
|
||||
|
||||
data = request.data.copy()
|
||||
serializer_partial = partial
|
||||
if content_updated and content_doc.id != root_doc.id:
|
||||
if updated_content is None:
|
||||
raise ValidationError({"content": ["This field may not be null."]})
|
||||
data.pop("content", None)
|
||||
serializer_partial = True
|
||||
|
||||
serializer = self.get_serializer(
|
||||
root_doc,
|
||||
data=data,
|
||||
partial=serializer_partial,
|
||||
)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
self.perform_update(serializer)
|
||||
|
||||
if content_updated and content_doc.id != root_doc.id:
|
||||
content_doc.content = (
|
||||
str(updated_content) if updated_content is not None else ""
|
||||
)
|
||||
content_doc.save(update_fields=["content", "modified"])
|
||||
|
||||
refreshed_doc = self.get_queryset().get(pk=root_doc.pk)
|
||||
response_data = self.get_serializer(refreshed_doc).data
|
||||
if "version" in request.query_params and "content" in response_data:
|
||||
response_data["content"] = content_doc.content
|
||||
response = Response(response_data)
|
||||
|
||||
response = super().update(request, *args, **kwargs)
|
||||
from documents import index
|
||||
|
||||
index.add_or_update_document(refreshed_doc)
|
||||
index.add_or_update_document(self.get_object())
|
||||
|
||||
document_updated.send(
|
||||
sender=self.__class__,
|
||||
document=refreshed_doc,
|
||||
document=self.get_object(),
|
||||
)
|
||||
|
||||
return response
|
||||
@@ -999,76 +876,19 @@ class DocumentViewSet(
|
||||
and request.query_params["original"] == "true"
|
||||
)
|
||||
|
||||
def _resolve_file_doc(self, root_doc: Document, request):
|
||||
version_requested = get_request_version_param(request) is not None
|
||||
resolution = resolve_requested_version_for_root(
|
||||
root_doc,
|
||||
request,
|
||||
include_deleted=version_requested,
|
||||
)
|
||||
if resolution.error == VersionResolutionError.INVALID:
|
||||
raise NotFound("Invalid version parameter")
|
||||
if resolution.document is None:
|
||||
raise Http404
|
||||
return resolution.document
|
||||
|
||||
def _get_effective_file_doc(
|
||||
self,
|
||||
request_doc: Document,
|
||||
root_doc: Document,
|
||||
request: Request,
|
||||
) -> Document:
|
||||
if (
|
||||
request_doc.root_document_id is not None
|
||||
and get_request_version_param(request) is None
|
||||
):
|
||||
return request_doc
|
||||
return self._resolve_file_doc(root_doc, request)
|
||||
|
||||
def _resolve_request_and_root_doc(
|
||||
self,
|
||||
pk,
|
||||
request: Request,
|
||||
*,
|
||||
include_deleted: bool = False,
|
||||
) -> tuple[Document, Document] | HttpResponseForbidden:
|
||||
manager = Document.global_objects if include_deleted else Document.objects
|
||||
try:
|
||||
request_doc = manager.select_related(
|
||||
"owner",
|
||||
"root_document",
|
||||
).get(id=pk)
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
|
||||
root_doc = get_root_document(
|
||||
request_doc,
|
||||
include_deleted=include_deleted,
|
||||
)
|
||||
def file_response(self, pk, request, disposition):
|
||||
doc = Document.global_objects.select_related("owner").get(id=pk)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"view_document",
|
||||
root_doc,
|
||||
doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
return request_doc, root_doc
|
||||
|
||||
def file_response(self, pk, request, disposition):
|
||||
resolved = self._resolve_request_and_root_doc(
|
||||
pk,
|
||||
request,
|
||||
include_deleted=True,
|
||||
)
|
||||
if isinstance(resolved, HttpResponseForbidden):
|
||||
return resolved
|
||||
request_doc, root_doc = resolved
|
||||
file_doc = self._get_effective_file_doc(request_doc, root_doc, request)
|
||||
return serve_file(
|
||||
doc=file_doc,
|
||||
doc=doc,
|
||||
use_archive=not self.original_requested(request)
|
||||
and file_doc.has_archive_version,
|
||||
and doc.has_archive_version,
|
||||
disposition=disposition,
|
||||
follow_formatting=request.query_params.get("follow_formatting", False),
|
||||
)
|
||||
|
||||
def get_metadata(self, file, mime_type):
|
||||
@@ -1101,14 +921,16 @@ class DocumentViewSet(
|
||||
condition(etag_func=metadata_etag, last_modified_func=metadata_last_modified),
|
||||
)
|
||||
def metadata(self, request, pk=None):
|
||||
resolved = self._resolve_request_and_root_doc(pk, request)
|
||||
if isinstance(resolved, HttpResponseForbidden):
|
||||
return resolved
|
||||
request_doc, root_doc = resolved
|
||||
|
||||
# Choose the effective document (newest version by default,
|
||||
# or explicit via ?version=).
|
||||
doc = self._get_effective_file_doc(request_doc, root_doc, request)
|
||||
try:
|
||||
doc = Document.objects.select_related("owner").get(pk=pk)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"view_document",
|
||||
doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
|
||||
document_cached_metadata = get_metadata_cache(doc.pk)
|
||||
|
||||
@@ -1277,38 +1099,29 @@ class DocumentViewSet(
|
||||
condition(etag_func=preview_etag, last_modified_func=preview_last_modified),
|
||||
)
|
||||
def preview(self, request, pk=None):
|
||||
resolved = self._resolve_request_and_root_doc(pk, request)
|
||||
if isinstance(resolved, HttpResponseForbidden):
|
||||
return resolved
|
||||
request_doc, root_doc = resolved
|
||||
|
||||
try:
|
||||
file_doc = self._get_effective_file_doc(request_doc, root_doc, request)
|
||||
|
||||
return serve_file(
|
||||
doc=file_doc,
|
||||
use_archive=not self.original_requested(request)
|
||||
and file_doc.has_archive_version,
|
||||
disposition="inline",
|
||||
)
|
||||
except FileNotFoundError:
|
||||
response = self.file_response(pk, request, "inline")
|
||||
return response
|
||||
except (FileNotFoundError, Document.DoesNotExist):
|
||||
raise Http404
|
||||
|
||||
@action(methods=["get"], detail=True, filter_backends=[])
|
||||
@method_decorator(cache_control(no_cache=True))
|
||||
@method_decorator(last_modified(thumbnail_last_modified))
|
||||
def thumb(self, request, pk=None):
|
||||
resolved = self._resolve_request_and_root_doc(pk, request)
|
||||
if isinstance(resolved, HttpResponseForbidden):
|
||||
return resolved
|
||||
request_doc, root_doc = resolved
|
||||
|
||||
try:
|
||||
file_doc = self._get_effective_file_doc(request_doc, root_doc, request)
|
||||
handle = file_doc.thumbnail_file
|
||||
doc = Document.objects.select_related("owner").get(id=pk)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"view_document",
|
||||
doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
|
||||
handle = doc.thumbnail_file
|
||||
|
||||
return HttpResponse(handle, content_type="image/webp")
|
||||
except FileNotFoundError:
|
||||
except (FileNotFoundError, Document.DoesNotExist):
|
||||
raise Http404
|
||||
|
||||
@action(methods=["get"], detail=True)
|
||||
@@ -1597,246 +1410,6 @@ class DocumentViewSet(
|
||||
"Error emailing documents, check logs for more detail.",
|
||||
)
|
||||
|
||||
@extend_schema(
|
||||
operation_id="documents_update_version",
|
||||
request=DocumentVersionSerializer,
|
||||
responses={
|
||||
200: OpenApiTypes.STR,
|
||||
},
|
||||
)
|
||||
@action(methods=["post"], detail=True, parser_classes=[parsers.MultiPartParser])
|
||||
def update_version(self, request, pk=None):
|
||||
serializer = DocumentVersionSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
try:
|
||||
request_doc = Document.objects.select_related(
|
||||
"owner",
|
||||
"root_document",
|
||||
).get(pk=pk)
|
||||
root_doc = get_root_document(request_doc)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"change_document",
|
||||
root_doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
|
||||
try:
|
||||
doc_name, doc_data = serializer.validated_data.get("document")
|
||||
version_label = serializer.validated_data.get("version_label")
|
||||
|
||||
t = int(mktime(datetime.now().timetuple()))
|
||||
|
||||
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
temp_file_path = Path(tempfile.mkdtemp(dir=settings.SCRATCH_DIR)) / Path(
|
||||
pathvalidate.sanitize_filename(doc_name),
|
||||
)
|
||||
|
||||
temp_file_path.write_bytes(doc_data)
|
||||
|
||||
os.utime(temp_file_path, times=(t, t))
|
||||
|
||||
input_doc = ConsumableDocument(
|
||||
source=DocumentSource.ApiUpload,
|
||||
original_file=temp_file_path,
|
||||
root_document_id=root_doc.pk,
|
||||
)
|
||||
|
||||
overrides = DocumentMetadataOverrides()
|
||||
if version_label:
|
||||
overrides.version_label = version_label.strip()
|
||||
if request.user is not None:
|
||||
overrides.actor_id = request.user.id
|
||||
|
||||
async_task = consume_file.delay(
|
||||
input_doc,
|
||||
overrides,
|
||||
)
|
||||
logger.debug(
|
||||
f"Updated document {root_doc.id} with new version",
|
||||
)
|
||||
return Response(async_task.id)
|
||||
except Exception as e:
|
||||
logger.warning(f"An error occurred updating document: {e!s}")
|
||||
return HttpResponseServerError(
|
||||
"Error updating document, check logs for more detail.",
|
||||
)
|
||||
|
||||
def _get_root_doc_for_version_action(self, pk) -> Document:
|
||||
try:
|
||||
root_doc = Document.objects.select_related(
|
||||
"owner",
|
||||
"root_document",
|
||||
).get(pk=pk)
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
return get_root_document(root_doc)
|
||||
|
||||
def _get_version_doc_for_root(self, root_doc: Document, version_id) -> Document:
|
||||
try:
|
||||
version_doc = Document.objects.select_related("owner").get(
|
||||
pk=version_id,
|
||||
)
|
||||
except Document.DoesNotExist:
|
||||
raise Http404
|
||||
|
||||
if (
|
||||
version_doc.id != root_doc.id
|
||||
and version_doc.root_document_id != root_doc.id
|
||||
):
|
||||
raise Http404
|
||||
return version_doc
|
||||
|
||||
@extend_schema(
|
||||
operation_id="documents_delete_version",
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="version_id",
|
||||
type=OpenApiTypes.INT,
|
||||
location=OpenApiParameter.PATH,
|
||||
),
|
||||
],
|
||||
responses=inline_serializer(
|
||||
name="DeleteDocumentVersionResult",
|
||||
fields={
|
||||
"result": serializers.CharField(),
|
||||
"current_version_id": serializers.IntegerField(),
|
||||
},
|
||||
),
|
||||
)
|
||||
@action(
|
||||
methods=["delete"],
|
||||
detail=True,
|
||||
url_path=r"versions/(?P<version_id>\d+)",
|
||||
)
|
||||
def delete_version(self, request, pk=None, version_id=None):
|
||||
root_doc = self._get_root_doc_for_version_action(pk)
|
||||
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"delete_document",
|
||||
root_doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
|
||||
version_doc = self._get_version_doc_for_root(root_doc, version_id)
|
||||
|
||||
if version_doc.id == root_doc.id:
|
||||
return HttpResponseBadRequest(
|
||||
"Cannot delete the root/original version. Delete the document instead.",
|
||||
)
|
||||
|
||||
from documents import index
|
||||
|
||||
index.remove_document_from_index(version_doc)
|
||||
version_doc_id = version_doc.id
|
||||
version_doc.delete()
|
||||
index.add_or_update_document(root_doc)
|
||||
if settings.AUDIT_LOG_ENABLED:
|
||||
actor = (
|
||||
request.user if request.user and request.user.is_authenticated else None
|
||||
)
|
||||
LogEntry.objects.log_create(
|
||||
instance=root_doc,
|
||||
changes={
|
||||
"Version Deleted": ["None", version_doc_id],
|
||||
},
|
||||
action=LogEntry.Action.UPDATE,
|
||||
actor=actor,
|
||||
additional_data={
|
||||
"reason": "Version deleted",
|
||||
"version_id": version_doc_id,
|
||||
},
|
||||
)
|
||||
|
||||
current = (
|
||||
Document.objects.filter(Q(id=root_doc.id) | Q(root_document=root_doc))
|
||||
.order_by("-id")
|
||||
.first()
|
||||
)
|
||||
return Response(
|
||||
{
|
||||
"result": "OK",
|
||||
"current_version_id": current.id if current else root_doc.id,
|
||||
},
|
||||
)
|
||||
|
||||
@extend_schema(
|
||||
operation_id="documents_update_version_label",
|
||||
request=DocumentVersionLabelSerializer,
|
||||
parameters=[
|
||||
OpenApiParameter(
|
||||
name="version_id",
|
||||
type=OpenApiTypes.INT,
|
||||
location=OpenApiParameter.PATH,
|
||||
),
|
||||
],
|
||||
responses=inline_serializer(
|
||||
name="UpdateDocumentVersionLabelResult",
|
||||
fields={
|
||||
"id": serializers.IntegerField(),
|
||||
"added": serializers.DateTimeField(),
|
||||
"version_label": serializers.CharField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
),
|
||||
"checksum": serializers.CharField(
|
||||
required=False,
|
||||
allow_null=True,
|
||||
),
|
||||
"is_root": serializers.BooleanField(),
|
||||
},
|
||||
),
|
||||
)
|
||||
@delete_version.mapping.patch
|
||||
def update_version_label(self, request, pk=None, version_id=None):
|
||||
serializer = DocumentVersionLabelSerializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
root_doc = self._get_root_doc_for_version_action(pk)
|
||||
if request.user is not None and not has_perms_owner_aware(
|
||||
request.user,
|
||||
"change_document",
|
||||
root_doc,
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
|
||||
version_doc = self._get_version_doc_for_root(root_doc, version_id)
|
||||
old_label = version_doc.version_label
|
||||
version_doc.version_label = serializer.validated_data["version_label"]
|
||||
version_doc.save(update_fields=["version_label"])
|
||||
|
||||
if settings.AUDIT_LOG_ENABLED and old_label != version_doc.version_label:
|
||||
actor = (
|
||||
request.user if request.user and request.user.is_authenticated else None
|
||||
)
|
||||
LogEntry.objects.log_create(
|
||||
instance=root_doc,
|
||||
changes={
|
||||
"Version Label": [old_label, version_doc.version_label],
|
||||
},
|
||||
action=LogEntry.Action.UPDATE,
|
||||
actor=actor,
|
||||
additional_data={
|
||||
"reason": "Version label updated",
|
||||
"version_id": version_doc.id,
|
||||
},
|
||||
)
|
||||
|
||||
return Response(
|
||||
{
|
||||
"id": version_doc.id,
|
||||
"added": version_doc.added,
|
||||
"version_label": version_doc.version_label,
|
||||
"checksum": version_doc.checksum,
|
||||
"is_root": version_doc.id == root_doc.id,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class ChatStreamingSerializer(serializers.Serializer):
|
||||
q = serializers.CharField(required=True)
|
||||
@@ -1925,7 +1498,7 @@ class ChatStreamingView(GenericAPIView):
|
||||
),
|
||||
)
|
||||
class UnifiedSearchViewSet(DocumentViewSet):
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
def __init__(self, *args, **kwargs) -> None:
|
||||
super().__init__(*args, **kwargs)
|
||||
self.searcher = None
|
||||
|
||||
@@ -2103,7 +1676,7 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):
|
||||
.prefetch_related("filter_rules")
|
||||
)
|
||||
|
||||
def perform_create(self, serializer: serializers.BaseSerializer[Any]) -> None:
|
||||
def perform_create(self, serializer) -> None:
|
||||
serializer.save(owner=self.request.user)
|
||||
|
||||
|
||||
@@ -2136,15 +1709,13 @@ class BulkEditView(PassUserMixin):
|
||||
"modify_custom_fields": "custom_fields",
|
||||
"set_permissions": None,
|
||||
"delete": "deleted_at",
|
||||
# These operations create new documents/versions no longer altering
|
||||
# fields on the selected document in place
|
||||
"rotate": None,
|
||||
"delete_pages": None,
|
||||
"rotate": "checksum",
|
||||
"delete_pages": "checksum",
|
||||
"split": None,
|
||||
"merge": None,
|
||||
"edit_pdf": None,
|
||||
"edit_pdf": "checksum",
|
||||
"reprocess": "checksum",
|
||||
"remove_password": None,
|
||||
"remove_password": "checksum",
|
||||
}
|
||||
|
||||
permission_classes = (IsAuthenticated,)
|
||||
@@ -2162,8 +1733,6 @@ class BulkEditView(PassUserMixin):
|
||||
if method in [
|
||||
bulk_edit.split,
|
||||
bulk_edit.merge,
|
||||
bulk_edit.rotate,
|
||||
bulk_edit.delete_pages,
|
||||
bulk_edit.edit_pdf,
|
||||
bulk_edit.remove_password,
|
||||
]:
|
||||
@@ -3453,30 +3022,14 @@ class SharedLinkView(View):
|
||||
return response
|
||||
|
||||
|
||||
def serve_file(
|
||||
*,
|
||||
doc: Document,
|
||||
use_archive: bool,
|
||||
disposition: str,
|
||||
follow_formatting: bool = False,
|
||||
) -> HttpResponse:
|
||||
def serve_file(*, doc: Document, use_archive: bool, disposition: str) -> HttpResponse:
|
||||
if use_archive:
|
||||
if TYPE_CHECKING:
|
||||
assert doc.archive_filename
|
||||
|
||||
file_handle = doc.archive_file
|
||||
filename = (
|
||||
doc.archive_filename
|
||||
if follow_formatting
|
||||
else doc.get_public_filename(archive=True)
|
||||
)
|
||||
filename = doc.get_public_filename(archive=True)
|
||||
mime_type = "application/pdf"
|
||||
else:
|
||||
if TYPE_CHECKING:
|
||||
assert doc.filename
|
||||
|
||||
file_handle = doc.source_file
|
||||
filename = doc.filename if follow_formatting else doc.get_public_filename()
|
||||
filename = doc.get_public_filename()
|
||||
mime_type = doc.mime_type
|
||||
# Support browser previewing csv files by using text mime type
|
||||
if mime_type in {"application/csv", "text/csv"} and disposition == "inline":
|
||||
@@ -3957,16 +3510,11 @@ class TrashView(ListModelMixin, PassUserMixin):
|
||||
|
||||
queryset = Document.deleted_objects.all()
|
||||
|
||||
def get(self, request: Request, format: str | None = None) -> Response:
|
||||
def get(self, request, format=None):
|
||||
self.serializer_class = DocumentSerializer
|
||||
return self.list(request, format)
|
||||
|
||||
def post(
|
||||
self,
|
||||
request: Request,
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> Response | HttpResponse:
|
||||
def post(self, request, *args, **kwargs):
|
||||
serializer = self.get_serializer(data=request.data)
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
@@ -3990,7 +3538,7 @@ class TrashView(ListModelMixin, PassUserMixin):
|
||||
return Response({"result": "OK", "doc_ids": doc_ids})
|
||||
|
||||
|
||||
def serve_logo(request: HttpRequest, filename: str | None = None) -> FileResponse:
|
||||
def serve_logo(request, filename=None):
|
||||
"""
|
||||
Serves the configured logo file with Content-Disposition: attachment.
|
||||
Prevents inline execution of SVGs. See GHSA-6p53-hqqw-8j62
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import logging
|
||||
import re
|
||||
import uuid
|
||||
from pathlib import Path
|
||||
|
||||
from django.conf import settings
|
||||
@@ -16,7 +15,6 @@ from documents.models import Document
|
||||
from documents.models import DocumentType
|
||||
from documents.models import WorkflowAction
|
||||
from documents.models import WorkflowTrigger
|
||||
from documents.plugins.base import StopConsumeTaskError
|
||||
from documents.signals import document_consumption_finished
|
||||
from documents.templating.workflows import parse_w_workflow_placeholders
|
||||
from documents.workflows.webhooks import send_webhook
|
||||
@@ -340,33 +338,3 @@ def execute_password_removal_action(
|
||||
document.pk,
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
|
||||
|
||||
def execute_move_to_trash_action(
|
||||
action: WorkflowAction,
|
||||
document: Document | ConsumableDocument,
|
||||
logging_group: uuid.UUID | None,
|
||||
) -> None:
|
||||
"""
|
||||
Execute a move to trash action for a workflow on an existing document or a
|
||||
document in consumption. In case of an existing document it soft-deletes
|
||||
the document. In case of consumption it aborts consumption and deletes the
|
||||
file.
|
||||
"""
|
||||
if isinstance(document, Document):
|
||||
document.delete()
|
||||
logger.debug(
|
||||
f"Moved document {document} to trash",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
else:
|
||||
if document.original_file.exists():
|
||||
document.original_file.unlink()
|
||||
logger.info(
|
||||
f"Workflow move to trash action triggered during consumption, "
|
||||
f"deleting file {document.original_file}",
|
||||
extra={"group": logging_group},
|
||||
)
|
||||
raise StopConsumeTaskError(
|
||||
"Document deleted by workflow action during consumption",
|
||||
)
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,5 @@
|
||||
import json
|
||||
from typing import Any
|
||||
|
||||
from asgiref.sync import async_to_sync
|
||||
from channels.exceptions import AcceptConnection
|
||||
@@ -52,3 +53,10 @@ class StatusConsumer(WebsocketConsumer):
|
||||
self.close()
|
||||
else:
|
||||
self.send(json.dumps(event))
|
||||
|
||||
def document_updated(self, event: Any) -> None:
|
||||
if not self._authenticated():
|
||||
self.close()
|
||||
else:
|
||||
if self._can_view(event["data"]):
|
||||
self.send(json.dumps(event))
|
||||
|
||||
@@ -377,7 +377,7 @@ REST_FRAMEWORK = {
|
||||
"DEFAULT_VERSION": "9", # match src-ui/src/environments/environment.prod.ts
|
||||
# Make sure these are ordered and that the most recent version appears
|
||||
# last. See api.md#api-versioning when adding new versions.
|
||||
"ALLOWED_VERSIONS": ["2", "3", "4", "5", "6", "7", "8", "9"],
|
||||
"ALLOWED_VERSIONS": ["1", "2", "3", "4", "5", "6", "7", "8", "9"],
|
||||
# DRF Spectacular default schema
|
||||
"DEFAULT_SCHEMA_CLASS": "drf_spectacular.openapi.AutoSchema",
|
||||
}
|
||||
|
||||
@@ -48,6 +48,20 @@ class TestWebSockets(TestCase):
|
||||
mock_close.assert_called_once()
|
||||
mock_close.reset_mock()
|
||||
|
||||
message = {
|
||||
"type": "document_updated",
|
||||
"data": {"document_id": 10, "modified": "2026-02-17T00:00:00Z"},
|
||||
}
|
||||
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
await communicator.receive_nothing()
|
||||
|
||||
mock_close.assert_called_once()
|
||||
mock_close.reset_mock()
|
||||
|
||||
message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}
|
||||
|
||||
await channel_layer.group_send(
|
||||
@@ -158,6 +172,40 @@ class TestWebSockets(TestCase):
|
||||
|
||||
await communicator.disconnect()
|
||||
|
||||
@mock.patch("paperless.consumers.StatusConsumer._can_view")
|
||||
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
|
||||
async def test_receive_document_updated(self, _authenticated, _can_view) -> None:
|
||||
_authenticated.return_value = True
|
||||
_can_view.return_value = True
|
||||
|
||||
communicator = WebsocketCommunicator(application, "/ws/status/")
|
||||
connected, _ = await communicator.connect()
|
||||
self.assertTrue(connected)
|
||||
|
||||
message = {
|
||||
"type": "document_updated",
|
||||
"data": {
|
||||
"document_id": 10,
|
||||
"modified": "2026-02-17T00:00:00Z",
|
||||
"owner_id": 1,
|
||||
"users_can_view": [1],
|
||||
"groups_can_view": [],
|
||||
},
|
||||
}
|
||||
|
||||
channel_layer = get_channel_layer()
|
||||
assert channel_layer is not None
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
|
||||
response = await communicator.receive_json_from()
|
||||
|
||||
self.assertEqual(response, message)
|
||||
|
||||
await communicator.disconnect()
|
||||
|
||||
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
|
||||
def test_manager_send_progress(self, mock_group_send) -> None:
|
||||
with ProgressManager(task_id="test") as manager:
|
||||
@@ -190,7 +238,10 @@ class TestWebSockets(TestCase):
|
||||
)
|
||||
|
||||
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
|
||||
def test_manager_send_documents_deleted(self, mock_group_send) -> None:
|
||||
def test_manager_send_documents_deleted(
|
||||
self,
|
||||
mock_group_send: mock.MagicMock,
|
||||
) -> None:
|
||||
with DocumentsStatusManager() as manager:
|
||||
manager.send_documents_deleted([1, 2, 3])
|
||||
|
||||
|
||||
@@ -23,7 +23,6 @@ def get_embedding_model() -> BaseEmbedding:
|
||||
return OpenAIEmbedding(
|
||||
model=config.llm_embedding_model or "text-embedding-3-small",
|
||||
api_key=config.llm_api_key,
|
||||
api_base=config.llm_endpoint or None,
|
||||
)
|
||||
case LLMEmbeddingBackend.HUGGINGFACE:
|
||||
return HuggingFaceEmbedding(
|
||||
|
||||
@@ -65,14 +65,12 @@ def test_get_embedding_model_openai(mock_ai_config):
|
||||
mock_ai_config.return_value.llm_embedding_backend = LLMEmbeddingBackend.OPENAI
|
||||
mock_ai_config.return_value.llm_embedding_model = "text-embedding-3-small"
|
||||
mock_ai_config.return_value.llm_api_key = "test_api_key"
|
||||
mock_ai_config.return_value.llm_endpoint = "http://test-url"
|
||||
|
||||
with patch("paperless_ai.embedding.OpenAIEmbedding") as MockOpenAIEmbedding:
|
||||
model = get_embedding_model()
|
||||
MockOpenAIEmbedding.assert_called_once_with(
|
||||
model="text-embedding-3-small",
|
||||
api_key="test_api_key",
|
||||
api_base="http://test-url",
|
||||
)
|
||||
assert model == MockOpenAIEmbedding.return_value
|
||||
|
||||
|
||||
@@ -536,7 +536,6 @@ class MailAccountHandler(LoggingMixin):
|
||||
self.log.debug(f"Processing mail account {account}")
|
||||
|
||||
total_processed_files = 0
|
||||
consumed_messages: set[tuple[str, str | None]] = set()
|
||||
try:
|
||||
with get_mailbox(
|
||||
account.imap_server,
|
||||
@@ -575,7 +574,6 @@ class MailAccountHandler(LoggingMixin):
|
||||
M,
|
||||
rule,
|
||||
supports_gmail_labels=supports_gmail_labels,
|
||||
consumed_messages=consumed_messages,
|
||||
)
|
||||
if total_processed_files > 0 and rule.stop_processing:
|
||||
self.log.debug(
|
||||
@@ -607,8 +605,7 @@ class MailAccountHandler(LoggingMixin):
|
||||
rule: MailRule,
|
||||
*,
|
||||
supports_gmail_labels: bool,
|
||||
consumed_messages: set[tuple[str, str | None]],
|
||||
) -> int:
|
||||
):
|
||||
folders = [rule.folder]
|
||||
# In case of MOVE, make sure also the destination exists
|
||||
if rule.action == MailRule.MailAction.MOVE:
|
||||
@@ -655,26 +652,11 @@ class MailAccountHandler(LoggingMixin):
|
||||
|
||||
mails_processed = 0
|
||||
total_processed_files = 0
|
||||
rule_seen_messages: set[tuple[str, str | None]] = set()
|
||||
|
||||
for message in messages:
|
||||
if TYPE_CHECKING:
|
||||
assert isinstance(message, MailMessage)
|
||||
|
||||
message_key = (rule.folder, message.uid)
|
||||
if message_key in rule_seen_messages:
|
||||
self.log.debug(
|
||||
f"Skipping duplicate fetched mail '{message.uid}' subject '{message.subject}' from '{message.from_}'.",
|
||||
)
|
||||
continue
|
||||
rule_seen_messages.add(message_key)
|
||||
|
||||
if message_key in consumed_messages:
|
||||
self.log.debug(
|
||||
f"Skipping mail '{message.uid}' subject '{message.subject}' from '{message.from_}', already queued by a previous rule in this run.",
|
||||
)
|
||||
continue
|
||||
|
||||
if ProcessedMail.objects.filter(
|
||||
rule=rule,
|
||||
uid=message.uid,
|
||||
@@ -687,8 +669,6 @@ class MailAccountHandler(LoggingMixin):
|
||||
|
||||
try:
|
||||
processed_files = self._handle_message(message, rule)
|
||||
if processed_files > 0:
|
||||
consumed_messages.add(message_key)
|
||||
|
||||
total_processed_files += processed_files
|
||||
mails_processed += 1
|
||||
|
||||
@@ -863,82 +863,6 @@ class TestMail(
|
||||
|
||||
self.assertEqual(len(self.mailMocker.bogus_mailbox.messages), 0)
|
||||
|
||||
def test_handle_mail_account_overlapping_rules_only_first_consumes(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Multiple rules that match the same mail
|
||||
WHEN:
|
||||
- Mail account is processed
|
||||
THEN:
|
||||
- Only the first rule should be applied
|
||||
"""
|
||||
account = MailAccount.objects.create(
|
||||
name="test",
|
||||
imap_server="",
|
||||
username="admin",
|
||||
password="secret",
|
||||
)
|
||||
|
||||
first_rule = MailRule.objects.create(
|
||||
name="testrule-first",
|
||||
account=account,
|
||||
action=MailRule.MailAction.DELETE,
|
||||
filter_subject="Claim",
|
||||
order=1,
|
||||
)
|
||||
_ = MailRule.objects.create(
|
||||
name="testrule-second",
|
||||
account=account,
|
||||
action=MailRule.MailAction.DELETE,
|
||||
filter_subject="Claim",
|
||||
order=2,
|
||||
)
|
||||
|
||||
self.mail_account_handler.handle_mail_account(account)
|
||||
self.mailMocker.apply_mail_actions()
|
||||
|
||||
self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 1)
|
||||
queued_rule = self.mailMocker._queue_consumption_tasks_mock.call_args.kwargs[
|
||||
"rule"
|
||||
]
|
||||
self.assertEqual(queued_rule.id, first_rule.id)
|
||||
|
||||
def test_handle_mail_account_skip_duplicate_uids_from_fetch(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Multiple mails with the same UID returned from the mailbox fetch method
|
||||
WHEN:
|
||||
- Mail account is processed
|
||||
THEN:
|
||||
- Only one of the mails should be processed, to avoid duplicate processing due to fetch issues
|
||||
"""
|
||||
account = MailAccount.objects.create(
|
||||
name="test",
|
||||
imap_server="",
|
||||
username="admin",
|
||||
password="secret",
|
||||
)
|
||||
_ = MailRule.objects.create(
|
||||
name="testrule",
|
||||
account=account,
|
||||
action=MailRule.MailAction.DELETE,
|
||||
filter_subject="Duplicated mail",
|
||||
)
|
||||
|
||||
duplicated_message = self.mailMocker.messageBuilder.create_message(
|
||||
subject="Duplicated mail",
|
||||
)
|
||||
self.mailMocker.bogus_mailbox.messages = [
|
||||
duplicated_message,
|
||||
duplicated_message,
|
||||
]
|
||||
self.mailMocker.bogus_mailbox.updateClient()
|
||||
|
||||
self.mail_account_handler.handle_mail_account(account)
|
||||
self.mailMocker.apply_mail_actions()
|
||||
|
||||
self.assertEqual(self.mailMocker._queue_consumption_tasks_mock.call_count, 1)
|
||||
|
||||
@pytest.mark.flaky(reruns=4)
|
||||
def test_handle_mail_account_flag(self) -> None:
|
||||
account = MailAccount.objects.create(
|
||||
|
||||
424
uv.lock
generated
424
uv.lock
generated
@@ -721,14 +721,14 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "concurrent-log-handler"
|
||||
version = "0.9.29"
|
||||
version = "0.9.28"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "portalocker", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9c/2c/ba185acc438cff6b58cd8f8dec27e7f4fcabf6968a1facbb6d0cacbde7fe/concurrent_log_handler-0.9.29.tar.gz", hash = "sha256:bc37a76d3f384cbf4a98f693ebd770543edc0f4cd5c6ab6bc70e9e1d7d582265", size = 42114, upload-time = "2026-02-22T18:18:25.758Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6b/ed/68b9c3a07a2331361a09a194e4375c4ee680a799391cfb1ca924ca2b6523/concurrent_log_handler-0.9.28.tar.gz", hash = "sha256:4cc27969b3420239bd153779266f40d9713ece814e312b7aa753ce62c6eacdb8", size = 30935, upload-time = "2025-06-10T19:02:15.622Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/f3/3e3188fdb3e53c6343fd1c7de41c55d4db626f07db3877eae77b28d58bd2/concurrent_log_handler-0.9.29-py3-none-any.whl", hash = "sha256:0d6c077fbaef2dae49a25975dcf72a602fe0a6a4ce80a3b7c37696d37e10459a", size = 32052, upload-time = "2026-02-22T18:18:24.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/a0/1331c3f12d95adc8d0385dc620001054c509db88376d2e17be36b6353020/concurrent_log_handler-0.9.28-py3-none-any.whl", hash = "sha256:65db25d05506651a61573937880789fc51c7555e7452303042b5a402fd78939c", size = 28983, upload-time = "2025-06-10T19:02:14.223Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1114,15 +1114,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "django-guardian"
|
||||
version = "3.3.0"
|
||||
version = "3.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "(python_full_version < '3.13' and sys_platform == 'darwin') or (python_full_version < '3.13' and sys_platform == 'linux')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d5/eb/bbeb4efd10d6cca8993697f571f17574e9fa7a912cead3ab39ce1d3793cd/django_guardian-3.3.0.tar.gz", hash = "sha256:abf1487399212cffdce7b3c909182a26fbe7e89746007299a8cab99f3d5ff009", size = 107443, upload-time = "2026-02-24T19:43:28.819Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e2/f9/bcff6a931298b9eb55e1550b55ab964fab747f594ba6d2d81cbe19736c5f/django_guardian-3.2.0.tar.gz", hash = "sha256:9e18ecd2e211b665972690c2d03d27bce0ea4932b5efac24a4bb9d526950a69e", size = 99940, upload-time = "2025-09-16T10:35:53.609Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/7c/3c/6517c5e27c6f9c165f989a5884f8798d66d25ce86fe44bf8c19aa4120351/django_guardian-3.3.0-py3-none-any.whl", hash = "sha256:4dca4fce104c7306e41b947a57d1cd6be46d9982548bef194ac8a6ad61d83686", size = 144003, upload-time = "2026-02-24T19:43:27Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/23/63a7d868373a73d25c4a5c2dd3cce3aaeb22fbee82560d42b6e93ba01403/django_guardian-3.2.0-py3-none-any.whl", hash = "sha256:0768565a057988a93fc4a1d93649c4a794abfd7473a8408a079cfbf83c559d77", size = 134674, upload-time = "2025-09-16T10:35:51.69Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1137,29 +1137,16 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/10/23c0644cf67567bbe4e3a2eeeec0e9c79b701990c0e07c5ee4a4f8897f91/django_multiselectfield-1.0.1-py3-none-any.whl", hash = "sha256:18dc14801f7eca844a48e21cba6d8ec35b9b581f2373bbb2cb75e6994518259a", size = 20481, upload-time = "2025-06-12T14:41:20.107Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "django-rich"
|
||||
version = "2.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a6/67/e307a5fef657e7992468f567b521534c52e01bdda5a1ae5b12de679a670f/django_rich-2.2.0.tar.gz", hash = "sha256:ecec7842d040024ed8a225699388535e46b87277550c33f46193b52cece2f780", size = 62427, upload-time = "2025-09-18T11:42:17.182Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/27/ed/23fa669493d78cd67e7f6734fa380f8690f2b4d75b4f72fd645a52c3b32a/django_rich-2.2.0-py3-none-any.whl", hash = "sha256:a0d2c916bd9750b6e9beb57407aef5e836c8705d7dbe9e4fd4725f7bbe41c407", size = 9210, upload-time = "2025-09-18T11:42:15.779Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "django-soft-delete"
|
||||
version = "1.0.23"
|
||||
version = "1.0.22"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "django", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/aa/98/c7c52a85b070b1703774df817b6460a7714655302a2d503f6447544f1a29/django_soft_delete-1.0.23.tar.gz", hash = "sha256:814659f0d19d4f2afc58b31ff73f88f0af66715ccef3b4fcd8f6b3a011d59b2a", size = 22458, upload-time = "2026-02-21T17:48:41.345Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/d1/c990b731676f93bd4594dee4b5133df52f5d0eee1eb8a969b4030014ac54/django_soft_delete-1.0.22.tar.gz", hash = "sha256:32d0bb95f180c28a40163e78a558acc18901fd56011f91f8ee735c171a6d4244", size = 21982, upload-time = "2025-10-25T13:11:46.199Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/91/9e/77375a163c340fff03d037eac7d970ce006626e6c3aea87b5d159f052f8b/django_soft_delete-1.0.23-py3-none-any.whl", hash = "sha256:dd2133d4925d58308680f389daa2e150abf7b81a4f0abbbf2161a9db3b9f1e74", size = 19308, upload-time = "2026-02-21T17:48:39.974Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/c2/fca2bf69b7ca7e18aed9ac059e89f1043663e207a514e8fb652450e49631/django_soft_delete-1.0.22-py3-none-any.whl", hash = "sha256:81973c541d21452d249151085d617ebbfb5ec463899f47cd6b1306677481e94c", size = 19221, upload-time = "2025-10-25T13:11:44.755Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1363,11 +1350,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.24.3"
|
||||
version = "3.20.3"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/73/92/a8e2479937ff39185d20dd6a851c1a63e55849e447a55e798cc2e1f49c65/filelock-3.24.3.tar.gz", hash = "sha256:011a5644dc937c22699943ebbfc46e969cdde3e171470a6e40b9533e5a72affa", size = 37935, upload-time = "2026-02-19T00:48:20.543Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/9c/0f/5d0c71a1aefeb08efff26272149e07ab922b64f46c63363756224bd6872e/filelock-3.24.3-py3-none-any.whl", hash = "sha256:426e9a4660391f7f8a810d71b0555bce9008b0a1cc342ab1f6947d37639e002d", size = 24331, upload-time = "2026-02-19T00:48:18.465Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2194,7 +2181,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-core"
|
||||
version = "0.14.15"
|
||||
version = "0.14.13"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "aiohttp", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -2222,15 +2209,14 @@ dependencies = [
|
||||
{ name = "sqlalchemy", extra = ["asyncio"], marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tenacity", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tiktoken", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tinytag", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-inspect", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "wrapt", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0c/4f/7c714bdf94dd229707b43e7f8cedf3aed0a99938fd46a9ad8a418c199988/llama_index_core-0.14.15.tar.gz", hash = "sha256:3766aeeb95921b3a2af8c2a51d844f75f404215336e1639098e3652db52c68ce", size = 11593505, upload-time = "2026-02-18T19:05:48.274Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/74/54/d6043a088e5e9c1d62300db7ad0ef417c6b9a92f7b4a5cade066aeafdaca/llama_index_core-0.14.13.tar.gz", hash = "sha256:c3b30d20ae0407e5d0a1d35bb3376a98e242661ebfc22da754b5a3da1f8108c0", size = 11589074, upload-time = "2026-01-21T20:44:16.287Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/41/9e/262f6465ee4fffa40698b3cc2177e377ce7d945d3bd8b7d9c6b09448625d/llama_index_core-0.14.15-py3-none-any.whl", hash = "sha256:e02b321c10673871a38aaefdc4a93d5ae8ec324cad4408683189e5a1aa1e3d52", size = 11937002, upload-time = "2026-02-18T19:05:45.855Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/59/9769f03f1cccadcc014b3b65c166de18999b51459a0f0a579d80f6c91d80/llama_index_core-0.14.13-py3-none-any.whl", hash = "sha256:392f0a5a09433e9dea786964ef5fe5ca2a2b10aee9f979a9507c19a14da2a20a", size = 11934761, upload-time = "2026-01-21T20:44:18.892Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2288,27 +2274,27 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-llms-openai"
|
||||
version = "0.6.21"
|
||||
version = "0.6.18"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "openai", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d8/5b/775289b3064302966cc839bbccfdbe314f706eaf58ad4233b86e5d53343d/llama_index_llms_openai-0.6.21.tar.gz", hash = "sha256:0b92dcfb01cbc7752f5b8bdf6d93430643d295210cf9392b45291d6fdd81e0ee", size = 25961, upload-time = "2026-02-26T04:19:33.604Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/56/78/298de76242aee7f5fdd65a0bffb541b3f81759613de1e8ebc719eec8e8af/llama_index_llms_openai-0.6.18.tar.gz", hash = "sha256:36c0256a7a211bbbc5ecc00d3f2caa9730eea1971ced3b68b7c94025c0448020", size = 25946, upload-time = "2026-02-06T12:01:03.095Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/d7/5b513acbf0bfc2b6ef281b6bbca764062facc431e8f13763c16005fbd34b/llama_index_llms_openai-0.6.21-py3-none-any.whl", hash = "sha256:ef8c048849f844c7db9ff4208cca9878a799bc5fcdd72954197ea11e64b37c97", size = 26965, upload-time = "2026-02-26T04:19:34.561Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/46/5a4b62108fb94febe27d35c8476dea042d7a609ee4bf14f5b61f03d5a75a/llama_index_llms_openai-0.6.18-py3-none-any.whl", hash = "sha256:73bbbf233d38116d48350391a3649884829564f4c8f6168c8fa3f3ae1b557376", size = 26945, upload-time = "2026-02-06T12:01:01.25Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "llama-index-vector-stores-faiss"
|
||||
version = "0.5.3"
|
||||
version = "0.5.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "llama-index-core", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c5/e6/57da31b38d173cd9124fdcdd47487b9a917b69bd49e8f6e551407ccfa860/llama_index_vector_stores_faiss-0.5.3.tar.gz", hash = "sha256:9620b1e27e96233fda88878c453532fba6061cf7ba7a53698a34703faab21ece", size = 6048, upload-time = "2026-02-12T14:22:14.612Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/2d/5f/c4ae340f178f202cf09dcc24dd0953a41d9ab24bc33e1f7220544ba86e41/llama_index_vector_stores_faiss-0.5.2.tar.gz", hash = "sha256:924504765e68b1f84ec602feb2d9516be6a6c12fad5e133f19cc5da3b23f4282", size = 5910, upload-time = "2025-12-17T21:01:13.21Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/ad/ad192dd624ca2875b8ca74e55fddf9b083d6614524004f7830379d0a0cfd/llama_index_vector_stores_faiss-0.5.3-py3-none-any.whl", hash = "sha256:ef186e38a820e696a1adca15432c8539d73f2959eb05671011db21091a286c8c", size = 7738, upload-time = "2026-02-12T14:22:13.756Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/c1/c8317250c2a83d1d439814d1a7f41fa34a23c224b3099da898f08a249859/llama_index_vector_stores_faiss-0.5.2-py3-none-any.whl", hash = "sha256:72a3a03d9f25c70bbcc8c61aa860cd1db69f2a8070606ecc3266d767b71ff2a2", size = 7605, upload-time = "2025-12-17T21:01:12.429Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -2780,9 +2766,9 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "mysqlclient"
|
||||
version = "2.2.8"
|
||||
version = "2.2.7"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/eb/b0/9df076488cb2e536d40ce6dbd4273c1f20a386e31ffe6e7cb613902b3c2a/mysqlclient-2.2.8.tar.gz", hash = "sha256:8ed20c5615a915da451bb308c7d0306648a4fd9a2809ba95c992690006306199", size = 92287, upload-time = "2026-02-10T10:58:37.405Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/61/68/810093cb579daae426794bbd9d88aa830fae296e85172d18cb0f0e5dd4bc/mysqlclient-2.2.7.tar.gz", hash = "sha256:24ae22b59416d5fcce7e99c9d37548350b4565baac82f95e149cac6ce4163845", size = 91383, upload-time = "2025-01-10T12:06:00.763Z" }
|
||||
|
||||
[[package]]
|
||||
name = "nest-asyncio"
|
||||
@@ -2825,7 +2811,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "nltk"
|
||||
version = "3.9.3"
|
||||
version = "3.9.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -2833,9 +2819,9 @@ dependencies = [
|
||||
{ name = "regex", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e1/8f/915e1c12df07c70ed779d18ab83d065718a926e70d3ea33eb0cd66ffb7c0/nltk-3.9.3.tar.gz", hash = "sha256:cb5945d6424a98d694c2b9a0264519fab4363711065a46aa0ae7a2195b92e71f", size = 2923673, upload-time = "2026-02-24T12:05:53.833Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/f9/76/3a5e4312c19a028770f86fd7c058cf9f4ec4321c6cf7526bab998a5b683c/nltk-3.9.2.tar.gz", hash = "sha256:0f409e9b069ca4177c1903c3e843eef90c7e92992fa4931ae607da6de49e1419", size = 2887629, upload-time = "2025-10-01T07:19:23.764Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/7e/9af5a710a1236e4772de8dfcc6af942a561327bb9f42b5b4a24d0cf100fd/nltk-3.9.3-py3-none-any.whl", hash = "sha256:60b3db6e9995b3dd976b1f0fa7dec22069b2677e759c28eb69b62ddd44870522", size = 1525385, upload-time = "2026-02-24T12:05:46.54Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/90/81ac364ef94209c100e12579629dc92bf7a709a84af32f8c551b02c07e94/nltk-3.9.2-py3-none-any.whl", hash = "sha256:1e209d2b3009110635ed9709a67a1a3e33a10f799490fa71cf4bec218c11c88a", size = 1513404, upload-time = "2025-10-01T07:19:21.648Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3005,7 +2991,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "2.24.0"
|
||||
version = "2.17.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "anyio", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -3017,9 +3003,9 @@ dependencies = [
|
||||
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/55/13/17e87641b89b74552ed408a92b231283786523edddc95f3545809fab673c/openai-2.24.0.tar.gz", hash = "sha256:1e5769f540dbd01cb33bc4716a23e67b9d695161a734aff9c5f925e2bf99a673", size = 658717, upload-time = "2026-02-24T20:02:07.958Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/9c/a2/677f22c4b487effb8a09439fb6134034b5f0a39ca27df8b95fac23a93720/openai-2.17.0.tar.gz", hash = "sha256:47224b74bd20f30c6b0a6a329505243cb2f26d5cf84d9f8d0825ff8b35e9c999", size = 631445, upload-time = "2026-02-05T16:27:40.953Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/30/844dc675ee6902579b8eef01ed23917cc9319a1c9c0c14ec6e39340c96d0/openai-2.24.0-py3-none-any.whl", hash = "sha256:fed30480d7d6c884303287bde864980a4b137b60553ffbcf9ab4a233b7a73d94", size = 1120122, upload-time = "2026-02-24T20:02:05.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/97/284535aa75e6e84ab388248b5a323fc296b1f70530130dee37f7f4fbe856/openai-2.17.0-py3-none-any.whl", hash = "sha256:4f393fd886ca35e113aac7ff239bcd578b81d8f104f5aedc7d3693eb2af1d338", size = 1069524, upload-time = "2026-02-05T16:27:38.941Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3055,7 +3041,6 @@ dependencies = [
|
||||
{ name = "django-filter", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "django-guardian", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "django-multiselectfield", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "django-rich", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "django-soft-delete", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "django-treenode", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "djangorestframework", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -3096,6 +3081,7 @@ dependencies = [
|
||||
{ name = "tika-client", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "torch", version = "2.10.0", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'darwin'" },
|
||||
{ name = "torch", version = "2.10.0+cpu", source = { registry = "https://download.pytorch.org/whl/cpu" }, marker = "sys_platform == 'linux'" },
|
||||
{ name = "tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "watchfiles", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "whitenoise", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "whoosh-reloaded", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -3176,6 +3162,7 @@ typing = [
|
||||
{ name = "types-pytz", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-redis", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-setuptools", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "types-tqdm", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
@@ -3197,9 +3184,8 @@ requires-dist = [
|
||||
{ name = "django-cors-headers", specifier = "~=4.9.0" },
|
||||
{ name = "django-extensions", specifier = "~=4.1" },
|
||||
{ name = "django-filter", specifier = "~=25.1" },
|
||||
{ name = "django-guardian", specifier = "~=3.3.0" },
|
||||
{ name = "django-guardian", specifier = "~=3.2.0" },
|
||||
{ name = "django-multiselectfield", specifier = "~=1.0.1" },
|
||||
{ name = "django-rich", specifier = "~=2.2.0" },
|
||||
{ name = "django-soft-delete", specifier = "~=1.0.18" },
|
||||
{ name = "django-treenode", specifier = ">=0.23.2" },
|
||||
{ name = "djangorestframework", specifier = "~=3.16" },
|
||||
@@ -3208,7 +3194,7 @@ requires-dist = [
|
||||
{ name = "drf-spectacular-sidecar", specifier = "~=2026.1.1" },
|
||||
{ name = "drf-writable-nested", specifier = "~=0.7.1" },
|
||||
{ name = "faiss-cpu", specifier = ">=1.10" },
|
||||
{ name = "filelock", specifier = "~=3.24.3" },
|
||||
{ name = "filelock", specifier = "~=3.20.0" },
|
||||
{ name = "flower", specifier = "~=2.0.1" },
|
||||
{ name = "gotenberg-client", specifier = "~=0.13.1" },
|
||||
{ name = "granian", extras = ["uvloop"], marker = "extra == 'webserver'", specifier = "~=2.7.0" },
|
||||
@@ -3246,6 +3232,7 @@ requires-dist = [
|
||||
{ name = "setproctitle", specifier = "~=1.3.4" },
|
||||
{ name = "tika-client", specifier = "~=0.10.0" },
|
||||
{ name = "torch", specifier = "~=2.10.0", index = "https://download.pytorch.org/whl/cpu" },
|
||||
{ name = "tqdm", specifier = "~=4.67.1" },
|
||||
{ name = "watchfiles", specifier = ">=1.1.1" },
|
||||
{ name = "whitenoise", specifier = "~=6.11" },
|
||||
{ name = "whoosh-reloaded", specifier = ">=2.7.5" },
|
||||
@@ -3261,8 +3248,8 @@ dev = [
|
||||
{ name = "prek", specifier = "~=0.3.0" },
|
||||
{ name = "pytest", specifier = "~=9.0.0" },
|
||||
{ name = "pytest-cov", specifier = "~=7.0.0" },
|
||||
{ name = "pytest-django", specifier = "~=4.12.0" },
|
||||
{ name = "pytest-env", specifier = "~=1.5.0" },
|
||||
{ name = "pytest-django", specifier = "~=4.11.1" },
|
||||
{ name = "pytest-env", specifier = "~=1.2.0" },
|
||||
{ name = "pytest-httpx" },
|
||||
{ name = "pytest-mock", specifier = "~=3.15.1" },
|
||||
{ name = "pytest-rerunfailures", specifier = "~=16.1" },
|
||||
@@ -3282,8 +3269,8 @@ testing = [
|
||||
{ name = "imagehash" },
|
||||
{ name = "pytest", specifier = "~=9.0.0" },
|
||||
{ name = "pytest-cov", specifier = "~=7.0.0" },
|
||||
{ name = "pytest-django", specifier = "~=4.12.0" },
|
||||
{ name = "pytest-env", specifier = "~=1.5.0" },
|
||||
{ name = "pytest-django", specifier = "~=4.11.1" },
|
||||
{ name = "pytest-env", specifier = "~=1.2.0" },
|
||||
{ name = "pytest-httpx" },
|
||||
{ name = "pytest-mock", specifier = "~=3.15.1" },
|
||||
{ name = "pytest-rerunfailures", specifier = "~=16.1" },
|
||||
@@ -3310,6 +3297,7 @@ typing = [
|
||||
{ name = "types-pytz" },
|
||||
{ name = "types-redis" },
|
||||
{ name = "types-setuptools" },
|
||||
{ name = "types-tqdm" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3549,23 +3537,23 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "prek"
|
||||
version = "0.3.3"
|
||||
version = "0.3.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/bf/f1/7613dc8347a33e40fc5b79eec6bc7d458d8bbc339782333d8433b665f86f/prek-0.3.3.tar.gz", hash = "sha256:117bd46ebeb39def24298ce021ccc73edcf697b81856fcff36d762dd56093f6f", size = 343697, upload-time = "2026-02-15T13:33:28.723Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/d3/f5/ee52def928dd1355c20bcfcf765e1e61434635c33f3075e848e7b83a157b/prek-0.3.2.tar.gz", hash = "sha256:dce0074ff1a21290748ca567b4bda7553ee305a8c7b14d737e6c58364a499364", size = 334229, upload-time = "2026-02-06T13:49:47.539Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/8b/dce13d2a3065fd1e8ffce593a0e51c4a79c3cde9c9a15dc0acc8d9d1573d/prek-0.3.3-py3-none-linux_armv6l.whl", hash = "sha256:e8629cac4bdb131be8dc6e5a337f0f76073ad34a8305f3fe2bc1ab6201ede0a4", size = 4644636, upload-time = "2026-02-15T13:33:43.609Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/01/30/06ab4dbe7ce02a8ce833e92deb1d9a8e85ae9d40e33d1959a2070b7494c6/prek-0.3.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4b9e819b9e4118e1e785047b1c8bd9aec7e4d836ed034cb58b7db5bcaaf49437", size = 4651410, upload-time = "2026-02-15T13:33:34.277Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/fc/da3bc5cb38471e7192eda06b7a26b7c24ef83e82da2c1dbc145f2bf33640/prek-0.3.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bf29db3b5657c083eb8444c25aadeeec5167dc492e9019e188f87932f01ea50a", size = 4273163, upload-time = "2026-02-15T13:33:42.106Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/74/47839395091e2937beced81a5dd2f8ea9c8239c853da8611aaf78ee21a8b/prek-0.3.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:ae09736149815b26e64a9d350ca05692bab32c2afdf2939114d3211aaad68a3e", size = 4631808, upload-time = "2026-02-15T13:33:20.076Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/89/3f5ef6f7c928c017cb63b029349d6bc03598ab7f6979d4a770ce02575f82/prek-0.3.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:856c2b55c51703c366bb4ce81c6a91102b70573a9fc8637db2ac61c66e4565f9", size = 4548959, upload-time = "2026-02-15T13:33:36.325Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/18/80002c4c4475f90ca025f27739a016927a0e5d905c60612fc95da1c56ab7/prek-0.3.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3acdf13a018f685beaff0a71d4b0d2ccbab4eaa1aced6d08fd471c1a654183eb", size = 4862256, upload-time = "2026-02-15T13:33:37.754Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/25/648bf084c2468fa7cfcdbbe9e59956bbb31b81f36e113bc9107d80af26a7/prek-0.3.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0f035667a8bd0a77b2bfa2b2e125da8cb1793949e9eeef0d8daab7f8ac8b57fe", size = 5404486, upload-time = "2026-02-15T13:33:39.239Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/43/261fb60a11712a327da345912bd8b338dc5a050199de800faafa278a6133/prek-0.3.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d09b2ad14332eede441d977de08eb57fb3f61226ed5fd2ceb7aadf5afcdb6794", size = 4887513, upload-time = "2026-02-15T13:33:40.702Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/2c/581e757ee57ec6046b32e0ee25660fc734bc2622c319f57119c49c0cab58/prek-0.3.3-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:c0c3ffac16e37a9daba43a7e8316778f5809b70254be138761a8b5b9ef0df28e", size = 4632336, upload-time = "2026-02-15T13:33:25.867Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/d8/aa276ce5d11b77882da4102ca0cb7161095831105043ae7979bbfdcc3dc4/prek-0.3.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a3dc7720b580c07c0386e17af2486a5b4bc2f6cc57034a288a614dcbc4abe555", size = 4679370, upload-time = "2026-02-15T13:33:22.247Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/19/9d4fa7bde428e58d9f48a74290c08736d42aeb5690dcdccc7a713e34a449/prek-0.3.3-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:60e0fa15da5020a03df2ee40268145ec5b88267ec2141a205317ad4df8c992d6", size = 4540316, upload-time = "2026-02-15T13:33:24.088Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/b5/973cce29257e0b47b16cc9b4c162772ea01dbb7c080791ea0c068e106e05/prek-0.3.3-py3-none-musllinux_1_1_i686.whl", hash = "sha256:553515da9586d9624dc42db32b744fdb91cf62b053753037a0cadb3c2d8d82a2", size = 4724566, upload-time = "2026-02-15T13:33:29.832Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/8b/ad8b2658895a8ed2b0bc630bf38686fe38b7ff2c619c58953a80e4de3048/prek-0.3.3-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:9512cf370e0d1496503463a4a65621480efb41b487841a9e9ff1661edf14b238", size = 4995072, upload-time = "2026-02-15T13:33:27.417Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/69/70a5fc881290a63910494df2677c0fb241d27cfaa435bbcd0de5cd2e2443/prek-0.3.2-py3-none-linux_armv6l.whl", hash = "sha256:4f352f9c3fc98aeed4c8b2ec4dbf16fc386e45eea163c44d67e5571489bd8e6f", size = 4614960, upload-time = "2026-02-06T13:50:05.818Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/15/a82d5d32a2207ccae5d86ea9e44f2b93531ed000faf83a253e8d1108e026/prek-0.3.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4a000cfbc3a6ec7d424f8be3c3e69ccd595448197f92daac8652382d0acc2593", size = 4622889, upload-time = "2026-02-06T13:49:53.662Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/75/ea833b58a12741397017baef9b66a6e443bfa8286ecbd645d14111446280/prek-0.3.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5436bdc2702cbd7bcf9e355564ae66f8131211e65fefae54665a94a07c3d450a", size = 4239653, upload-time = "2026-02-06T13:50:02.88Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/b4/d9c3885987afac6e20df4cb7db14e3b0d5a08a77ae4916488254ebac4d0b/prek-0.3.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.musllinux_1_1_aarch64.whl", hash = "sha256:0161b5f584f9e7f416d6cf40a17b98f17953050ff8d8350ec60f20fe966b86b6", size = 4595101, upload-time = "2026-02-06T13:49:49.813Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/21/a6/1a06473ed83dbc898de22838abdb13954e2583ce229f857f61828384634c/prek-0.3.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e641e8533bca38797eebb49aa89ed0e8db0e61225943b27008c257e3af4d631", size = 4521978, upload-time = "2026-02-06T13:49:41.266Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/5e/c38390d5612e6d86b32151c1d2fdab74a57913473193591f0eb00c894c21/prek-0.3.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfca1810d49d3f9ef37599c958c4e716bc19a1d78a7e88cbdcb332e0b008994f", size = 4829108, upload-time = "2026-02-06T13:49:44.598Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/80/a6/cecce2ab623747ff65ed990bb0d95fa38449ee19b348234862acf9392fff/prek-0.3.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5d69d754299a95a85dc20196f633232f306bee7e7c8cba61791f49ce70404ec", size = 5357520, upload-time = "2026-02-06T13:49:48.512Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a5/18/d6bcb29501514023c76d55d5cd03bdbc037737c8de8b6bc41cdebfb1682c/prek-0.3.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:539dcb90ad9b20837968539855df6a29493b328a1ae87641560768eed4f313b0", size = 4852635, upload-time = "2026-02-06T13:49:58.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/0a/ae46f34ba27ba87aea5c9ad4ac9cd3e07e014fd5079ae079c84198f62118/prek-0.3.2-py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:1998db3d0cbe243984736c82232be51318f9192e2433919a6b1c5790f600b5fd", size = 4599484, upload-time = "2026-02-06T13:49:43.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/a9/73bfb5b3f7c3583f9b0d431924873928705cdef6abb3d0461c37254a681b/prek-0.3.2-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:07ab237a5415a3e8c0db54de9d63899bcd947624bdd8820d26f12e65f8d19eb7", size = 4657694, upload-time = "2026-02-06T13:50:01.074Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/bc/0994bc176e1a80110fad3babce2c98b0ac4007630774c9e18fc200a34781/prek-0.3.2-py3-none-musllinux_1_1_armv7l.whl", hash = "sha256:0ced19701d69c14a08125f14a5dd03945982edf59e793c73a95caf4697a7ac30", size = 4509337, upload-time = "2026-02-06T13:49:54.891Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/13/e73f85f65ba8f626468e5d1694ab3763111513da08e0074517f40238c061/prek-0.3.2-py3-none-musllinux_1_1_i686.whl", hash = "sha256:ffb28189f976fa111e770ee94e4f298add307714568fb7d610c8a7095cb1ce59", size = 4697350, upload-time = "2026-02-06T13:50:04.526Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/47/98c46dcd580305b9960252a4eb966f1a7b1035c55c363f378d85662ba400/prek-0.3.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:f63134b3eea14421789a7335d86f99aee277cb520427196f2923b9260c60e5c5", size = 4955860, upload-time = "2026-02-06T13:49:56.581Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3953,15 +3941,15 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pyrefly"
|
||||
version = "0.54.0"
|
||||
version = "0.51.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/81/44/c10b16a302fda90d0af1328f880b232761b510eab546616a7be2fdf35a57/pyrefly-0.54.0.tar.gz", hash = "sha256:c6663be64d492f0d2f2a411ada9f28a6792163d34133639378b7f3dd9a8dca94", size = 5098893, upload-time = "2026-02-23T15:44:35.111Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e9/bd/b8065b801b4058954577afa3f78bc1dda5f119f7ea353570ba9029db5109/pyrefly-0.51.0.tar.gz", hash = "sha256:99467db60f148bb6965c45cdc3e769d94b704100e9d57b6455cc6796e5a9e7b1", size = 4918889, upload-time = "2026-02-02T15:32:58.45Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/99/8fdcdb4e55f0227fdd9f6abce36b619bab1ecb0662b83b66adc8cba3c788/pyrefly-0.54.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:58a3f092b6dc25ef79b2dc6c69a40f36784ca157c312bfc0baea463926a9db6d", size = 12223973, upload-time = "2026-02-23T15:44:14.278Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/35/c2aaf87a76003ad27b286594d2e5178f811eaa15bfe3d98dba2b47d56dd1/pyrefly-0.54.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:615081414106dd95873bc39c3a4bed68754c6cc24a8177ac51d22f88f88d3eb3", size = 11785585, upload-time = "2026-02-23T15:44:17.468Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c4/4a/ced02691ed67e5a897714979196f08ad279ec7ec7f63c45e00a75a7f3c0e/pyrefly-0.54.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbcaf20f5fe585079079a95205c1f3cd4542d17228cdf1df560288880623b70", size = 33381977, upload-time = "2026-02-23T15:44:19.736Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0b/ce/72a117ed437c8f6950862181014b41e36f3c3997580e29b772b71e78d587/pyrefly-0.54.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d5da116c0d34acfbd66663addd3ca8aa78a636f6692a66e078126d3620a883", size = 35962821, upload-time = "2026-02-23T15:44:22.357Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/de/89013f5ae0a35d2b6b01274a92a35ee91431ea001050edf0a16748d39875/pyrefly-0.54.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef3ac27f1a4baaf67aead64287d3163350844794aca6315ad1a9650b16ec26a", size = 38496689, upload-time = "2026-02-23T15:44:25.236Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/c1/0aa9b4cf5180f481e9f07a8fbfe9c3bc6044ec97612373fdd4f9f6aa49a4/pyrefly-0.51.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:4013f914d3b523a9b1afc25a620a011406f7745ad5cfc5781ec95235bc9cd583", size = 11900057, upload-time = "2026-02-02T15:32:34.353Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/07/6a576ec997845bc8e7d89afebe12bc6386092446330194789d120f6a73f7/pyrefly-0.51.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4a6eeffd5649d393bf457b7c1253f89b33295d475b1cae0f9a21377986708804", size = 11480421, upload-time = "2026-02-02T15:32:37.314Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/0e/1b4675289a29b72818c812d7456031a7cab98532826d207d39465f75712c/pyrefly-0.51.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:beace17854735136134848e5a0e8678b6862ee1144eaeb27f1bb70ff1f8fd9ca", size = 32511878, upload-time = "2026-02-02T15:32:40.136Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/4e/d564711718e4158339397123085da6afcad1c62222efa483cb7db5dab58b/pyrefly-0.51.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40055df65c184d825081e7177b99d277c8a1cb29c6e41a54ff40828d355aa467", size = 34797013, upload-time = "2026-02-02T15:32:43.687Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/db/961162ec2bb74a0cd5d0ef988f71695581449b3c6fce76ede9a984cdc8d1/pyrefly-0.51.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65689401e35b7d01a1394cdb1bafd46e2f49369b0f9891a333bce3568f100ce2", size = 35915591, upload-time = "2026-02-02T15:32:47.64Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3997,28 +3985,27 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "pytest-django"
|
||||
version = "4.12.0"
|
||||
version = "4.11.1"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/13/2b/db9a193df89e5660137f5428063bcc2ced7ad790003b26974adf5c5ceb3b/pytest_django-4.12.0.tar.gz", hash = "sha256:df94ec819a83c8979c8f6de13d9cdfbe76e8c21d39473cfe2b40c9fc9be3c758", size = 91156, upload-time = "2026-02-14T18:40:49.235Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/fb/55d580352db26eb3d59ad50c64321ddfe228d3d8ac107db05387a2fadf3a/pytest_django-4.11.1.tar.gz", hash = "sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991", size = 86202, upload-time = "2025-04-03T18:56:09.338Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/83/a5/41d091f697c09609e7ef1d5d61925494e0454ebf51de7de05f0f0a728f1d/pytest_django-4.12.0-py3-none-any.whl", hash = "sha256:3ff300c49f8350ba2953b90297d23bf5f589db69545f56f1ec5f8cff5da83e85", size = 26123, upload-time = "2026-02-14T18:40:47.381Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/be/ac/bd0608d229ec808e51a21044f3f2f27b9a37e7a0ebaca7247882e67876af/pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10", size = 25281, upload-time = "2025-04-03T18:56:07.678Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-env"
|
||||
version = "1.5.0"
|
||||
version = "1.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "pytest", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "python-dotenv", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/e6/56/a931c6f6194917ff44be41b8586e2ffd13a18fa70fb28d9800a4695befa5/pytest_env-1.5.0.tar.gz", hash = "sha256:db8994b9ce170f135a37acc09ac753a6fc697d15e691b576ed8d8ca261c40246", size = 15271, upload-time = "2026-02-17T18:31:39.095Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/13/12/9c87d0ca45d5992473208bcef2828169fa7d39b8d7fc6e3401f5c08b8bf7/pytest_env-1.2.0.tar.gz", hash = "sha256:475e2ebe8626cee01f491f304a74b12137742397d6c784ea4bc258f069232b80", size = 8973, upload-time = "2025-10-09T19:15:47.42Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/61/af/99b52a8524983bfece35e51e65a0b517b22920c023e57855c95e744e19e4/pytest_env-1.5.0-py3-none-any.whl", hash = "sha256:89a15686ac837c9cd009a8a2d52bd55865e2f23c82094247915dae4540c87161", size = 10122, upload-time = "2026-02-17T18:31:37.496Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/98/822b924a4a3eb58aacba84444c7439fce32680592f394de26af9c76e2569/pytest_env-1.2.0-py3-none-any.whl", hash = "sha256:d7e5b7198f9b83c795377c09feefa45d56083834e60d04767efd64819fc9da00", size = 6251, upload-time = "2025-10-09T19:15:46.077Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4399,102 +4386,102 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "2026.2.19"
|
||||
version = "2026.1.15"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/ff/c0/d8079d4f6342e4cec5c3e7d7415b5cd3e633d5f4124f7a4626908dbe84c7/regex-2026.2.19.tar.gz", hash = "sha256:6fb8cb09b10e38f3ae17cc6dc04a1df77762bd0351b6ba9041438e7cc85ec310", size = 414973, upload-time = "2026-02-19T19:03:47.899Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/af/de/f10b4506acfd684de4e42b0aa56ccea1a778a18864da8f6d319a40591062/regex-2026.2.19-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f5a37a17d110f9d5357a43aa7e3507cb077bf3143d1c549a45c4649e90e40a70", size = 488369, upload-time = "2026-02-19T18:59:45.01Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/2f/b4eaef1f0b4d0bf2a73eaf07c08f6c13422918a4180c9211ce0521746d0c/regex-2026.2.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:676c4e6847a83a1d5732b4ed553881ad36f0a8133627bb695a89ecf3571499d3", size = 290743, upload-time = "2026-02-19T18:59:48.527Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/7c/805413bd0a88d04688c0725c222cfb811bd54a2f571004c24199a1ae55d6/regex-2026.2.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82336faeecac33297cd42857c3b36f12b91810e3fdd276befdd128f73a2b43fa", size = 288652, upload-time = "2026-02-19T18:59:50.2Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/08/ff/2c4cd530a878b1975398e76faef4285f11e7c9ccf1aaedfd528bfcc1f580/regex-2026.2.19-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:52136f5b71f095cb74b736cc3a1b578030dada2e361ef2f07ca582240b703946", size = 781759, upload-time = "2026-02-19T18:59:51.836Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/45/9608ab1b41f6740ff4076eabadde8e8b3f3400942b348ac41e8599ccc131/regex-2026.2.19-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4192464fe3e6cb0ef6751f7d3b16f886d8270d359ed1590dd555539d364f0ff7", size = 850947, upload-time = "2026-02-19T18:59:53.739Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/3a/66471b6c4f7cac17e14bf5300e46661bba2b17ffb0871bd2759e837a6f82/regex-2026.2.19-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e561dd47a85d2660d3d3af4e6cb2da825cf20f121e577147963f875b83d32786", size = 898794, upload-time = "2026-02-19T18:59:55.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/d2/38c53929a5931f7398e5e49f5a5a3079cb2aba30119b4350608364cfad8c/regex-2026.2.19-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00ec994d7824bf01cd6c7d14c7a6a04d9aeaf7c42a2bc22d2359d715634d539b", size = 791922, upload-time = "2026-02-19T18:59:58.216Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/bd/b046e065630fa25059d9c195b7b5308ea94da45eee65d40879772500f74c/regex-2026.2.19-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2cb00aabd96b345d56a8c2bc328c8d6c4d29935061e05078bf1f02302e12abf5", size = 783345, upload-time = "2026-02-19T18:59:59.948Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d4/8f/045c643d2fa255a985e8f87d848e4be230b711a8935e4bdc58e60b8f7b84/regex-2026.2.19-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f374366ed35673ea81b86a8859c457d4fae6ba092b71024857e9e237410c7404", size = 768055, upload-time = "2026-02-19T19:00:01.65Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/9f/ab7ae9f5447559562f1a788bbc85c0e526528c5e6c20542d18e4afc86aad/regex-2026.2.19-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f9417fd853fcd00b7d55167e692966dd12d95ba1a88bf08a62002ccd85030790", size = 774955, upload-time = "2026-02-19T19:00:03.368Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/5c/f16fc23c56f60b6f4ff194604a6e53bb8aec7b6e8e4a23a482dee8d77235/regex-2026.2.19-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:12e86a01594031abf892686fcb309b041bf3de3d13d99eb7e2b02a8f3c687df1", size = 846010, upload-time = "2026-02-19T19:00:05.079Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/51/c8/6be4c854135d7c9f35d4deeafdaf124b039ecb4ffcaeb7ed0495ad2c97ca/regex-2026.2.19-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:79014115e6fdf18fd9b32e291d58181bf42d4298642beaa13fd73e69810e4cb6", size = 755938, upload-time = "2026-02-19T19:00:07.148Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/8d/f683d49b9663a5324b95a328e69d397f6dade7cb84154eec116bf79fe150/regex-2026.2.19-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:31aefac2506967b7dd69af2c58eca3cc8b086d4110b66d6ac6e9026f0ee5b697", size = 835773, upload-time = "2026-02-19T19:00:08.939Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/cd/619224b90da09f167fe4497c350a0d0b30edc539ee9244bf93e604c073c3/regex-2026.2.19-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:49cef7bb2a491f91a8869c7cdd90babf0a417047ab0bf923cd038ed2eab2ccb8", size = 780075, upload-time = "2026-02-19T19:00:10.838Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/93/43f405a98f54cc59c786efb4fc0b644615ed2392fc89d57d30da11f35b5b/regex-2026.2.19-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:93b16a18cadb938f0f2306267161d57eb33081a861cee9ffcd71e60941eb5dfc", size = 488365, upload-time = "2026-02-19T19:00:17.857Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/46/da0efce22cd8f5ae28eeb25ac69703f49edcad3331ac22440776f4ea0867/regex-2026.2.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:78af1e499cab704131f6f4e2f155b7f54ce396ca2acb6ef21a49507e4752e0be", size = 290737, upload-time = "2026-02-19T19:00:19.869Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/19/f735078448132c1c974974d30d5306337bc297fe6b6f126164bff72c1019/regex-2026.2.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eb20c11aa4c3793c9ad04c19a972078cdadb261b8429380364be28e867a843f2", size = 288654, upload-time = "2026-02-19T19:00:21.307Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/3e/6d7c24a2f423c03ad03e3fbddefa431057186ac1c4cb4fa98b03c7f39808/regex-2026.2.19-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db5fd91eec71e7b08de10011a2223d0faa20448d4e1380b9daa179fa7bf58906", size = 793785, upload-time = "2026-02-19T19:00:22.926Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/32/fdb8107504b3122a79bde6705ac1f9d495ed1fe35b87d7cfc1864471999a/regex-2026.2.19-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fdbade8acba71bb45057c2b72f477f0b527c4895f9c83e6cfc30d4a006c21726", size = 860731, upload-time = "2026-02-19T19:00:25.196Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/fd/cc8c6f05868defd840be6e75919b1c3f462357969ac2c2a0958363b4dc23/regex-2026.2.19-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:31a5f561eb111d6aae14202e7043fb0b406d3c8dddbbb9e60851725c9b38ab1d", size = 907350, upload-time = "2026-02-19T19:00:27.093Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/1b/4590db9caa8db3d5a3fe31197c4e42c15aab3643b549ef6a454525fa3a61/regex-2026.2.19-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4584a3ee5f257b71e4b693cc9be3a5104249399f4116fe518c3f79b0c6fc7083", size = 800628, upload-time = "2026-02-19T19:00:29.392Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/05/513eaa5b96fa579fd0b813e19ec047baaaf573d7374ff010fa139b384bf7/regex-2026.2.19-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:196553ba2a2f47904e5dc272d948a746352e2644005627467e055be19d73b39e", size = 773711, upload-time = "2026-02-19T19:00:30.996Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/65/5aed06d8c54563d37fea496cf888be504879a3981a7c8e12c24b2c92c209/regex-2026.2.19-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0c10869d18abb759a3317c757746cc913d6324ce128b8bcec99350df10419f18", size = 783186, upload-time = "2026-02-19T19:00:34.598Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2c/57/79a633ad90f2371b4ef9cd72ba3a69a1a67d0cfaab4fe6fa8586d46044ef/regex-2026.2.19-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e689fed279cbe797a6b570bd18ff535b284d057202692c73420cb93cca41aa32", size = 854854, upload-time = "2026-02-19T19:00:37.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/2d/0f113d477d9e91ec4545ec36c82e58be25038d06788229c91ad52da2b7f5/regex-2026.2.19-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0782bd983f19ac7594039c9277cd6f75c89598c1d72f417e4d30d874105eb0c7", size = 762279, upload-time = "2026-02-19T19:00:39.793Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/cb/237e9fa4f61469fd4f037164dbe8e675a376c88cf73aaaa0aedfd305601c/regex-2026.2.19-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:dbb240c81cfed5d4a67cb86d7676d9f7ec9c3f186310bec37d8a1415210e111e", size = 846172, upload-time = "2026-02-19T19:00:42.134Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/7c/104779c5915cc4eb557a33590f8a3f68089269c64287dd769afd76c7ce61/regex-2026.2.19-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:80d31c3f1fe7e4c6cd1831cd4478a0609903044dfcdc4660abfe6fb307add7f0", size = 789078, upload-time = "2026-02-19T19:00:43.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/73/13b39c7c9356f333e564ab4790b6cb0df125b8e64e8d6474e73da49b1955/regex-2026.2.19-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c1665138776e4ac1aa75146669236f7a8a696433ec4e525abf092ca9189247cc", size = 489541, upload-time = "2026-02-19T19:00:52.728Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/77/fcc7bd9a67000d07fbcc11ed226077287a40d5c84544e62171d29d3ef59c/regex-2026.2.19-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d792b84709021945597e05656aac059526df4e0c9ef60a0eaebb306f8fafcaa8", size = 291414, upload-time = "2026-02-19T19:00:54.51Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/87/3997fc72dc59233426ef2e18dfdd105bb123812fff740ee9cc348f1a3243/regex-2026.2.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:db970bcce4d63b37b3f9eb8c893f0db980bbf1d404a1d8d2b17aa8189de92c53", size = 289140, upload-time = "2026-02-19T19:00:56.841Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f3/d0/b7dd3883ed1cff8ee0c0c9462d828aaf12be63bf5dc55453cbf423523b13/regex-2026.2.19-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:03d706fbe7dfec503c8c3cb76f9352b3e3b53b623672aa49f18a251a6c71b8e6", size = 798767, upload-time = "2026-02-19T19:00:59.014Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/7e/8e2d09103832891b2b735a2515abf377db21144c6dd5ede1fb03c619bf09/regex-2026.2.19-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dbff048c042beef60aa1848961384572c5afb9e8b290b0f1203a5c42cf5af65", size = 864436, upload-time = "2026-02-19T19:01:00.772Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/2e/afea8d23a6db1f67f45e3a0da3057104ce32e154f57dd0c8997274d45fcd/regex-2026.2.19-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccaaf9b907ea6b4223d5cbf5fa5dff5f33dc66f4907a25b967b8a81339a6e332", size = 912391, upload-time = "2026-02-19T19:01:02.865Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/3c/ea5a4687adaba5e125b9bd6190153d0037325a0ba3757cc1537cc2c8dd90/regex-2026.2.19-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:75472631eee7898e16a8a20998d15106cb31cfde21cdf96ab40b432a7082af06", size = 803702, upload-time = "2026-02-19T19:01:05.298Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/c5/624a0705e8473a26488ec1a3a4e0b8763ecfc682a185c302dfec71daea35/regex-2026.2.19-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d89f85a5ccc0cec125c24be75610d433d65295827ebaf0d884cbe56df82d4774", size = 775980, upload-time = "2026-02-19T19:01:07.047Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4d/4b/ed776642533232b5599b7c1f9d817fe11faf597e8a92b7a44b841daaae76/regex-2026.2.19-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0d9f81806abdca3234c3dd582b8a97492e93de3602c8772013cb4affa12d1668", size = 788122, upload-time = "2026-02-19T19:01:08.744Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8c/58/e93e093921d13b9784b4f69896b6e2a9e09580a265c59d9eb95e87d288f2/regex-2026.2.19-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9dadc10d1c2bbb1326e572a226d2ec56474ab8aab26fdb8cf19419b372c349a9", size = 858910, upload-time = "2026-02-19T19:01:10.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/77/ff1d25a0c56cd546e0455cbc93235beb33474899690e6a361fa6b52d265b/regex-2026.2.19-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6bc25d7e15f80c9dc7853cbb490b91c1ec7310808b09d56bd278fe03d776f4f6", size = 764153, upload-time = "2026-02-19T19:01:12.156Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/ef/8ec58df26d52d04443b1dc56f9be4b409f43ed5ae6c0248a287f52311fc4/regex-2026.2.19-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:965d59792f5037d9138da6fed50ba943162160443b43d4895b182551805aff9c", size = 850348, upload-time = "2026-02-19T19:01:14.147Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f5/b3/c42fd5ed91639ce5a4225b9df909180fc95586db071f2bf7c68d2ccbfbe6/regex-2026.2.19-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:38d88c6ed4a09ed61403dbdf515d969ccba34669af3961ceb7311ecd0cef504a", size = 789977, upload-time = "2026-02-19T19:01:15.838Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/2d/a849835e76ac88fcf9e8784e642d3ea635d183c4112150ca91499d6703af/regex-2026.2.19-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8df08decd339e8b3f6a2eb5c05c687fe9d963ae91f352bc57beb05f5b2ac6879", size = 489329, upload-time = "2026-02-19T19:01:23.841Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/da/aa/78ff4666d3855490bae87845a5983485e765e1f970da20adffa2937b241d/regex-2026.2.19-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3aa0944f1dc6e92f91f3b306ba7f851e1009398c84bfd370633182ee4fc26a64", size = 291308, upload-time = "2026-02-19T19:01:25.605Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cd/58/714384efcc07ae6beba528a541f6e99188c5cc1bc0295337f4e8a868296d/regex-2026.2.19-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c13228fbecb03eadbfd8f521732c5fda09ef761af02e920a3148e18ad0e09968", size = 289033, upload-time = "2026-02-19T19:01:27.243Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/75/ec/6438a9344d2869cf5265236a06af1ca6d885e5848b6561e10629bc8e5a11/regex-2026.2.19-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0d0e72703c60d68b18b27cde7cdb65ed2570ae29fb37231aa3076bfb6b1d1c13", size = 798798, upload-time = "2026-02-19T19:01:28.877Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/be/b1ce2d395e3fd2ce5f2fde2522f76cade4297cfe84cd61990ff48308749c/regex-2026.2.19-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:46e69a4bf552e30e74a8aa73f473c87efcb7f6e8c8ece60d9fd7bf13d5c86f02", size = 864444, upload-time = "2026-02-19T19:01:30.933Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/97/a3406460c504f7136f140d9461960c25f058b0240e4424d6fb73c7a067ab/regex-2026.2.19-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8edda06079bd770f7f0cf7f3bba1a0b447b96b4a543c91fe0c142d034c166161", size = 912633, upload-time = "2026-02-19T19:01:32.744Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8b/d9/e5dbef95008d84e9af1dc0faabbc34a7fbc8daa05bc5807c5cf86c2bec49/regex-2026.2.19-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cbc69eae834afbf634f7c902fc72ff3e993f1c699156dd1af1adab5d06b7fe7", size = 803718, upload-time = "2026-02-19T19:01:34.61Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/e5/61d80132690a1ef8dc48e0f44248036877aebf94235d43f63a20d1598888/regex-2026.2.19-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bcf57d30659996ee5c7937999874504c11b5a068edc9515e6a59221cc2744dd1", size = 775975, upload-time = "2026-02-19T19:01:36.525Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/05/32/ae828b3b312c972cf228b634447de27237d593d61505e6ad84723f8eabba/regex-2026.2.19-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8e6e77cd92216eb489e21e5652a11b186afe9bdefca8a2db739fd6b205a9e0a4", size = 788129, upload-time = "2026-02-19T19:01:38.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cb/25/d74f34676f22bec401eddf0e5e457296941e10cbb2a49a571ca7a2c16e5a/regex-2026.2.19-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b9ab8dec42afefa6314ea9b31b188259ffdd93f433d77cad454cd0b8d235ce1c", size = 858818, upload-time = "2026-02-19T19:01:40.409Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/eb/0bc2b01a6b0b264e1406e5ef11cae3f634c3bd1a6e61206fd3227ce8e89c/regex-2026.2.19-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:294c0fb2e87c6bcc5f577c8f609210f5700b993151913352ed6c6af42f30f95f", size = 764186, upload-time = "2026-02-19T19:01:43.009Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/37/5fe5a630d0d99ecf0c3570f8905dafbc160443a2d80181607770086c9812/regex-2026.2.19-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:c0924c64b082d4512b923ac016d6e1dcf647a3560b8a4c7e55cbbd13656cb4ed", size = 850363, upload-time = "2026-02-19T19:01:45.015Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/45/ef68d805294b01ec030cfd388724ba76a5a21a67f32af05b17924520cb0b/regex-2026.2.19-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790dbf87b0361606cb0d79b393c3e8f4436a14ee56568a7463014565d97da02a", size = 790026, upload-time = "2026-02-19T19:01:47.51Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/a2/e0b4575b93bc84db3b1fab24183e008691cd2db5c0ef14ed52681fbd94dd/regex-2026.2.19-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:93d881cab5afdc41a005dba1524a40947d6f7a525057aa64aaf16065cf62faa9", size = 492202, upload-time = "2026-02-19T19:01:54.816Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/b5/b84fec8cbb5f92a7eed2b6b5353a6a9eed9670fee31817c2da9eb85dc797/regex-2026.2.19-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:80caaa1ddcc942ec7be18427354f9d58a79cee82dea2a6b3d4fd83302e1240d7", size = 292884, upload-time = "2026-02-19T19:01:58.254Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/0c/fe89966dfae43da46f475362401f03e4d7dc3a3c955b54f632abc52669e0/regex-2026.2.19-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d793c5b4d2b4c668524cd1651404cfc798d40694c759aec997e196fe9729ec60", size = 291236, upload-time = "2026-02-19T19:01:59.966Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/f7/bda2695134f3e63eb5cccbbf608c2a12aab93d261ff4e2fe49b47fabc948/regex-2026.2.19-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5100acb20648d9efd3f4e7e91f51187f95f22a741dcd719548a6cf4e1b34b3f", size = 807660, upload-time = "2026-02-19T19:02:01.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/56/6e3a4bf5e60d17326b7003d91bbde8938e439256dec211d835597a44972d/regex-2026.2.19-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5e3a31e94d10e52a896adaa3adf3621bd526ad2b45b8c2d23d1bbe74c7423007", size = 873585, upload-time = "2026-02-19T19:02:03.522Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/35/5e/c90c6aa4d1317cc11839359479cfdd2662608f339e84e81ba751c8a4e461/regex-2026.2.19-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8497421099b981f67c99eba4154cf0dfd8e47159431427a11cfb6487f7791d9e", size = 915243, upload-time = "2026-02-19T19:02:05.608Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/7c/981ea0694116793001496aaf9524e5c99e122ec3952d9e7f1878af3a6bf1/regex-2026.2.19-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1e7a08622f7d51d7a068f7e4052a38739c412a3e74f55817073d2e2418149619", size = 812922, upload-time = "2026-02-19T19:02:08.115Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/be/9eda82afa425370ffdb3fa9f3ea42450b9ae4da3ff0a4ec20466f69e371b/regex-2026.2.19-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8abe671cf0f15c26b1ad389bf4043b068ce7d3b1c5d9313e12895f57d6738555", size = 781318, upload-time = "2026-02-19T19:02:10.072Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/d5/50f0bbe56a8199f60a7b6c714e06e54b76b33d31806a69d0703b23ce2a9e/regex-2026.2.19-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5a8f28dd32a4ce9c41758d43b5b9115c1c497b4b1f50c457602c1d571fa98ce1", size = 795649, upload-time = "2026-02-19T19:02:11.96Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/09/d039f081e44a8b0134d0bb2dd805b0ddf390b69d0b58297ae098847c572f/regex-2026.2.19-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:654dc41a5ba9b8cc8432b3f1aa8906d8b45f3e9502442a07c2f27f6c63f85db5", size = 868844, upload-time = "2026-02-19T19:02:14.043Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/53/e2903b79a19ec8557fe7cd21cd093956ff2dbc2e0e33969e3adbe5b184dd/regex-2026.2.19-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:4a02faea614e7fdd6ba8b3bec6c8e79529d356b100381cec76e638f45d12ca04", size = 770113, upload-time = "2026-02-19T19:02:16.161Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/e2/784667767b55714ebb4e59bf106362327476b882c0b2f93c25e84cc99b1a/regex-2026.2.19-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d96162140bb819814428800934c7b71b7bffe81fb6da2d6abc1dcca31741eca3", size = 854922, upload-time = "2026-02-19T19:02:18.155Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/78/9ef4356bd4aed752775bd18071034979b85f035fec51f3a4f9dea497a254/regex-2026.2.19-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c227f2922153ee42bbeb355fd6d009f8c81d9d7bdd666e2276ce41f53ed9a743", size = 799636, upload-time = "2026-02-19T19:02:20.04Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2d/e2/7ad4e76a6dddefc0d64dbe12a4d3ca3947a19ddc501f864a5df2a8222ddd/regex-2026.2.19-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:03d191a9bcf94d31af56d2575210cb0d0c6a054dbcad2ea9e00aa4c42903b919", size = 489306, upload-time = "2026-02-19T19:02:29.058Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/14/95/ee1736135733afbcf1846c58671046f99c4d5170102a150ebb3dd8d701d9/regex-2026.2.19-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:516ee067c6c721d0d0bfb80a2004edbd060fffd07e456d4e1669e38fe82f922e", size = 291218, upload-time = "2026-02-19T19:02:31.083Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/08/180d1826c3d7065200a5168c6b993a44947395c7bb6e04b2c2a219c34225/regex-2026.2.19-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:997862c619994c4a356cb7c3592502cbd50c2ab98da5f61c5c871f10f22de7e5", size = 289097, upload-time = "2026-02-19T19:02:33.485Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/93/0651924c390c5740f5f896723f8ddd946a6c63083a7d8647231c343912ff/regex-2026.2.19-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02b9e1b8a7ebe2807cd7bbdf662510c8e43053a23262b9f46ad4fc2dfc9d204e", size = 799147, upload-time = "2026-02-19T19:02:35.669Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a7/00/2078bd8bcd37d58a756989adbfd9f1d0151b7ca4085a9c2a07e917fbac61/regex-2026.2.19-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6c8fb3b19652e425ff24169dad3ee07f99afa7996caa9dfbb3a9106cd726f49a", size = 865239, upload-time = "2026-02-19T19:02:38.012Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/13/75195161ec16936b35a365fa8c1dd2ab29fd910dd2587765062b174d8cfc/regex-2026.2.19-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50f1ee9488dd7a9fda850ec7c68cad7a32fa49fd19733f5403a3f92b451dcf73", size = 911904, upload-time = "2026-02-19T19:02:40.737Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/96/72/ac42f6012179343d1c4bd0ffee8c948d841cb32ea188d37e96d80527fcc9/regex-2026.2.19-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ab780092b1424d13200aa5a62996e95f65ee3db8509be366437439cdc0af1a9f", size = 803518, upload-time = "2026-02-19T19:02:42.923Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/d1/75a08e2269b007b9783f0f86aa64488e023141219cb5f14dc1e69cda56c6/regex-2026.2.19-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:17648e1a88e72d88641b12635e70e6c71c5136ba14edba29bf8fc6834005a265", size = 775866, upload-time = "2026-02-19T19:02:45.189Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/41/70e7d05faf6994c2ca7a9fcaa536da8f8e4031d45b0ec04b57040ede201f/regex-2026.2.19-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f914ae8c804c8a8a562fe216100bc156bfb51338c1f8d55fe32cf407774359a", size = 788224, upload-time = "2026-02-19T19:02:47.804Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c8/83/34a2dd601f9deb13c20545c674a55f4a05c90869ab73d985b74d639bac43/regex-2026.2.19-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c7e121a918bbee3f12ac300ce0a0d2f2c979cf208fb071ed8df5a6323281915c", size = 859682, upload-time = "2026-02-19T19:02:50.583Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/30/136db9a09a7f222d6e48b806f3730e7af6499a8cad9c72ac0d49d52c746e/regex-2026.2.19-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2fedd459c791da24914ecc474feecd94cf7845efb262ac3134fe27cbd7eda799", size = 764223, upload-time = "2026-02-19T19:02:52.777Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9e/ea/bb947743c78a16df481fa0635c50aa1a439bb80b0e6dc24cd4e49c716679/regex-2026.2.19-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:ea8dfc99689240e61fb21b5fc2828f68b90abf7777d057b62d3166b7c1543c4c", size = 850101, upload-time = "2026-02-19T19:02:55.87Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/25/27/e3bfe6e97a99f7393665926be02fef772da7f8aa59e50bc3134e4262a032/regex-2026.2.19-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fff45852160960f29e184ec8a5be5ab4063cfd0b168d439d1fc4ac3744bf29e", size = 789904, upload-time = "2026-02-19T19:02:58.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/0b/f901cfeb4efd83e4f5c3e9f91a6de77e8e5ceb18555698aca3a27e215ed3/regex-2026.2.19-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:5ec1d7c080832fdd4e150c6f5621fe674c70c63b3ae5a4454cebd7796263b175", size = 492196, upload-time = "2026-02-19T19:03:08.188Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/94/0a/349b959e3da874e15eda853755567b4cde7e5309dbb1e07bfe910cfde452/regex-2026.2.19-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8457c1bc10ee9b29cdfd897ccda41dce6bde0e9abd514bcfef7bcd05e254d411", size = 292878, upload-time = "2026-02-19T19:03:10.272Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/b0/9d81b3c2c5ddff428f8c506713737278979a2c476f6e3675a9c51da0c389/regex-2026.2.19-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:cce8027010d1ffa3eb89a0b19621cdc78ae548ea2b49fea1f7bfb3ea77064c2b", size = 291235, upload-time = "2026-02-19T19:03:12.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/e7/be7818df8691dbe9508c381ea2cc4c1153e4fdb1c4b06388abeaa93bd712/regex-2026.2.19-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:11c138febb40546ff9e026dbbc41dc9fb8b29e61013fa5848ccfe045f5b23b83", size = 807893, upload-time = "2026-02-19T19:03:15.064Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0c/b6/b898a8b983190cfa0276031c17beb73cfd1db07c03c8c37f606d80b655e2/regex-2026.2.19-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:74ff212aa61532246bb3036b3dfea62233414b0154b8bc3676975da78383cac3", size = 873696, upload-time = "2026-02-19T19:03:17.848Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/98/126ba671d54f19080ec87cad228fb4f3cc387fff8c4a01cb4e93f4ff9d94/regex-2026.2.19-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d00c95a2b6bfeb3ea1cb68d1751b1dfce2b05adc2a72c488d77a780db06ab867", size = 915493, upload-time = "2026-02-19T19:03:20.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/10/550c84a1a1a7371867fe8be2bea7df55e797cbca4709974811410e195c5d/regex-2026.2.19-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:311fcccb76af31be4c588d5a17f8f1a059ae8f4b097192896ebffc95612f223a", size = 813094, upload-time = "2026-02-19T19:03:23.287Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/29/fb/ba221d2fc76a27b6b7d7a60f73a7a6a7bac21c6ba95616a08be2bcb434b0/regex-2026.2.19-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:77cfd6b5e7c4e8bf7a39d243ea05882acf5e3c7002b0ef4756de6606893b0ecd", size = 781583, upload-time = "2026-02-19T19:03:26.872Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/f1/af79231301297c9e962679efc04a31361b58dc62dec1fc0cb4b8dd95956a/regex-2026.2.19-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6380f29ff212ec922b6efb56100c089251940e0526a0d05aa7c2d9b571ddf2fe", size = 795875, upload-time = "2026-02-19T19:03:29.223Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/90/1e1d76cb0a2d0a4f38a039993e1c5cd971ae50435d751c5bae4f10e1c302/regex-2026.2.19-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:655f553a1fa3ab8a7fd570eca793408b8d26a80bfd89ed24d116baaf13a38969", size = 868916, upload-time = "2026-02-19T19:03:31.415Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9a/67/a1c01da76dbcfed690855a284c665cc0a370e7d02d1bd635cf9ff7dd74b8/regex-2026.2.19-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:015088b8558502f1f0bccd58754835aa154a7a5b0bd9d4c9b7b96ff4ae9ba876", size = 770386, upload-time = "2026-02-19T19:03:33.972Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/6f/94842bf294f432ff3836bfd91032e2ecabea6d284227f12d1f935318c9c4/regex-2026.2.19-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9e6693b8567a59459b5dda19104c4a4dbbd4a1c78833eacc758796f2cfef1854", size = 855007, upload-time = "2026-02-19T19:03:36.238Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/93/393cd203ca0d1d368f05ce12d2c7e91a324bc93c240db2e6d5ada05835f4/regex-2026.2.19-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:4071209fd4376ab5ceec72ad3507e9d3517c59e38a889079b98916477a871868", size = 799863, upload-time = "2026-02-19T19:03:38.497Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ea/d2/e6ee96b7dff201a83f650241c52db8e5bd080967cb93211f57aa448dc9d6/regex-2026.1.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e3dd93c8f9abe8aa4b6c652016da9a3afa190df5ad822907efe6b206c09896e", size = 488166, upload-time = "2026-01-14T23:13:46.408Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/8a/819e9ce14c9f87af026d0690901b3931f3101160833e5d4c8061fa3a1b67/regex-2026.1.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97499ff7862e868b1977107873dd1a06e151467129159a6ffd07b66706ba3a9f", size = 290632, upload-time = "2026-01-14T23:13:48.688Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d5/c3/23dfe15af25d1d45b07dfd4caa6003ad710dcdcb4c4b279909bdfe7a2de8/regex-2026.1.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bda75ebcac38d884240914c6c43d8ab5fb82e74cde6da94b43b17c411aa4c2b", size = 288500, upload-time = "2026-01-14T23:13:50.503Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/31/1adc33e2f717df30d2f4d973f8776d2ba6ecf939301efab29fca57505c95/regex-2026.1.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7dcc02368585334f5bc81fc73a2a6a0bbade60e7d83da21cead622faf408f32c", size = 781670, upload-time = "2026-01-14T23:13:52.453Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/23/ce/21a8a22d13bc4adcb927c27b840c948f15fc973e21ed2346c1bd0eae22dc/regex-2026.1.15-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:693b465171707bbe882a7a05de5e866f33c76aa449750bee94a8d90463533cc9", size = 850820, upload-time = "2026-01-14T23:13:54.894Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6c/4f/3eeacdf587a4705a44484cd0b30e9230a0e602811fb3e2cc32268c70d509/regex-2026.1.15-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b0d190e6f013ea938623a58706d1469a62103fb2a241ce2873a9906e0386582c", size = 898777, upload-time = "2026-01-14T23:13:56.908Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/a9/1898a077e2965c35fc22796488141a22676eed2d73701e37c73ad7c0b459/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ff818702440a5878a81886f127b80127f5d50563753a28211482867f8318106", size = 791750, upload-time = "2026-01-14T23:13:58.527Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/84/e31f9d149a178889b3817212827f5e0e8c827a049ff31b4b381e76b26e2d/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f052d1be37ef35a54e394de66136e30fa1191fab64f71fc06ac7bc98c9a84618", size = 782674, upload-time = "2026-01-14T23:13:59.874Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/ff/adf60063db24532add6a1676943754a5654dcac8237af024ede38244fd12/regex-2026.1.15-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6bfc31a37fd1592f0c4fc4bfc674b5c42e52efe45b4b7a6a14f334cca4bcebe4", size = 767906, upload-time = "2026-01-14T23:14:01.298Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/3e/e6a216cee1e2780fec11afe7fc47b6f3925d7264e8149c607ac389fd9b1a/regex-2026.1.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3d6ce5ae80066b319ae3bc62fd55a557c9491baa5efd0d355f0de08c4ba54e79", size = 774798, upload-time = "2026-01-14T23:14:02.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/98/23a4a8378a9208514ed3efc7e7850c27fa01e00ed8557c958df0335edc4a/regex-2026.1.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1704d204bd42b6bb80167df0e4554f35c255b579ba99616def38f69e14a5ccb9", size = 845861, upload-time = "2026-01-14T23:14:04.824Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/57/d7605a9d53bd07421a8785d349cd29677fe660e13674fa4c6cbd624ae354/regex-2026.1.15-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:e3174a5ed4171570dc8318afada56373aa9289eb6dc0d96cceb48e7358b0e220", size = 755648, upload-time = "2026-01-14T23:14:06.371Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6f/76/6f2e24aa192da1e299cc1101674a60579d3912391867ce0b946ba83e2194/regex-2026.1.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:87adf5bd6d72e3e17c9cb59ac4096b1faaf84b7eb3037a5ffa61c4b4370f0f13", size = 836250, upload-time = "2026-01-14T23:14:08.343Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/11/3a/1f2a1d29453299a7858eab7759045fc3d9d1b429b088dec2dc85b6fa16a2/regex-2026.1.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e85dc94595f4d766bd7d872a9de5ede1ca8d3063f3bdf1e2c725f5eb411159e3", size = 779919, upload-time = "2026-01-14T23:14:09.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/c9/0c80c96eab96948363d270143138d671d5731c3a692b417629bf3492a9d6/regex-2026.1.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ae6020fb311f68d753b7efa9d4b9a5d47a5d6466ea0d5e3b5a471a960ea6e4a", size = 488168, upload-time = "2026-01-14T23:14:16.129Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/17/f0/271c92f5389a552494c429e5cc38d76d1322eb142fb5db3c8ccc47751468/regex-2026.1.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eddf73f41225942c1f994914742afa53dc0d01a6e20fe14b878a1b1edc74151f", size = 290636, upload-time = "2026-01-14T23:14:17.715Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a0/f9/5f1fd077d106ca5655a0f9ff8f25a1ab55b92128b5713a91ed7134ff688e/regex-2026.1.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e8cd52557603f5c66a548f69421310886b28b7066853089e1a71ee710e1cdc1", size = 288496, upload-time = "2026-01-14T23:14:19.326Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/e1/8f43b03a4968c748858ec77f746c286d81f896c2e437ccf050ebc5d3128c/regex-2026.1.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5170907244b14303edc5978f522f16c974f32d3aa92109fabc2af52411c9433b", size = 793503, upload-time = "2026-01-14T23:14:20.922Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/4e/a39a5e8edc5377a46a7c875c2f9a626ed3338cb3bb06931be461c3e1a34a/regex-2026.1.15-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2748c1ec0663580b4510bd89941a31560b4b439a0b428b49472a3d9944d11cd8", size = 860535, upload-time = "2026-01-14T23:14:22.405Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/1c/9dce667a32a9477f7a2869c1c767dc00727284a9fa3ff5c09a5c6c03575e/regex-2026.1.15-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2f2775843ca49360508d080eaa87f94fa248e2c946bbcd963bb3aae14f333413", size = 907225, upload-time = "2026-01-14T23:14:23.897Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a4/3c/87ca0a02736d16b6262921425e84b48984e77d8e4e572c9072ce96e66c30/regex-2026.1.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9ea2604370efc9a174c1b5dcc81784fb040044232150f7f33756049edfc9026", size = 800526, upload-time = "2026-01-14T23:14:26.039Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/ff/647d5715aeea7c87bdcbd2f578f47b415f55c24e361e639fe8c0cc88878f/regex-2026.1.15-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0dcd31594264029b57bf16f37fd7248a70b3b764ed9e0839a8f271b2d22c0785", size = 773446, upload-time = "2026-01-14T23:14:28.109Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/89/bf22cac25cb4ba0fe6bff52ebedbb65b77a179052a9d6037136ae93f42f4/regex-2026.1.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c08c1f3e34338256732bd6938747daa3c0d5b251e04b6e43b5813e94d503076e", size = 783051, upload-time = "2026-01-14T23:14:29.929Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/f4/6ed03e71dca6348a5188363a34f5e26ffd5db1404780288ff0d79513bce4/regex-2026.1.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e43a55f378df1e7a4fa3547c88d9a5a9b7113f653a66821bcea4718fe6c58763", size = 854485, upload-time = "2026-01-14T23:14:31.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d9/9a/8e8560bd78caded8eb137e3e47612430a05b9a772caf60876435192d670a/regex-2026.1.15-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:f82110ab962a541737bd0ce87978d4c658f06e7591ba899192e2712a517badbb", size = 762195, upload-time = "2026-01-14T23:14:32.802Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/6b/61fc710f9aa8dfcd764fe27d37edfaa023b1a23305a0d84fccd5adb346ea/regex-2026.1.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:27618391db7bdaf87ac6c92b31e8f0dfb83a9de0075855152b720140bda177a2", size = 845986, upload-time = "2026-01-14T23:14:34.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fd/2e/fbee4cb93f9d686901a7ca8d94285b80405e8c34fe4107f63ffcbfb56379/regex-2026.1.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bfb0d6be01fbae8d6655c8ca21b3b72458606c4aec9bbc932db758d47aba6db1", size = 788992, upload-time = "2026-01-14T23:14:37.116Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = "2026-01-14T23:14:52.952Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/2e/6870bb16e982669b674cce3ee9ff2d1d46ab80528ee6bcc20fb2292efb60/regex-2026.1.15-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e69d0deeb977ffe7ed3d2e4439360089f9c3f217ada608f0f88ebd67afb6385e", size = 489164, upload-time = "2026-01-14T23:15:13.962Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dc/67/9774542e203849b0286badf67199970a44ebdb0cc5fb739f06e47ada72f8/regex-2026.1.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3601ffb5375de85a16f407854d11cca8fe3f5febbe3ac78fb2866bb220c74d10", size = 291218, upload-time = "2026-01-14T23:15:15.647Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/87/b0cda79f22b8dee05f774922a214da109f9a4c0eca5da2c9d72d77ea062c/regex-2026.1.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4c5ef43b5c2d4114eb8ea424bb8c9cec01d5d17f242af88b2448f5ee81caadbc", size = 288895, upload-time = "2026-01-14T23:15:17.788Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/6a/0041f0a2170d32be01ab981d6346c83a8934277d82c780d60b127331f264/regex-2026.1.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:968c14d4f03e10b2fd960f1d5168c1f0ac969381d3c1fcc973bc45fb06346599", size = 798680, upload-time = "2026-01-14T23:15:19.342Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/de/30e1cfcdbe3e891324aa7568b7c968771f82190df5524fabc1138cb2d45a/regex-2026.1.15-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56a5595d0f892f214609c9f76b41b7428bed439d98dc961efafdd1354d42baae", size = 864210, upload-time = "2026-01-14T23:15:22.005Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/64/44/4db2f5c5ca0ccd40ff052ae7b1e9731352fcdad946c2b812285a7505ca75/regex-2026.1.15-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf650f26087363434c4e560011f8e4e738f6f3e029b85d4904c50135b86cfa5", size = 912358, upload-time = "2026-01-14T23:15:24.569Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/79/b6/e6a5665d43a7c42467138c8a2549be432bad22cbd206f5ec87162de74bd7/regex-2026.1.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18388a62989c72ac24de75f1449d0fb0b04dfccd0a1a7c1c43af5eb503d890f6", size = 803583, upload-time = "2026-01-14T23:15:26.526Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/53/7cd478222169d85d74d7437e74750005e993f52f335f7c04ff7adfda3310/regex-2026.1.15-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d220a2517f5893f55daac983bfa9fe998a7dbcaee4f5d27a88500f8b7873788", size = 775782, upload-time = "2026-01-14T23:15:29.352Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/b5/75f9a9ee4b03a7c009fe60500fe550b45df94f0955ca29af16333ef557c5/regex-2026.1.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9c08c2fbc6120e70abff5d7f28ffb4d969e14294fb2143b4b5c7d20e46d1714", size = 787978, upload-time = "2026-01-14T23:15:31.295Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/b3/79821c826245bbe9ccbb54f6eadb7879c722fd3e0248c17bfc90bf54e123/regex-2026.1.15-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7ef7d5d4bd49ec7364315167a4134a015f61e8266c6d446fc116a9ac4456e10d", size = 858550, upload-time = "2026-01-14T23:15:33.558Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4a/85/2ab5f77a1c465745bfbfcb3ad63178a58337ae8d5274315e2cc623a822fa/regex-2026.1.15-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:6e42844ad64194fa08d5ccb75fe6a459b9b08e6d7296bd704460168d58a388f3", size = 763747, upload-time = "2026-01-14T23:15:35.206Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/84/c27df502d4bfe2873a3e3a7cf1bdb2b9cc10284d1a44797cf38bed790470/regex-2026.1.15-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cfecdaa4b19f9ca534746eb3b55a5195d5c95b88cac32a205e981ec0a22b7d31", size = 850615, upload-time = "2026-01-14T23:15:37.523Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7d/b7/658a9782fb253680aa8ecb5ccbb51f69e088ed48142c46d9f0c99b46c575/regex-2026.1.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:08df9722d9b87834a3d701f3fca570b2be115654dbfd30179f30ab2f39d606d3", size = 789951, upload-time = "2026-01-14T23:15:39.582Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/38/0cfd5a78e5c6db00e6782fdae70458f89850ce95baa5e8694ab91d89744f/regex-2026.1.15-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ec94c04149b6a7b8120f9f44565722c7ae31b7a6d2275569d2eefa76b83da3be", size = 492068, upload-time = "2026-01-14T23:15:47.616Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/50/72/6c86acff16cb7c959c4355826bbf06aad670682d07c8f3998d9ef4fee7cd/regex-2026.1.15-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:40c86d8046915bb9aeb15d3f3f15b6fd500b8ea4485b30e1bbc799dab3fe29f8", size = 292756, upload-time = "2026-01-14T23:15:49.307Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4e/58/df7fb69eadfe76526ddfce28abdc0af09ffe65f20c2c90932e89d705153f/regex-2026.1.15-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:726ea4e727aba21643205edad8f2187ec682d3305d790f73b7a51c7587b64bdd", size = 291114, upload-time = "2026-01-14T23:15:51.484Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/6c/a4011cd1cf96b90d2cdc7e156f91efbd26531e822a7fbb82a43c1016678e/regex-2026.1.15-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb740d044aff31898804e7bf1181cc72c03d11dfd19932b9911ffc19a79070a", size = 807524, upload-time = "2026-01-14T23:15:53.102Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/25/a53ffb73183f69c3e9f4355c4922b76d2840aee160af6af5fac229b6201d/regex-2026.1.15-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05d75a668e9ea16f832390d22131fe1e8acc8389a694c8febc3e340b0f810b93", size = 873455, upload-time = "2026-01-14T23:15:54.956Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/66/0b/8b47fc2e8f97d9b4a851736f3890a5f786443aa8901061c55f24c955f45b/regex-2026.1.15-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d991483606f3dbec93287b9f35596f41aa2e92b7c2ebbb935b63f409e243c9af", size = 915007, upload-time = "2026-01-14T23:15:57.041Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/fa/97de0d681e6d26fabe71968dbee06dd52819e9a22fdce5dac7256c31ed84/regex-2026.1.15-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:194312a14819d3e44628a44ed6fea6898fdbecb0550089d84c403475138d0a09", size = 812794, upload-time = "2026-01-14T23:15:58.916Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/38/e752f94e860d429654aa2b1c51880bff8dfe8f084268258adf9151cf1f53/regex-2026.1.15-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe2fda4110a3d0bc163c2e0664be44657431440722c5c5315c65155cab92f9e5", size = 781159, upload-time = "2026-01-14T23:16:00.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e9/a7/d739ffaef33c378fc888302a018d7f81080393d96c476b058b8c64fd2b0d/regex-2026.1.15-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:124dc36c85d34ef2d9164da41a53c1c8c122cfb1f6e1ec377a1f27ee81deb794", size = 795558, upload-time = "2026-01-14T23:16:03.267Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3e/c4/542876f9a0ac576100fc73e9c75b779f5c31e3527576cfc9cb3009dcc58a/regex-2026.1.15-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:a1774cd1981cd212506a23a14dba7fdeaee259f5deba2df6229966d9911e767a", size = 868427, upload-time = "2026-01-14T23:16:05.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fc/0f/d5655bea5b22069e32ae85a947aa564912f23758e112cdb74212848a1a1b/regex-2026.1.15-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b5f7d8d2867152cdb625e72a530d2ccb48a3d199159144cbdd63870882fb6f80", size = 769939, upload-time = "2026-01-14T23:16:07.542Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/06/7e18a4fa9d326daeda46d471a44ef94201c46eaa26dbbb780b5d92cbfdda/regex-2026.1.15-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:492534a0ab925d1db998defc3c302dae3616a2fc3fe2e08db1472348f096ddf2", size = 854753, upload-time = "2026-01-14T23:16:10.395Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3b/67/dc8946ef3965e166f558ef3b47f492bc364e96a265eb4a2bb3ca765c8e46/regex-2026.1.15-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c661fc820cfb33e166bf2450d3dadbda47c8d8981898adb9b6fe24e5e582ba60", size = 799559, upload-time = "2026-01-14T23:16:12.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/0a/47fa888ec7cbbc7d62c5f2a6a888878e76169170ead271a35239edd8f0e8/regex-2026.1.15-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:d920392a6b1f353f4aa54328c867fec3320fa50657e25f64abf17af054fc97ac", size = 489170, upload-time = "2026-01-14T23:16:19.835Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/c4/d000e9b7296c15737c9301708e9e7fbdea009f8e93541b6b43bdb8219646/regex-2026.1.15-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b5a28980a926fa810dbbed059547b02783952e2efd9c636412345232ddb87ff6", size = 291146, upload-time = "2026-01-14T23:16:21.541Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/b6/921cc61982e538682bdf3bdf5b2c6ab6b34368da1f8e98a6c1ddc503c9cf/regex-2026.1.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:621f73a07595d83f28952d7bd1e91e9d1ed7625fb7af0064d3516674ec93a2a2", size = 288986, upload-time = "2026-01-14T23:16:23.381Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/33/eb7383dde0bbc93f4fb9d03453aab97e18ad4024ac7e26cef8d1f0a2cff0/regex-2026.1.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d7d92495f47567a9b1669c51fc8d6d809821849063d168121ef801bbc213846", size = 799098, upload-time = "2026-01-14T23:16:25.088Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/56/b664dccae898fc8d8b4c23accd853f723bde0f026c747b6f6262b688029c/regex-2026.1.15-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8dd16fba2758db7a3780a051f245539c4451ca20910f5a5e6ea1c08d06d4a76b", size = 864980, upload-time = "2026-01-14T23:16:27.297Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/40/0999e064a170eddd237bae9ccfcd8f28b3aa98a38bf727a086425542a4fc/regex-2026.1.15-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1e1808471fbe44c1a63e5f577a1d5f02fe5d66031dcbdf12f093ffc1305a858e", size = 911607, upload-time = "2026-01-14T23:16:29.235Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/78/c77f644b68ab054e5a674fb4da40ff7bffb2c88df58afa82dbf86573092d/regex-2026.1.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0751a26ad39d4f2ade8fe16c59b2bf5cb19eb3d2cd543e709e583d559bd9efde", size = 803358, upload-time = "2026-01-14T23:16:31.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/31/d4292ea8566eaa551fafc07797961c5963cf5235c797cc2ae19b85dfd04d/regex-2026.1.15-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0f0c7684c7f9ca241344ff95a1de964f257a5251968484270e91c25a755532c5", size = 775833, upload-time = "2026-01-14T23:16:33.141Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/b2/cff3bf2fea4133aa6fb0d1e370b37544d18c8350a2fa118c7e11d1db0e14/regex-2026.1.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:74f45d170a21df41508cb67165456538425185baaf686281fa210d7e729abc34", size = 788045, upload-time = "2026-01-14T23:16:35.005Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/99/2cb9b69045372ec877b6f5124bda4eb4253bc58b8fe5848c973f752bc52c/regex-2026.1.15-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:f1862739a1ffb50615c0fde6bae6569b5efbe08d98e59ce009f68a336f64da75", size = 859374, upload-time = "2026-01-14T23:16:36.919Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/09/16/710b0a5abe8e077b1729a562d2f297224ad079f3a66dce46844c193416c8/regex-2026.1.15-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:453078802f1b9e2b7303fb79222c054cb18e76f7bdc220f7530fdc85d319f99e", size = 763940, upload-time = "2026-01-14T23:16:38.685Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/d1/7585c8e744e40eb3d32f119191969b91de04c073fca98ec14299041f6e7e/regex-2026.1.15-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:a30a68e89e5a218b8b23a52292924c1f4b245cb0c68d1cce9aec9bbda6e2c160", size = 850112, upload-time = "2026-01-14T23:16:40.646Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/af/d6/43e1dd85df86c49a347aa57c1f69d12c652c7b60e37ec162e3096194a278/regex-2026.1.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9479cae874c81bf610d72b85bb681a94c95722c127b55445285fb0e2c82db8e1", size = 789586, upload-time = "2026-01-14T23:16:42.799Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ad/77/0b1e81857060b92b9cad239104c46507dd481b3ff1fa79f8e7f865aae38a/regex-2026.1.15-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:ee6854c9000a10938c79238de2379bea30c82e4925a371711af45387df35cab8", size = 492073, upload-time = "2026-01-14T23:16:51.154Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/70/f3/f8302b0c208b22c1e4f423147e1913fd475ddd6230565b299925353de644/regex-2026.1.15-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c2b80399a422348ce5de4fe40c418d6299a0fa2803dd61dc0b1a2f28e280fcf", size = 292757, upload-time = "2026-01-14T23:16:53.08Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bf/f0/ef55de2460f3b4a6da9d9e7daacd0cb79d4ef75c64a2af316e68447f0df0/regex-2026.1.15-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:dca3582bca82596609959ac39e12b7dad98385b4fefccb1151b937383cec547d", size = 291122, upload-time = "2026-01-14T23:16:55.383Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/55/bb8ccbacabbc3a11d863ee62a9f18b160a83084ea95cdfc5d207bfc3dd75/regex-2026.1.15-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71d476caa6692eea743ae5ea23cde3260677f70122c4d258ca952e5c2d4e84", size = 807761, upload-time = "2026-01-14T23:16:57.251Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8f/84/f75d937f17f81e55679a0509e86176e29caa7298c38bd1db7ce9c0bf6075/regex-2026.1.15-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c243da3436354f4af6c3058a3f81a97d47ea52c9bd874b52fd30274853a1d5df", size = 873538, upload-time = "2026-01-14T23:16:59.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b8/d9/0da86327df70349aa8d86390da91171bd3ca4f0e7c1d1d453a9c10344da3/regex-2026.1.15-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8355ad842a7c7e9e5e55653eade3b7d1885ba86f124dd8ab1f722f9be6627434", size = 915066, upload-time = "2026-01-14T23:17:01.607Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2a/5e/f660fb23fc77baa2a61aa1f1fe3a4eea2bbb8a286ddec148030672e18834/regex-2026.1.15-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f192a831d9575271a22d804ff1a5355355723f94f31d9eef25f0d45a152fdc1a", size = 812938, upload-time = "2026-01-14T23:17:04.366Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/33/a47a29bfecebbbfd1e5cd3f26b28020a97e4820f1c5148e66e3b7d4b4992/regex-2026.1.15-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:166551807ec20d47ceaeec380081f843e88c8949780cd42c40f18d16168bed10", size = 781314, upload-time = "2026-01-14T23:17:06.378Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/ec/7ec2bbfd4c3f4e494a24dec4c6943a668e2030426b1b8b949a6462d2c17b/regex-2026.1.15-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f9ca1cbdc0fbfe5e6e6f8221ef2309988db5bcede52443aeaee9a4ad555e0dac", size = 795652, upload-time = "2026-01-14T23:17:08.521Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/46/79/a5d8651ae131fe27d7c521ad300aa7f1c7be1dbeee4d446498af5411b8a9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b30bcbd1e1221783c721483953d9e4f3ab9c5d165aa709693d3f3946747b1aea", size = 868550, upload-time = "2026-01-14T23:17:10.573Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/b7/25635d2809664b79f183070786a5552dd4e627e5aedb0065f4e3cf8ee37d/regex-2026.1.15-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2a8d7b50c34578d0d3bf7ad58cde9652b7d683691876f83aedc002862a35dc5e", size = 769981, upload-time = "2026-01-14T23:17:12.871Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/8b/fc3fcbb2393dcfa4a6c5ffad92dc498e842df4581ea9d14309fcd3c55fb9/regex-2026.1.15-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9d787e3310c6a6425eb346be4ff2ccf6eece63017916fd77fe8328c57be83521", size = 854780, upload-time = "2026-01-14T23:17:14.837Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/38/dde117c76c624713c8a2842530be9c93ca8b606c0f6102d86e8cd1ce8bea/regex-2026.1.15-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:619843841e220adca114118533a574a9cd183ed8a28b85627d2844c500a2b0db", size = 799778, upload-time = "2026-01-14T23:17:17.369Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4631,24 +4618,24 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.15.4"
|
||||
version = "0.15.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/da/31/d6e536cdebb6568ae75a7f00e4b4819ae0ad2640c3604c305a0428680b0c/ruff-0.15.4.tar.gz", hash = "sha256:3412195319e42d634470cc97aa9803d07e9d5c9223b99bcb1518f0c725f26ae1", size = 4569550, upload-time = "2026-02-26T20:04:14.959Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/c8/39/5cee96809fbca590abea6b46c6d1c586b49663d1d2830a751cc8fc42c666/ruff-0.15.0.tar.gz", hash = "sha256:6bdea47cdbea30d40f8f8d7d69c0854ba7c15420ec75a26f463290949d7f7e9a", size = 4524893, upload-time = "2026-02-03T17:53:35.357Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/f2/82/c11a03cfec3a4d26a0ea1e571f0f44be5993b923f905eeddfc397c13d360/ruff-0.15.4-py3-none-linux_armv6l.whl", hash = "sha256:a1810931c41606c686bae8b5b9a8072adac2f611bb433c0ba476acba17a332e0", size = 10453333, upload-time = "2026-02-26T20:04:20.093Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/5d/6a1f271f6e31dffb31855996493641edc3eef8077b883eaf007a2f1c2976/ruff-0.15.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5a1632c66672b8b4d3e1d1782859e98d6e0b4e70829530666644286600a33992", size = 10853356, upload-time = "2026-02-26T20:04:05.808Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/d8/0fab9f8842b83b1a9c2bf81b85063f65e93fb512e60effa95b0be49bfc54/ruff-0.15.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a4386ba2cd6c0f4ff75252845906acc7c7c8e1ac567b7bc3d373686ac8c222ba", size = 10187434, upload-time = "2026-02-26T20:03:54.656Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/85/cc/cc220fd9394eff5db8d94dec199eec56dd6c9f3651d8869d024867a91030/ruff-0.15.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2496488bdfd3732747558b6f95ae427ff066d1fcd054daf75f5a50674411e75", size = 10535456, upload-time = "2026-02-26T20:03:52.738Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/0f/bced38fa5cf24373ec767713c8e4cadc90247f3863605fb030e597878661/ruff-0.15.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f1c4893841ff2d54cbda1b2860fa3260173df5ddd7b95d370186f8a5e66a4ac", size = 10287772, upload-time = "2026-02-26T20:04:08.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/90/58a1802d84fed15f8f281925b21ab3cecd813bde52a8ca033a4de8ab0e7a/ruff-0.15.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:820b8766bd65503b6c30aaa6331e8ef3a6e564f7999c844e9a547c40179e440a", size = 11049051, upload-time = "2026-02-26T20:04:03.53Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d2/ac/b7ad36703c35f3866584564dc15f12f91cb1a26a897dc2fd13d7cb3ae1af/ruff-0.15.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9fb74bab47139c1751f900f857fa503987253c3ef89129b24ed375e72873e85", size = 11890494, upload-time = "2026-02-26T20:04:10.497Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/93/3d/3eb2f47a39a8b0da99faf9c54d3eb24720add1e886a5309d4d1be73a6380/ruff-0.15.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f80c98765949c518142b3a50a5db89343aa90f2c2bf7799de9986498ae6176db", size = 11326221, upload-time = "2026-02-26T20:04:12.84Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/90/bf134f4c1e5243e62690e09d63c55df948a74084c8ac3e48a88468314da6/ruff-0.15.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451a2e224151729b3b6c9ffb36aed9091b2996fe4bdbd11f47e27d8f2e8888ec", size = 11168459, upload-time = "2026-02-26T20:04:00.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b5/e5/a64d27688789b06b5d55162aafc32059bb8c989c61a5139a36e1368285eb/ruff-0.15.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:a8f157f2e583c513c4f5f896163a93198297371f34c04220daf40d133fdd4f7f", size = 11104366, upload-time = "2026-02-26T20:03:48.099Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f1/f6/32d1dcb66a2559763fc3027bdd65836cad9eb09d90f2ed6a63d8e9252b02/ruff-0.15.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:917cc68503357021f541e69b35361c99387cdbbf99bd0ea4aa6f28ca99ff5338", size = 10510887, upload-time = "2026-02-26T20:03:45.771Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/92/22d1ced50971c5b6433aed166fcef8c9343f567a94cf2b9d9089f6aa80fe/ruff-0.15.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:e9737c8161da79fd7cfec19f1e35620375bd8b2a50c3e77fa3d2c16f574105cc", size = 10285939, upload-time = "2026-02-26T20:04:22.42Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e6/f4/7c20aec3143837641a02509a4668fb146a642fd1211846634edc17eb5563/ruff-0.15.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:291258c917539e18f6ba40482fe31d6f5ac023994ee11d7bdafd716f2aab8a68", size = 10765471, upload-time = "2026-02-26T20:03:58.924Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/09/6d2f7586f09a16120aebdff8f64d962d7c4348313c77ebb29c566cefc357/ruff-0.15.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3f83c45911da6f2cd5936c436cf86b9f09f09165f033a99dcf7477e34041cbc3", size = 11263382, upload-time = "2026-02-26T20:04:24.424Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/88/3fd1b0aa4b6330d6aaa63a285bc96c9f71970351579152d231ed90914586/ruff-0.15.0-py3-none-linux_armv6l.whl", hash = "sha256:aac4ebaa612a82b23d45964586f24ae9bc23ca101919f5590bdb368d74ad5455", size = 10354332, upload-time = "2026-02-03T17:52:54.892Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/72/f6/62e173fbb7eb75cc29fe2576a1e20f0a46f671a2587b5f604bfb0eaf5f6f/ruff-0.15.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:dcd4be7cc75cfbbca24a98d04d0b9b36a270d0833241f776b788d59f4142b14d", size = 10767189, upload-time = "2026-02-03T17:53:19.778Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/e4/968ae17b676d1d2ff101d56dc69cf333e3a4c985e1ec23803df84fc7bf9e/ruff-0.15.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d747e3319b2bce179c7c1eaad3d884dc0a199b5f4d5187620530adf9105268ce", size = 10075384, upload-time = "2026-02-03T17:53:29.241Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/bf/9843c6044ab9e20af879c751487e61333ca79a2c8c3058b15722386b8cae/ruff-0.15.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:650bd9c56ae03102c51a5e4b554d74d825ff3abe4db22b90fd32d816c2e90621", size = 10481363, upload-time = "2026-02-03T17:52:43.332Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/55/d9/4ada5ccf4cd1f532db1c8d44b6f664f2208d3d93acbeec18f82315e15193/ruff-0.15.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a6664b7eac559e3048223a2da77769c2f92b43a6dfd4720cef42654299a599c9", size = 10187736, upload-time = "2026-02-03T17:53:00.522Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/e2/f25eaecd446af7bb132af0a1d5b135a62971a41f5366ff41d06d25e77a91/ruff-0.15.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f811f97b0f092b35320d1556f3353bf238763420ade5d9e62ebd2b73f2ff179", size = 10968415, upload-time = "2026-02-03T17:53:15.705Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/dc/f06a8558d06333bf79b497d29a50c3a673d9251214e0d7ec78f90b30aa79/ruff-0.15.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:761ec0a66680fab6454236635a39abaf14198818c8cdf691e036f4bc0f406b2d", size = 11809643, upload-time = "2026-02-03T17:53:23.031Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/dd/45/0ece8db2c474ad7df13af3a6d50f76e22a09d078af63078f005057ca59eb/ruff-0.15.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:940f11c2604d317e797b289f4f9f3fa5555ffe4fb574b55ed006c3d9b6f0eb78", size = 11234787, upload-time = "2026-02-03T17:52:46.432Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/d9/0e3a81467a120fd265658d127db648e4d3acfe3e4f6f5d4ea79fac47e587/ruff-0.15.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcbca3d40558789126da91d7ef9a7c87772ee107033db7191edefa34e2c7f1b4", size = 11112797, upload-time = "2026-02-03T17:52:49.274Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b2/cb/8c0b3b0c692683f8ff31351dfb6241047fa873a4481a76df4335a8bff716/ruff-0.15.0-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:9a121a96db1d75fa3eb39c4539e607f628920dd72ff1f7c5ee4f1b768ac62d6e", size = 11033133, upload-time = "2026-02-03T17:53:33.105Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/5e/23b87370cf0f9081a8c89a753e69a4e8778805b8802ccfe175cc410e50b9/ruff-0.15.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5298d518e493061f2eabd4abd067c7e4fb89e2f63291c94332e35631c07c3662", size = 10442646, upload-time = "2026-02-03T17:53:06.278Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/9a/3c94de5ce642830167e6d00b5c75aacd73e6347b4c7fc6828699b150a5ee/ruff-0.15.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afb6e603d6375ff0d6b0cee563fa21ab570fd15e65c852cb24922cef25050cf1", size = 10195750, upload-time = "2026-02-03T17:53:26.084Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/30/15/e396325080d600b436acc970848d69df9c13977942fb62bb8722d729bee8/ruff-0.15.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:77e515f6b15f828b94dc17d2b4ace334c9ddb7d9468c54b2f9ed2b9c1593ef16", size = 10676120, upload-time = "2026-02-03T17:53:09.363Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/8d/c9/229a23d52a2983de1ad0fb0ee37d36e0257e6f28bfd6b498ee2c76361874/ruff-0.15.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6f6e80850a01eb13b3e42ee0ebdf6e4497151b48c35051aab51c101266d187a3", size = 11201636, upload-time = "2026-02-03T17:52:57.281Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4839,7 +4826,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "sentence-transformers"
|
||||
version = "5.2.3"
|
||||
version = "5.2.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "huggingface-hub", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -4854,9 +4841,9 @@ dependencies = [
|
||||
{ name = "transformers", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "typing-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/5b/30/21664028fc0776eb1ca024879480bbbab36f02923a8ff9e4cae5a150fa35/sentence_transformers-5.2.3.tar.gz", hash = "sha256:3cd3044e1f3fe859b6a1b66336aac502eaae5d3dd7d5c8fc237f37fbf58137c7", size = 381623, upload-time = "2026-02-17T14:05:20.238Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/a6/bc/0bc9c0ec1cf83ab2ec6e6f38667d167349b950fff6dd2086b79bd360eeca/sentence_transformers-5.2.2.tar.gz", hash = "sha256:7033ee0a24bc04c664fd490abf2ef194d387b3a58a97adcc528783ff505159fa", size = 381607, upload-time = "2026-01-27T11:11:02.658Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/46/9f/dba4b3e18ebbe1eaa29d9f1764fbc7da0cd91937b83f2b7928d15c5d2d36/sentence_transformers-5.2.3-py3-none-any.whl", hash = "sha256:6437c62d4112b615ddebda362dfc16a4308d604c5b68125ed586e3e95d5b2e30", size = 494225, upload-time = "2026-02-17T14:05:18.596Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/21/7e925890636791386e81b52878134f114d63072e79fffe14cdcc5e7a5e6a/sentence_transformers-5.2.2-py3-none-any.whl", hash = "sha256:280ac54bffb84c110726b4d8848ba7b7c60813b9034547f8aea6e9a345cd1c23", size = 494106, upload-time = "2026-01-27T11:11:00.983Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5145,15 +5132,6 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/0d/c1ad6f4016a3968c048545f5d9b8ffebf577774b2ede3e2e352553b685fe/tiktoken-0.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5edb8743b88d5be814b1a8a8854494719080c28faaa1ccbef02e87354fe71ef0", size = 1253706, upload-time = "2025-10-06T20:22:33.385Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tinytag"
|
||||
version = "2.2.0"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/98/07/fb260bac73119f369a10e884016516d07cd760b5068e703773f83dd5e7bf/tinytag-2.2.0.tar.gz", hash = "sha256:f15b082510f6e0fc717e597edc8759d6f2d3ff6194ac0f3bcd675a9a09d9b798", size = 38120, upload-time = "2025-12-15T21:10:19.093Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/e2/9818fcebb348237389d2ac2fea97cf2b2638378a0866105a45ae9be49728/tinytag-2.2.0-py3-none-any.whl", hash = "sha256:d2cf3ef8ee0f6c854663f77d9d5f8159ee1c834c70f5ea4f214ddc4af8148f79", size = 32861, upload-time = "2025-12-15T21:10:17.63Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokenizers"
|
||||
version = "0.22.2"
|
||||
@@ -5502,11 +5480,11 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "types-markdown"
|
||||
version = "3.10.2.20260211"
|
||||
version = "3.10.0.20251106"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/6d/2e/35b30a09f6ee8a69142408d3ceb248c4454aa638c0a414d8704a3ef79563/types_markdown-3.10.2.20260211.tar.gz", hash = "sha256:66164310f88c11a58c6c706094c6f8c537c418e3525d33b76276a5fbd66b01ce", size = 19768, upload-time = "2026-02-11T04:19:29.497Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/de/e4/060f0dadd9b551cae77d6407f2bc84b168f918d90650454aff219c1b3ed2/types_markdown-3.10.0.20251106.tar.gz", hash = "sha256:12836f7fcbd7221db8baeb0d3a2f820b95050d0824bfa9665c67b4d144a1afa1", size = 19486, upload-time = "2025-11-06T03:06:44.317Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/54/c9/659fa2df04b232b0bfcd05d2418e683080e91ec68f636f3c0a5a267350e7/types_markdown-3.10.2.20260211-py3-none-any.whl", hash = "sha256:2d94d08587e3738203b3c4479c449845112b171abe8b5cadc9b0c12fcf3e99da", size = 25854, upload-time = "2026-02-11T04:19:28.647Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/92/58/f666ca9391f2a8bd33bb0b0797cde6ac3e764866708d5f8aec6fab215320/types_markdown-3.10.0.20251106-py3-none-any.whl", hash = "sha256:2c39512a573899b59efae07e247ba088a75b70e3415e81277692718f430afd7e", size = 25862, upload-time = "2025-11-06T03:06:43.082Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5595,6 +5573,18 @@ wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/7f/016dc5cc718ec6ccaa84fb73ed409ef1c261793fd5e637cdfaa18beb40a9/types_setuptools-80.10.0.20260124-py3-none-any.whl", hash = "sha256:efed7e044f01adb9c2806c7a8e1b6aa3656b8e382379b53d5f26ee3db24d4c01", size = 64333, upload-time = "2026-01-24T03:18:38.344Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-tqdm"
|
||||
version = "4.67.3.20260205"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "types-requests", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/53/46/790b9872523a48163bdda87d47849b4466017640e5259d06eed539340afd/types_tqdm-4.67.3.20260205.tar.gz", hash = "sha256:f3023682d4aa3bbbf908c8c6bb35f35692d319460d9bbd3e646e8852f3dd9f85", size = 17597, upload-time = "2026-02-05T04:03:19.721Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/da/7f761868dbaa328392356fab30c18ab90d14cce86b269e7e63328f29d4a3/types_tqdm-4.67.3.20260205-py3-none-any.whl", hash = "sha256:85c31731e81dc3c5cecc34c6c8b2e5166fafa722468f58840c2b5ac6a8c5c173", size = 23894, upload-time = "2026-02-05T04:03:18.48Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "types-webencodings"
|
||||
version = "0.5.0.20251108"
|
||||
@@ -6104,7 +6094,7 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "zensical"
|
||||
version = "0.0.24"
|
||||
version = "0.0.21"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
@@ -6115,18 +6105,18 @@ dependencies = [
|
||||
{ name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
{ name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/3b/96/9c6cbdd7b351d1023cdbbcf7872d4cb118b0334cfe5821b99e0dd18e3f00/zensical-0.0.24.tar.gz", hash = "sha256:b5d99e225329bf4f98c8022bdf0a0ee9588c2fada7b4df1b7b896fcc62b37ec3", size = 3840688, upload-time = "2026-02-26T09:43:44.557Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/8a/50/2655b5f72d0c72f4366be580f5e2354ff05280d047ea986fe89570e44589/zensical-0.0.21.tar.gz", hash = "sha256:c13563836fa63a3cabeffd83fe3a770ca740cfa5ae7b85df85d89837e31b3b4a", size = 3819731, upload-time = "2026-02-04T17:47:59.396Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8e/aa/b8201af30e376a67566f044a1c56210edac5ae923fd986a836d2cf593c9c/zensical-0.0.24-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:d390c5453a5541ca35d4f9e1796df942b6612c546e3153dd928236d3b758409a", size = 12263407, upload-time = "2026-02-26T09:43:14.716Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/78/8e/3d910214471ade604fd39b080db3696864acc23678b5b4b8475c7dbfd2ce/zensical-0.0.24-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:81ac072869cf4d280853765b2bfb688653da0dfb9408f3ab15aca96455ab8223", size = 12142610, upload-time = "2026-02-26T09:43:17.546Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/d7/eb0983640aa0419ddf670298cfbcf8b75629b6484925429b857851e00784/zensical-0.0.24-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5eb1dfa84cae8e960bfa2c6851d2bc8e9710c4c4c683bd3aaf23185f646ae46", size = 12508380, upload-time = "2026-02-26T09:43:20.114Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a3/04/4405b9e6f937a75db19f0d875798a7eb70817d6a3bec2a2d289a2d5e8aea/zensical-0.0.24-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7c9e589da99c1879a1c703e67c85eaa6be4661cdc6ce6534f7bb3575983f4", size = 12440807, upload-time = "2026-02-26T09:43:22.679Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/12/dc/a7ca2a4224b3072a2c2998b6611ad7fd4f8f131ceae7aa23238d97d26e22/zensical-0.0.24-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42fcc121c3095734b078a95a0dae4d4924fb8fbf16bf730456146ad6cab48ad0", size = 12782727, upload-time = "2026-02-26T09:43:25.347Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/42/37/22f1727da356ed3fcbd31f68d4a477f15c232997c87e270cfffb927459ac/zensical-0.0.24-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:832d4a2a051b9f49561031a2986ace502326f82d9a401ddf125530d30025fdd4", size = 12547616, upload-time = "2026-02-26T09:43:28.031Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6d/ff/c75ff111b8e12157901d00752beef9d691dbb5a034b6a77359972262416a/zensical-0.0.24-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e5fea3bb61238dba9f930f52669db67b0c26be98e1c8386a05eb2b1e3cb875dc", size = 12684883, upload-time = "2026-02-26T09:43:30.642Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/92/4f6ea066382e3d068d3cadbed99e9a71af25e46c84a403e0f747960472a2/zensical-0.0.24-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:75eef0428eec2958590633fdc82dc2a58af124879e29573aa7e153b662978073", size = 12713825, upload-time = "2026-02-26T09:43:33.273Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/fb/bf735b19bce0034b1f3b8e1c50b2896ebbd0c5d92d462777e759e78bb083/zensical-0.0.24-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:3c6b39659156394ff805b4831dac108c839483d9efa4c9b901eaa913efee1ac7", size = 12854318, upload-time = "2026-02-26T09:43:35.632Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/28/0ddab6c1237e3625e7763ff666806f31e5760bb36d18624135a6bb6e8643/zensical-0.0.24-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9eef82865a18b3ca4c3cd13e245dff09a865d1da3c861e2fc86eaa9253a90f02", size = 12818270, upload-time = "2026-02-26T09:43:37.749Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/98/90710d232cb35b633815fa7b493da542391b89283b6103a5bb4ae9fc0dd9/zensical-0.0.21-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:67404cc70c330246dfb7269bcdb60a25be0bb60a212a09c9c50229a1341b1f84", size = 12237120, upload-time = "2026-02-04T17:47:28.615Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/fb/4280b3781157e8f051711732192f949bf29beeafd0df3e33c1c8bf9b7a1a/zensical-0.0.21-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:d4fd253ccfbf5af56434124f13bac01344e456c020148369b18d8836b6537c3c", size = 12118047, upload-time = "2026-02-04T17:47:31.369Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/74/b3/b7f85ae9cf920cf9f17bf157ae6c274919477148feb7716bf735636caa0e/zensical-0.0.21-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:440e40cdc30a29bf7466bcd6f43ed7bd1c54ea3f1a0fefca65619358b481a5bc", size = 12473440, upload-time = "2026-02-04T17:47:33.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d8/ac/1dc6e98f79ed19b9f103c88a0bd271f9140565d7d26b64bc1542b3ef6d91/zensical-0.0.21-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:368e832fc8068e75dc45cab59379db4cefcd81eb116f48d058db8fb7b7aa8d14", size = 12412588, upload-time = "2026-02-04T17:47:36.491Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/76/16a580f6dd32b387caa4a41615451e7dddd1917a2ff2e5b08744f41b4e11/zensical-0.0.21-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4ab962d47f9dd73510eed168469326c7a452554dfbfdb9cdf85efc7140244df", size = 12749438, upload-time = "2026-02-04T17:47:38.969Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/95/30/4baaa1c910eee61db5f49d0d45f2e550a0027218c618f3dd7f8da966a019/zensical-0.0.21-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b846d53dfce007f056ff31848f87f3f2a388228e24d4851c0cafdce0fa204c9b", size = 12514504, upload-time = "2026-02-04T17:47:41.31Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/76/77/931fccae5580b94409a0448a26106f922dcfa7822e7b93cacd2876dd63a8/zensical-0.0.21-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:daac1075552d230d52d621d2e4754ba24d5afcaa201a7a991f1a8d57e320c9de", size = 12647832, upload-time = "2026-02-04T17:47:44.073Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5b/82/3cf75de64340829d55c87c36704f4d1d8c952bd2cdc8a7bc48cbfb8ab333/zensical-0.0.21-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:7b380f545adb6d40896f9bd698eb0e1540ed4258d35b83f55f91658d0fdae312", size = 12678537, upload-time = "2026-02-04T17:47:46.899Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/77/91/6f4938dceeaa241f78bbfaf58a94acef10ba18be3468795173e3087abeb6/zensical-0.0.21-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:5c2227fdab64616bea94b40b8340bafe00e2e23631cc58eeea1e7267167e6ac5", size = 12822164, upload-time = "2026-02-04T17:47:49.231Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/4e/a9c9d25ef0766f767db7b4f09da68da9b3d8a28c3d68cfae01f8e3f9e297/zensical-0.0.21-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2e0f5154d236ed0f98662ee68785b67e8cd2138ea9d5e26070649e93c22eeee0", size = 12785632, upload-time = "2026-02-04T17:47:52.613Z" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
||||
Reference in New Issue
Block a user