Mirror of https://github.com/paperless-ngx/paperless-ngx.git (synced 2026-05-07 07:05:24 +00:00)

Compare commits (65 commits)
| SHA1 | Author | Date | |
|---|---|---|---|
| 05e48b2316 | |||
| fa0c5bde1e | |||
| caac5088e4 | |||
| be8658d61a | |||
| 10e61c5a7a | |||
| 552e5cf422 | |||
| d574867abb | |||
| e5561ba06f | |||
| 22d3b208c9 | |||
| 1692c916f8 | |||
| ffd886eae0 | |||
| 12c0dc635e | |||
| 1e01ce42c0 | |||
| e46f4a5aaa | |||
| 1ba6c31385 | |||
| f3ee820fa4 | |||
| 2f5bcdf66e | |||
| 501cdd92d2 | |||
| 66c5c46913 | |||
| 782634d912 | |||
| 3cfe9fa2a8 | |||
| 784fed447f | |||
| 3292a0e7cc | |||
| 9646b8c67d | |||
| e590d7df69 | |||
| cc71aad058 | |||
| 3cbdf5d0b7 | |||
| f84e0097e5 | |||
| 7dbf8bdd4a | |||
| d2a752a196 | |||
| 2cb155e717 | |||
| 9e9fc6213c | |||
| 0f7c02de5e | |||
| 95dea787f2 | |||
| b6501b0c47 | |||
| 87ebd13abc | |||
| a86c9d32fe | |||
| 7942edfdf4 | |||
| 1e00ad5f30 | |||
| 5f26c01c6f | |||
| 92e133eeb0 | |||
| 06b2d5102c | |||
| 40255cfdbb | |||
| d919c341b1 | |||
| ba0a80a8ad | |||
| 60319c6d37 | |||
| 1e8622494d | |||
| d6a316b1df | |||
| 8f311c4b6b | |||
| f25322600d | |||
| 615f27e6fb | |||
| 5b809122b5 | |||
| 8b8307571a | |||
| 0e97419e0e | |||
| 7ff51452f0 | |||
| a6c974589f | |||
| 47f9f642a9 | |||
| 8bfebc3b9b | |||
| c7f83212a3 | |||
| b010f65ae7 | |||
| 89d3a53603 | |||
| 9601b3d597 | |||
| 13e07844fe | |||
| be82fcb70a | |||
| 98298e37cd |
@@ -2,6 +2,17 @@

# shellcheck shell=bash

declare -r log_prefix="[init-user]"

# When the container is started as a non-root user (e.g. via `user: 999:999`
# in Docker Compose), usermod/groupmod require root and are meaningless.
# USERMAP_* variables only apply to the root-started path.
if [[ -n "${USER_IS_NON_ROOT}" ]]; then
    if [[ -n "${USERMAP_UID}" || -n "${USERMAP_GID}" ]]; then
        echo "${log_prefix} WARNING: USERMAP_UID/USERMAP_GID are set but have no effect when the container is started as a non-root user"
    fi
    echo "${log_prefix} Running as non-root user ($(id --user):$(id --group)), skipping UID/GID remapping"
    exit 0
fi

declare -r usermap_original_uid=$(id -u paperless)
declare -r usermap_original_gid=$(id -g paperless)
declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
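For context, this is the path taken when the container is started with a fixed non-root user. A sketch of what the skip looks like in the logs (assumptions: the Compose service is named `webserver`, it was started with `user: "999:999"`, and `USERMAP_UID` is still set):

```shell-session
$ docker compose logs webserver | grep init-user
[init-user] WARNING: USERMAP_UID/USERMAP_GID are set but have no effect when the container is started as a non-root user
[init-user] Running as non-root user (999:999), skipping UID/GID remapping
```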
@@ -761,7 +761,7 @@ MariaDB: `mariadb-tzinfo-to-sql /usr/share/zoneinfo | mariadb -u root mysql -p`

## Barcodes {#barcodes}

Paperless is able to utilize barcodes for automatically performing some tasks. Barcodes are only supported for PDF documents or TIFF files, [if enabled](configuration.md#PAPERLESS_CONSUMER_BARCODE_TIFF_SUPPORT).

At this time, the library utilized for detection of barcodes supports the following types:
(Two binary image files added to the docs are not shown: 57 KiB and 61 KiB.)
@@ -1,7 +1,141 @@

# Changelog

## paperless-ngx 2.20.14

### Bug Fixes

- Fix: do not submit permissions for non-owners [@shamoon](https://github.com/shamoon) ([#12571](https://github.com/paperless-ngx/paperless-ngx/pull/12571))
- Fix: prevent duplicate parent tag IDs [@shamoon](https://github.com/shamoon) ([#12522](https://github.com/paperless-ngx/paperless-ngx/pull/12522))
- Fix: dont defer tag change application in workflows [@shamoon](https://github.com/shamoon) ([#12478](https://github.com/paperless-ngx/paperless-ngx/pull/12478))
- Fix: limit share link viewset actions [@shamoon](https://github.com/shamoon) ([#12461](https://github.com/paperless-ngx/paperless-ngx/pull/12461))
- Fix: add fallback ordering for documents by id after created [@shamoon](https://github.com/shamoon) ([#12440](https://github.com/paperless-ngx/paperless-ngx/pull/12440))
- Fixhancement: default mail-created correspondent matching to exact [@shamoon](https://github.com/shamoon) ([#12414](https://github.com/paperless-ngx/paperless-ngx/pull/12414))
- Fix: validate date CF value in serializer [@shamoon](https://github.com/shamoon) ([#12410](https://github.com/paperless-ngx/paperless-ngx/pull/12410))

### All App Changes

<details>
<summary>7 changes</summary>

- Fix: do not submit permissions for non-owners [@shamoon](https://github.com/shamoon) ([#12571](https://github.com/paperless-ngx/paperless-ngx/pull/12571))
- Fix: prevent duplicate parent tag IDs [@shamoon](https://github.com/shamoon) ([#12522](https://github.com/paperless-ngx/paperless-ngx/pull/12522))
- Fix: dont defer tag change application in workflows [@shamoon](https://github.com/shamoon) ([#12478](https://github.com/paperless-ngx/paperless-ngx/pull/12478))
- Fix: limit share link viewset actions [@shamoon](https://github.com/shamoon) ([#12461](https://github.com/paperless-ngx/paperless-ngx/pull/12461))
- Fix: add fallback ordering for documents by id after created [@shamoon](https://github.com/shamoon) ([#12440](https://github.com/paperless-ngx/paperless-ngx/pull/12440))
- Fixhancement: default mail-created correspondent matching to exact [@shamoon](https://github.com/shamoon) ([#12414](https://github.com/paperless-ngx/paperless-ngx/pull/12414))
- Fix: validate date CF value in serializer [@shamoon](https://github.com/shamoon) ([#12410](https://github.com/paperless-ngx/paperless-ngx/pull/12410))

</details>

## paperless-ngx 2.20.13

### Bug Fixes

- Fix: suggest corrections only if visible results
- Fix: require view permission for more-like search
- Fix: validate document link targets
- Fix: enforce permissions when attaching accounts to mail rules

## paperless-ngx 2.20.12

### Security

- Resolve [GHSA-96jx-fj7m-qh6x](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-96jx-fj7m-qh6x)

### Bug Fixes

- Fix: Scope the workflow saves to prevent clobbering filename/archive_filename [@stumpylog](https://github.com/stumpylog) ([#12390](https://github.com/paperless-ngx/paperless-ngx/pull/12390))
- Fix: don't try to usermod/groupmod when non-root + update docs (#<!---->12365) [@stumpylog](https://github.com/stumpylog) ([#12391](https://github.com/paperless-ngx/paperless-ngx/pull/12391))
- Fix: avoid moving files if already moved [@shamoon](https://github.com/shamoon) ([#12389](https://github.com/paperless-ngx/paperless-ngx/pull/12389))
- Fix: remove pagination from document notes api spec [@shamoon](https://github.com/shamoon) ([#12388](https://github.com/paperless-ngx/paperless-ngx/pull/12388))
- Fix: fix file button hover color in dark mode [@shamoon](https://github.com/shamoon) ([#12367](https://github.com/paperless-ngx/paperless-ngx/pull/12367))
- Fixhancement: only offer basic auth for appropriate requests [@shamoon](https://github.com/shamoon) ([#12362](https://github.com/paperless-ngx/paperless-ngx/pull/12362))

### All App Changes

<details>
<summary>5 changes</summary>

- Fix: Scope the workflow saves to prevent clobbering filename/archive_filename [@stumpylog](https://github.com/stumpylog) ([#12390](https://github.com/paperless-ngx/paperless-ngx/pull/12390))
- Fix: avoid moving files if already moved [@shamoon](https://github.com/shamoon) ([#12389](https://github.com/paperless-ngx/paperless-ngx/pull/12389))
- Fix: remove pagination from document notes api spec [@shamoon](https://github.com/shamoon) ([#12388](https://github.com/paperless-ngx/paperless-ngx/pull/12388))
- Fix: fix file button hover color in dark mode [@shamoon](https://github.com/shamoon) ([#12367](https://github.com/paperless-ngx/paperless-ngx/pull/12367))
- Fixhancement: only offer basic auth for appropriate requests [@shamoon](https://github.com/shamoon) ([#12362](https://github.com/paperless-ngx/paperless-ngx/pull/12362))

</details>

## paperless-ngx 2.20.11

### Security

- Resolve [GHSA-59xh-5vwx-4c4q](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-59xh-5vwx-4c4q)

### Bug Fixes

- Fix: correct dropdown list active color in dark mode [@shamoon](https://github.com/shamoon) ([#12328](https://github.com/paperless-ngx/paperless-ngx/pull/12328))
- Fixhancement: clear descendant selections in dropdown when parent toggled [@shamoon](https://github.com/shamoon) ([#12326](https://github.com/paperless-ngx/paperless-ngx/pull/12326))
- Fix: prevent wrapping with larger amounts of tags on small cards, reset moreTags setting to correct count [@shamoon](https://github.com/shamoon) ([#12302](https://github.com/paperless-ngx/paperless-ngx/pull/12302))
- Fix: prevent stale db filename during workflow actions [@shamoon](https://github.com/shamoon) ([#12289](https://github.com/paperless-ngx/paperless-ngx/pull/12289))

### All App Changes

<details>
<summary>4 changes</summary>

- Fix: correct dropdown list active color in dark mode [@shamoon](https://github.com/shamoon) ([#12328](https://github.com/paperless-ngx/paperless-ngx/pull/12328))
- Fixhancement: clear descendant selections in dropdown when parent toggled [@shamoon](https://github.com/shamoon) ([#12326](https://github.com/paperless-ngx/paperless-ngx/pull/12326))
- Fix: prevent wrapping with larger amounts of tags on small cards, reset moreTags setting to correct count [@shamoon](https://github.com/shamoon) ([#12302](https://github.com/paperless-ngx/paperless-ngx/pull/12302))
- Fix: prevent stale db filename during workflow actions [@shamoon](https://github.com/shamoon) ([#12289](https://github.com/paperless-ngx/paperless-ngx/pull/12289))

</details>

## paperless-ngx 2.20.10

### Bug Fixes

- Fix: support string coercion in filepath jinja templates [@shamoon](https://github.com/shamoon) ([#12244](https://github.com/paperless-ngx/paperless-ngx/pull/12244))
- Fix: apply ordering after annotating tag document count [@shamoon](https://github.com/shamoon) ([#12238](https://github.com/paperless-ngx/paperless-ngx/pull/12238))
- Fix: enforce path limit for db filename fields [@shamoon](https://github.com/shamoon) ([#12235](https://github.com/paperless-ngx/paperless-ngx/pull/12235))

### All App Changes

<details>
<summary>3 changes</summary>

- Fix: support string coercion in filepath jinja templates [@shamoon](https://github.com/shamoon) ([#12244](https://github.com/paperless-ngx/paperless-ngx/pull/12244))
- Fix: apply ordering after annotating tag document count [@shamoon](https://github.com/shamoon) ([#12238](https://github.com/paperless-ngx/paperless-ngx/pull/12238))
- Fix: enforce path limit for db filename fields [@shamoon](https://github.com/shamoon) ([#12235](https://github.com/paperless-ngx/paperless-ngx/pull/12235))

</details>

## paperless-ngx 2.20.9

### Security

- Resolve [GHSA-386h-chg4-cfw9](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-386h-chg4-cfw9)

### Bug Fixes

- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))

### All App Changes

<details>
<summary>2 changes</summary>

- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))

</details>

## paperless-ngx 2.20.8

### Security

- Resolve [GHSA-7qqc-wrcw-2fj9](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-7qqc-wrcw-2fj9)

## paperless-ngx 2.20.7

### Security

- Resolve [GHSA-x395-6h48-wr8v](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-x395-6h48-wr8v)

### Bug Fixes

- Performance fix: use subqueries to improve object retrieval in large installs [@shamoon](https://github.com/shamoon) ([#11950](https://github.com/paperless-ngx/paperless-ngx/pull/11950))

@@ -20,6 +154,10 @@

## paperless-ngx 2.20.6

### Security

- Resolve [GHSA-jqwv-hx7q-fxh3](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-jqwv-hx7q-fxh3) and [GHSA-w47q-3m69-84v8](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-w47q-3m69-84v8)

### Bug Fixes

- Fix: extract all ids for nested tags [@shamoon](https://github.com/shamoon) ([#11888](https://github.com/paperless-ngx/paperless-ngx/pull/11888))
@@ -358,7 +358,7 @@ If you want to build the documentation locally, this is how you do it:

    $ uv run zensical serve

## Building the Docker image {#docker_build}

The docker image is primarily built by the GitHub actions workflow, but
it can be faster when developing to build and tag an image locally.
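For reference, a local build-and-tag might look like the following sketch (the `paperless-ngx:local` tag is an arbitrary example, not a project convention):

```shell-session
$ git clone https://github.com/paperless-ngx/paperless-ngx
$ cd paperless-ngx
$ docker build --tag paperless-ngx:local .
```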
@@ -4,53 +4,74 @@ title: Setup

# Installation

!!! tip "Quick Start"

    If you just want Paperless-ngx running quickly, use our installation script:

    ```shell-session
    bash -c "$(curl --location --silent --show-error https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
    ```

    _If piping into a shell directly from the internet makes you nervous, inspect [the script](https://github.com/paperless-ngx/paperless-ngx/blob/main/install-paperless-ngx.sh) first!_

## Overview

Choose the installation route that best fits your setup:

| Route | Best for | Effort |
| --- | --- | --- |
| [Installation script](#docker_script) | Fastest first-time setup with guided prompts (recommended for most users) | Low |
| [Docker Compose templates](#docker) | Manual control over compose files and settings | Medium |
| [Bare metal](#bare_metal) | Advanced setups, packaging, and development-adjacent workflows | High |
| [Hosted providers (wiki)](https://github.com/paperless-ngx/paperless-ngx/wiki/Related-Projects#hosting-providers) | Managed hosting options maintained by the community — check details carefully | Varies |

For most users, Docker is the best option. It is faster to set up,
easier to maintain, and ships with sensible defaults.

The bare-metal route gives you more control, but it requires manual
installation and operation of all components. It is usually best suited
for advanced users and contributors.

!!! info

    Because [superuser](usage.md#superusers) accounts have full access to all objects and documents, you may want to create a separate user account for daily use,
    or "downgrade" your superuser account to a normal user account after setup.

## Installation Script {#docker_script}

Paperless-ngx provides an interactive script for Docker Compose setups.
It asks a few configuration questions, then creates the required files,
pulls the image, starts the containers, and creates your [superuser](usage.md#superusers)
account. In short, it automates the [Docker Compose setup](#docker) described below.

#### Prerequisites

- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.
- macOS users will need [GNU sed](https://formulae.brew.sh/formula/gnu-sed) with support for running as `sed` as well as [wget](https://formulae.brew.sh/formula/wget).

#### Run the installation script

```shell-session
bash -c "$(curl --location --silent --show-error https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/install-paperless-ngx.sh)"
```

#### After installation

Paperless-ngx should be available at `http://127.0.0.1:8000` (or similar,
depending on your configuration) and you will be able to login with the
credentials you provided during the installation script.

## Docker Compose Install {#docker}

#### Prerequisites

- Docker and Docker Compose must be [installed](https://docs.docker.com/engine/install/){:target="\_blank"}.

#### Installation

1. Go to the [/docker/compose directory on the project
   page](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose){:target="\_blank"}
   and download one `docker-compose.*.yml` file for your preferred
   database backend. Save it in a local directory as `docker-compose.yml`.
   Also download `docker-compose.env` and `.env` into that same directory.

    If you want to enable optional support for Office and other documents, download a
    file with `-tika` in the file name.
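    For example, a sketch of fetching the PostgreSQL variant plus the two env files (the file names shown were taken from the compose directory and may change; check that directory for the current names):

    ```shell-session
    wget https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.postgres.yml -O docker-compose.yml
    wget https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/docker-compose.env
    wget https://raw.githubusercontent.com/paperless-ngx/paperless-ngx/main/docker/compose/.env
    ```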
@@ -60,15 +81,16 @@ account. The script essentially automatically performs the steps described in [D

    For new installations, it is recommended to use PostgreSQL as the
    database backend.

2. Modify `docker-compose.yml` as needed. For example, you may want to
   change the paths for `consume`, `media`, and other directories to
   use bind mounts.
   Find the line that specifies where to mount the directory, e.g.:

    ```yaml
    - ./consume:/usr/src/paperless/consume
    ```

    Replace the part _before_ the colon with your local directory:

    ```yaml
    - /home/jonaswinkler/paperless-inbox:/usr/src/paperless/consume
    ```
@@ -82,38 +104,15 @@ account. The script essentially automatically performs the steps described in [D

        - 8010:8000

3. Modify `docker-compose.env` with any configuration options you need.
   See the [configuration documentation](configuration.md) for all options.

    You may also need to set `USERMAP_UID` and `USERMAP_GID` to
    the UID and GID of your user on the host system. Use `id -u` and
    `id -g` to get these values. This ensures both the container and the
    host user can write to the consumption directory. If your UID and
    GID are `1000` (the default for the first normal user on many
    systems), this usually works out of the box without
    modifications. Run `id "username"` to check.
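    For example, a quick way to look the values up and carry them over (a sketch; the `1000` values are simply what `id` returned in this example):

    ```shell-session
    $ id -u
    1000
    $ id -g
    1000
    ```

    Then set `USERMAP_UID=1000` and `USERMAP_GID=1000` in `docker-compose.env`.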
!!! note
@@ -122,79 +121,55 @@ account. The script essentially automatically performs the steps described in [D

        appending `_FILE` to configuration values. For example [`PAPERLESS_DBUSER`](configuration.md#PAPERLESS_DBUSER)
        can be set using `PAPERLESS_DBUSER_FILE=/var/run/secrets/password.txt`.

4. Run `docker compose pull`. This pulls the image from the GitHub container registry
   by default, but you can pull from Docker Hub by changing the `image`
   line to `image: paperlessngx/paperless-ngx:latest`.

5. Run `docker compose up -d`. This will create and start the necessary containers.

#### After installation

Your Paperless-ngx instance should now be accessible at
`http://127.0.0.1:8000` (or similar, depending on your configuration).
When you first access the web interface, you will be prompted to create
a [superuser](usage.md#superusers) account.
#### Optional Advanced Compose Configurations {#advanced_compose data-toc-label="Advanced Compose Configurations"}

**Rootless**

!!! warning

    It is not possible to run the container rootless if additional languages are specified via `PAPERLESS_OCR_LANGUAGES`.

If you want to run Paperless as a rootless container, set `user:` in `docker-compose.yml` to the UID and GID of your host user (use `id -u` and `id -g` to find these values). The container process starts directly as that user with no internal privilege remapping:

```yaml
webserver:
  image: ghcr.io/paperless-ngx/paperless-ngx:latest
  user: '1000:1000'
```

Do not combine this with `USERMAP_UID` or `USERMAP_GID`, which are intended for the non-rootless case described in step 3.
**File systems without inotify support (e.g. NFS)**

Some file systems, such as NFS network shares, don't support file system
notifications with `inotify`. When the consumption directory is on such a
file system, Paperless-ngx will not pick up new files with the default
configuration. Use [`PAPERLESS_CONSUMER_POLLING`](configuration.md#PAPERLESS_CONSUMER_POLLING)
to enable polling and disable inotify. See [here](configuration.md#polling).
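A sketch of enabling polling in `docker-compose.env` (the 10-second interval is an arbitrary illustration):

```bash
# Check the consumption directory every 10 seconds instead of relying on inotify
PAPERLESS_CONSUMER_POLLING=10
```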
## Bare Metal Install {#bare_metal}

#### Prerequisites

- Paperless runs on Linux only, Windows is not supported.
- Python 3 is required with versions 3.10 - 3.12 currently supported. Newer versions may work, but some dependencies may not be fully compatible.

#### Installation

1. Install dependencies. Paperless requires the following packages:

    - `python3`
    - `python3-pip`
@@ -258,8 +233,8 @@ are released, dependency support is confirmed, etc.

2. Install `redis` >= 6.0 and configure it to start automatically.

3. Optional: Install `postgresql` and configure a database, user, and
   password for Paperless-ngx. If you do not wish to use PostgreSQL,
   MariaDB and SQLite are available as well.
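    A minimal sketch of creating the role and database (the names and the interactive password prompt are assumptions; any equivalent setup works):

    ```shell-session
    sudo -u postgres createuser --pwprompt paperless
    sudo -u postgres createdb --owner paperless paperless
    ```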
!!! note
@@ -268,61 +243,60 @@ are released, dependency support is confirmed, etc.

        extension](https://code.djangoproject.com/wiki/JSON1Extension) is
        enabled. This is usually the case, but not always.

4. Create a system user with a new home folder in which you want
   to run Paperless-ngx.

    ```shell-session
    adduser paperless --system --home /opt/paperless --group
    ```

5. Download a release archive from
   <https://github.com/paperless-ngx/paperless-ngx/releases>. For example:

    ```shell-session
    curl -O -L https://github.com/paperless-ngx/paperless-ngx/releases/download/vX.Y.Z/paperless-ngx-vX.Y.Z.tar.xz
    ```

    Extract the archive with

    ```shell-session
    tar -xf paperless-ngx-vX.Y.Z.tar.xz
    ```

    and copy the contents to the home directory of the user you created
    earlier (`/opt/paperless`).

    Optional: If you cloned the Git repository, you will need to
    compile the frontend yourself. See [here](development.md#front-end-development)
    and use the `build` step, not `serve`.

6. Configure Paperless-ngx. See [configuration](configuration.md) for details.
   Edit the included `paperless.conf` and adjust the settings to your
   needs. Required settings for getting Paperless-ngx running are:

    - [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) should point to your Redis server, such as
      `redis://localhost:6379`.
    - [`PAPERLESS_DBENGINE`](configuration.md#PAPERLESS_DBENGINE) is optional, and should be one of `postgres`,
      `mariadb`, or `sqlite`
    - [`PAPERLESS_DBHOST`](configuration.md#PAPERLESS_DBHOST) should be the hostname on which your
      PostgreSQL server is running. Do not configure this to use
      SQLite instead. Also configure port, database name, user and
      password as necessary.
    - [`PAPERLESS_CONSUMPTION_DIR`](configuration.md#PAPERLESS_CONSUMPTION_DIR) should point to the folder
      that Paperless-ngx should watch for incoming documents.
      Likewise, [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) and
      [`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) define where Paperless-ngx stores its data.
      If needed, these can point to the same directory.
    - [`PAPERLESS_SECRET_KEY`](configuration.md#PAPERLESS_SECRET_KEY) should be a random sequence of
      characters. It's used for authentication. Failure to do so
      allows third parties to forge authentication credentials.
    - Set [`PAPERLESS_URL`](configuration.md#PAPERLESS_URL) if you are behind a reverse proxy. This should
      point to your domain. Please see
      [configuration](configuration.md) for more
      information.
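    To make this concrete, a minimal `paperless.conf` might look like the sketch below (every value is an illustrative assumption, not a default; adjust hosts, paths and credentials to your system):

    ```bash
    PAPERLESS_REDIS=redis://localhost:6379
    PAPERLESS_DBENGINE=postgres
    PAPERLESS_DBHOST=localhost
    PAPERLESS_SECRET_KEY=change-me-to-a-long-random-string
    PAPERLESS_CONSUMPTION_DIR=/opt/paperless/consume
    PAPERLESS_DATA_DIR=/opt/paperless/data
    PAPERLESS_MEDIA_ROOT=/opt/paperless/media
    PAPERLESS_URL=https://paperless.example.com
    ```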
    You can make many more adjustments, especially for OCR.
    The following options are recommended for most users:

    - Set [`PAPERLESS_OCR_LANGUAGE`](configuration.md#PAPERLESS_OCR_LANGUAGE) to the language most of your
      documents are written in.
@@ -332,15 +306,14 @@ are released, dependency support is confirmed, etc.

    Ensure your Redis instance [is secured](https://redis.io/docs/latest/operate/oss_and_stack/management/security/).

7. Create the following directories if they do not already exist:

    - `/opt/paperless/media`
    - `/opt/paperless/data`
    - `/opt/paperless/consume`

    Adjust these paths if you configured different folders.
    Then verify that the `paperless` user has write permissions:

    ```shell-session
    ls -l -d /opt/paperless/media
    ```
@@ -354,45 +327,44 @@ are released, dependency support is confirmed, etc.

    sudo chown paperless:paperless /opt/paperless/consume

8. Install Python dependencies from `requirements.txt`.

    ```shell-session
    sudo -Hu paperless pip3 install -r requirements.txt
    ```

    This will install all Python dependencies in the home directory of
    the new paperless user.

    !!! tip

        You can use a virtual environment if you prefer. If you do,
        you may need to adjust the example scripts for your virtual
        environment paths.

    !!! tip

        If you use modern Python tooling, such as `uv`, installation will not include
        dependencies for PostgreSQL or MariaDB. You can select those
        extras with `--extra <EXTRA>`, or install all extras with
        `--all-extras`.
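    For instance, with `uv` the extras can be selected at install time (a sketch only; the extra name `postgres` is an assumption, check the `pyproject.toml` of the release you downloaded for the extras it actually defines):

    ```shell-session
    sudo -Hu paperless uv sync --extra postgres
    ```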
9. Go to `/opt/paperless/src` and execute the following command:

    ```bash
    # This creates the database schema.
    sudo -Hu paperless python3 manage.py migrate
    ```

    When you first access the web interface you will be prompted to create a superuser account.

10. Optional: Test that Paperless-ngx is working by running

    ```bash
    # Manually starts the webserver
    sudo -Hu paperless python3 manage.py runserver
    ```

    Then point your browser to `http://localhost:8000` if
    accessing from the same device on which Paperless-ngx is installed.
    If accessing from another machine, set up systemd services. You may need
    to set `PAPERLESS_DEBUG=true` in order for the development server to work
    normally in your browser.
@@ -400,23 +372,24 @@ are released, dependency support is confirmed, etc.

    !!! warning

        This is a development server which should not be used in production.
        It is not audited for security, and performance is inferior to
        production-ready web servers.

    !!! tip

        This will not start the consumer. Paperless does this in a separate
        process.

11. Set up systemd services to run Paperless-ngx automatically. You may use
    the service definition files included in the `scripts` folder as a
    starting point.

    Paperless needs the following services (an example of enabling them follows the list):

    - The `webserver` script to run the webserver.
    - The `consumer` script to watch the input folder.
    - The `taskqueue` script for background workers (document consumption, etc.).
    - The `scheduler` script for periodic tasks such as email checking.
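    A sketch of installing and enabling the example units (the unit file names are assumptions based on the files shipped in the `scripts` folder; check the actual names in your release):

    ```shell-session
    sudo cp scripts/paperless-*.service /etc/systemd/system/
    sudo systemctl daemon-reload
    sudo systemctl enable --now paperless-webserver paperless-consumer paperless-task-queue paperless-scheduler
    ```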
!!! note
@@ -425,9 +398,9 @@ are released, dependency support is confirmed, etc.

        `Require=paperless-webserver.socket` in the `webserver` script
        and configure `granian` to listen on port 80 (set `GRANIAN_PORT`).

    These services rely on Redis and optionally the database server, but
    don't need to be started in any particular order. The example files
    depend on Redis being started. If you use a database server, you
    should add additional dependencies.

    !!! note
@@ -437,18 +410,15 @@ are released, dependency support is confirmed, etc.

    !!! warning

        If Celery won't start, check
        `sudo systemctl status paperless-task-queue.service` for
        `paperless-task-queue.service` and `paperless-scheduler.service`.
        You may need to change the path in the files. Example:
        `ExecStart=/opt/paperless/.local/bin/celery --app paperless worker --loglevel INFO`

12. Configure ImageMagick to allow processing of PDF documents. Most
    distributions have this disabled by default, since PDF documents can
    contain malware. If you don't do this, Paperless-ngx will fall back to
    Ghostscript for certain steps such as thumbnail generation.

    Edit `/etc/ImageMagick-6/policy.xml` and adjust
@@ -463,32 +433,38 @@ are released, dependency support is confirmed, etc.

    <policy domain="coder" rights="read|write" pattern="PDF" />

**Optional: Install the [jbig2enc](https://ocrmypdf.readthedocs.io/en/latest/jbig2.html) encoder.**
This will reduce the size of generated PDF documents. You'll most likely need to compile this yourself, because this
software has been patented until around 2017 and binary packages are not available for most distributions.
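A sketch of building it from source (the repository URL and the autotools build steps are assumptions, and prerequisites such as automake, libtool and the leptonica headers are not shown; follow the OCRmyPDF jbig2 documentation linked above for authoritative steps):

```shell-session
git clone https://github.com/agl/jbig2enc
cd jbig2enc
./autogen.sh
./configure
make
sudo make install
```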
**Optional: download the NLTK data**
If using the NLTK machine-learning processing (see [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) for details),
download the NLTK data for the Snowball Stemmer, Stopwords and Punkt tokenizer to `/usr/share/nltk_data`. Refer to the [NLTK
instructions](https://www.nltk.org/data.html) for details on how to download the data.
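For example (a sketch; the package identifiers correspond to the Snowball, Stopwords and Punkt data sets named above):

```shell-session
sudo python3 -m nltk.downloader -d /usr/share/nltk_data snowball_data stopwords punkt
```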
#### After installation

Your Paperless-ngx instance should now be accessible at `http://localhost:8000` (or similar, depending on your configuration).
When you first access the web interface you will be prompted to create a [superuser](usage.md#superusers) account.

## Build the Docker image yourself {#docker_build data-toc-label="Building the Docker image"}

Building the Docker image yourself is typically used for development, but it can also be used for production
if you want to customize the image. See [Building the Docker image](development.md#docker_build) in the
development documentation.

## Migrating to Paperless-ngx

You can migrate to Paperless-ngx from Paperless-ng or from the original
Paperless project.

<h3 id="migration_ng">Migrating from Paperless-ng</h3>

Paperless-ngx is meant to be a drop-in replacement for Paperless-ng, and
upgrading should be trivial for most users, especially when using
Docker. However, as with any major change, it is recommended to take a
full backup first. Once you are ready, simply change the docker image to
point to the new source. For example, if using Docker Compose, edit
`docker-compose.yml` and change:
@@ -501,66 +477,64 @@ to

    image: ghcr.io/paperless-ngx/paperless-ngx:latest

and then run `docker compose up -d`, which will pull the new image and
recreate the container. That's it.

Users who installed with the bare-metal route should also update their
Git clone to point to `https://github.com/paperless-ngx/paperless-ngx`,
for example using:
`git remote set-url origin https://github.com/paperless-ngx/paperless-ngx`
and then pull the latest version.

<h3 id="migration_paperless">Migrating from Paperless</h3>

At its core, Paperless-ngx is still Paperless and fully compatible.
However, some things have changed under the hood, so you need to adapt
your setup depending on how you installed Paperless.

This section describes how to update an existing Paperless Docker
installation. Keep these points in mind:

- Read the [changelog](changelog.md) and
  take note of breaking changes.
- Decide whether to stay on SQLite or migrate to PostgreSQL.
  Both work fine with Paperless-ngx.
  However, if you already have a database server running
  for other services, you might as well use it for Paperless as well.
- The task scheduler of Paperless, which is used to execute periodic
  tasks such as email checking and maintenance, requires a
  [Redis](https://redis.io/) message broker instance. The
  Docker Compose route takes care of that.
- The layout of the folder structure for your documents and data
  remains the same, so you can plug your old Docker volumes into
  paperless-ngx and expect it to find everything where it should be.

Migration to Paperless-ngx is then performed in a few simple steps:

1. Stop Paperless.

    ```bash
    cd /path/to/current/paperless
    docker compose down
    ```

2. Create a backup for two reasons: if something goes wrong, you still
   have your data; and if you don't like paperless-ngx, you can
   switch back to Paperless.

3. Download the latest release of Paperless-ngx. You can either use
   the Docker Compose files from
   [here](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)
   or clone the repository to build the image yourself (see
   [development docs](development.md#docker_build)). You can either replace your current paperless
   folder or put Paperless-ngx in
   a different location.

    !!! warning

        Paperless-ngx includes a `.env` file. This will set the project name
        for Docker Compose to `paperless`, which will also define the
        volume names created by Paperless-ngx. However, if you notice that
        paperless-ngx is not using your old paperless volumes, verify the
        names of your volumes with
@@ -576,10 +550,10 @@ Migration to paperless-ngx is then performed in a few simple steps:

    after you migrated your existing SQLite database.

5. Adjust `docker-compose.yml` and `docker-compose.env` to your needs.
   See [Docker setup](#docker) for details on
   which edits are recommended.

6. Follow the update procedure in [Update paperless](administration.md#updating).

7. In order to find your existing documents with the new search
   feature, you need to invoke a one-time operation that will create
@@ -590,136 +564,99 @@ Migration to paperless-ngx is then performed in a few simple steps:

    This will migrate your database and create the search index. After
    that, Paperless-ngx will maintain the index automatically.

8. Start Paperless-ngx.

    ```bash
    docker compose up -d
    ```

    This will run Paperless-ngx in the background and automatically start it
    on system boot.

9. Paperless may have installed a permanent redirect to `admin/` in your
   browser. This redirect is still in place and prevents access to the
   new UI. Clear your browser cache to fix this.

10. Optionally, follow the instructions below to migrate your existing
    data to PostgreSQL.
<h3 id="migration_lsio">Migrating from LinuxServer.io Docker Image</h3>

As with any upgrade or large change, it is highly recommended to
create a backup before starting. This assumes the image was running
using Docker Compose, but the instructions are translatable to Docker
commands as well. A consolidated sketch of the resulting environment
variables appears after the list below.

1. Stop and remove the Paperless container.
2. If using an external database, stop that container.
3. Update Redis configuration.

    1. If `REDIS_URL` is already set, change it to [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS)
       and continue to step 4.

    1. Otherwise, add a new Redis service in `docker-compose.yml`,
       following [the example compose
       files](https://github.com/paperless-ngx/paperless-ngx/tree/main/docker/compose)

    1. Set the environment variable [`PAPERLESS_REDIS`](configuration.md#PAPERLESS_REDIS) so it points to
       the new Redis container.

4. Update user mapping.

    1. If set, change the environment variable `PUID` to `USERMAP_UID`.

    1. If set, change the environment variable `PGID` to `USERMAP_GID`.

5. Update configuration paths.

    1. Set the environment variable [`PAPERLESS_DATA_DIR`](configuration.md#PAPERLESS_DATA_DIR) to `/config`.

6. Update media paths.

    1. Set the environment variable [`PAPERLESS_MEDIA_ROOT`](configuration.md#PAPERLESS_MEDIA_ROOT) to
       `/data/media`.

7. Update timezone.

    1. Set the environment variable [`PAPERLESS_TIME_ZONE`](configuration.md#PAPERLESS_TIME_ZONE) to the same
       value as `TZ`.

8. Modify `image:` to point to
   `ghcr.io/paperless-ngx/paperless-ngx:latest` or a specific version
   if preferred.

9. Start the containers as before, using `docker compose`.
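Taken together, steps 3-7 amount to renaming a handful of environment variables. A sketch of what the relevant part of `docker-compose.env` (or the service's `environment:` block) might look like afterwards, with illustrative values only:

```bash
# Reuse the values from your old REDIS_URL/PUID/PGID/TZ settings;
# the Redis host name "broker" and the IDs shown are assumptions.
PAPERLESS_REDIS=redis://broker:6379
USERMAP_UID=1000
USERMAP_GID=1000
PAPERLESS_DATA_DIR=/config
PAPERLESS_MEDIA_ROOT=/data/media
PAPERLESS_TIME_ZONE=Europe/Berlin
```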
## Moving data from SQLite to PostgreSQL or MySQL/MariaDB {#sqlite_to_psql}

The best way to migrate between database types is to perform an [export](administration.md#exporter) and then
[import](administration.md#importer) into a clean installation of Paperless-ngx.
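A sketch of that flow with the built-in management commands (the `webserver` service name and the `../export` path are assumptions; see the exporter/importer documentation linked above for the supported options):

```shell-session
# In the old (SQLite) install: export documents and metadata
docker compose exec webserver document_exporter ../export

# In the new (PostgreSQL/MariaDB) install: import the export directory
docker compose exec webserver document_importer ../export
```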
## Moving back to Paperless

Let's say you migrated to Paperless-ngx and used it for a while, but
decided that you don't like it and want to move back (If you do, send
me a mail about what part you didn't like!), you can totally do that
with a few simple steps.

Paperless-ngx modified the database schema slightly, however, these
changes can be reverted while keeping your current data, so that your
current data will be compatible with original Paperless. Thumbnails
were also changed from PNG to WEBP format and will need to be
re-generated.

Execute this:

```shell-session
$ cd /path/to/paperless
$ docker compose run --rm webserver migrate documents 0023
```

Or without docker:

```shell-session
$ cd /path/to/paperless/src
$ python3 manage.py migrate documents 0023
```

After regenerating thumbnails, you'll need to clear your cookies
(Paperless-ngx comes with updated dependencies that do cookie-processing
differently) and probably your cache as well.
# Considerations for less powerful devices {#less-powerful-devices}
|
||||
|
||||
Paperless runs on Raspberry Pi. However, some things are rather slow on
|
||||
the Pi and configuring some options in paperless can help improve
|
||||
performance immensely:
|
||||
Paperless runs on Raspberry Pi. Some tasks can be slow on lower-powered
|
||||
hardware, but a few settings can improve performance:
|
||||
|
||||
- Stick with SQLite to save some resources. See [troubleshooting](troubleshooting.md#log-reports-creating-paperlesstask-failed)
|
||||
if you encounter issues with SQLite locking.
|
||||
- If you do not need the filesystem-based consumer, consider disabling it
|
||||
entirely by setting [`PAPERLESS_CONSUMER_DISABLE`](configuration.md#PAPERLESS_CONSUMER_DISABLE) to `true`.
|
||||
- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that paperless will
|
||||
only OCR the first page of your documents. In most cases, this page
|
||||
- Consider setting [`PAPERLESS_OCR_PAGES`](configuration.md#PAPERLESS_OCR_PAGES) to 1, so that Paperless
|
||||
OCRs only the first page of your documents. In most cases, this page
|
||||
contains enough information to be able to find it.
|
||||
- [`PAPERLESS_TASK_WORKERS`](configuration.md#PAPERLESS_TASK_WORKERS) and [`PAPERLESS_THREADS_PER_WORKER`](configuration.md#PAPERLESS_THREADS_PER_WORKER) are
|
||||
configured to use all cores. The Raspberry Pi models 3 and up have 4
|
||||
cores, meaning that paperless will use 2 workers and 2 threads per
|
||||
cores, meaning that Paperless will use 2 workers and 2 threads per
|
||||
worker. This may result in sluggish response times during
|
||||
consumption, so you might want to lower these settings (example: 2
|
||||
workers and 1 thread to always have some computing power left for
|
||||
other tasks).
|
||||
- Keep [`PAPERLESS_OCR_MODE`](configuration.md#PAPERLESS_OCR_MODE) at its default value `skip` and consider
|
||||
OCR'ing your documents before feeding them into paperless. Some
|
||||
OCRing your documents before feeding them into Paperless. Some
|
||||
scanners are able to do this!
|
||||
- Set [`PAPERLESS_OCR_SKIP_ARCHIVE_FILE`](configuration.md#PAPERLESS_OCR_SKIP_ARCHIVE_FILE) to `with_text` to skip archive
|
||||
file generation for already ocr'ed documents, or `always` to skip it
|
||||
file generation for already OCRed documents, or `always` to skip it
|
||||
for all documents.
|
||||
- If you want to perform OCR on the device, consider using
|
||||
`PAPERLESS_OCR_CLEAN=none`. This will speed up OCR times and use
|
||||
less memory at the expense of slightly worse OCR results.
|
||||
- If using docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
|
||||
- If using Docker, consider setting [`PAPERLESS_WEBSERVER_WORKERS`](configuration.md#PAPERLESS_WEBSERVER_WORKERS) to 1. This will save some memory.
|
||||
- Consider setting [`PAPERLESS_ENABLE_NLTK`](configuration.md#PAPERLESS_ENABLE_NLTK) to false, to disable the
|
||||
more advanced language processing, which can take more memory and
|
||||
processing time.
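As a rough starting point, the suggestions above could be combined in a Docker Compose environment file along these lines (a sketch only; the file name `docker-compose.env` and the chosen values are assumptions to be tuned for your hardware):

```
# docker-compose.env: conservative settings for low-powered hardware
PAPERLESS_TASK_WORKERS=2
PAPERLESS_THREADS_PER_WORKER=1
PAPERLESS_OCR_PAGES=1
PAPERLESS_OCR_CLEAN=none
PAPERLESS_OCR_SKIP_ARCHIVE_FILE=with_text
PAPERLESS_WEBSERVER_WORKERS=1
PAPERLESS_ENABLE_NLTK=false
# PAPERLESS_CONSUMER_DISABLE=true   # only if the filesystem consumer is not needed
```

`PAPERLESS_OCR_MODE` is simply left at its default of `skip`, as recommended above.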
|
||||
@@ -731,17 +668,19 @@ For details, refer to [configuration](configuration.md).
|
||||
Updating the
|
||||
[automatic matching algorithm](advanced_usage.md#automatic-matching) takes quite a bit of time. However, the update mechanism
|
||||
checks if your data has changed before doing the heavy lifting. If you
|
||||
experience the algorithm taking too much cpu time, consider changing the
|
||||
experience the algorithm taking too much CPU time, consider changing the
|
||||
schedule in the admin interface to daily. You can also manually invoke
|
||||
the task by changing the date and time of the next run to today/now.
|
||||
|
||||
The actual matching of the algorithm is fast and works on Raspberry Pi
|
||||
as well as on any other device.
|
||||
|
||||
# Using nginx as a reverse proxy {#nginx}
|
||||
## Additional considerations
|
||||
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation of using nginx with Paperless-ngx.
|
||||
**Using a reverse proxy with Paperless-ngx**
|
||||
|
||||
# Enhancing security {#security}
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-a-Reverse-Proxy-with-Paperless-ngx#nginx) for user-maintained documentation on using nginx with Paperless-ngx.
|
||||
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation of how to configure security tools like Fail2ban with Paperless-ngx.
|
||||
**Enhancing security**
|
||||
|
||||
Please see [the wiki](https://github.com/paperless-ngx/paperless-ngx/wiki/Using-Security-Tools-with-Paperless-ngx) for user-maintained documentation on configuring security tools like Fail2ban with Paperless-ngx.
|
||||
|
||||
+9
-2
@@ -348,6 +348,11 @@ permissions can be granted to limit access to certain parts of the UI (and corre
|
||||
|
||||
Superusers can access all parts of the front and backend application as well as any and all objects. Superuser status can only be granted by another superuser.
|
||||
|
||||
!!! tip
|
||||
|
||||
Because superuser accounts can see all objects and documents, you may want to use a regular account for day-to-day use. Additional superuser accounts can
|
||||
be created via [cli](administration.md#create-superuser) or granted superuser status from an existing superuser account.
|
||||
|
||||
#### Admin Status
|
||||
|
||||
Admin status (Django 'staff status') grants access to viewing the paperless logs and the system status dialog
|
||||
@@ -376,7 +381,7 @@ still have "object-level" permissions.
|
||||
| StoragePath | Add, edit, delete or view Storage Paths. |
|
||||
| Tag | Add, edit, delete or view Tags. |
|
||||
| UISettings | Add, edit, delete or view the UI settings that are used by the web app.<br/>:warning: **Users that will access the web UI must be granted at least _View_ permissions.** |
|
||||
| User | Add, edit, delete or view Users. |
|
||||
| User | Add, edit, delete or view other user accounts via Settings > Users & Groups and `/api/users/`. These permissions are not needed for users to edit their own profile via "My Profile" or `/api/profile/`. |
|
||||
| Workflow | Add, edit, delete or view Workflows.<br/>Note that Workflows are global; all users who can access workflows see the same set. Workflows have other permission implications — see [Workflow permissions](#workflow-permissions). |
|
||||
|
||||
#### Detailed Explanation of Object Permissions {#object-permissions}
|
||||
@@ -387,6 +392,8 @@ still have "object-level" permissions.
|
||||
| View | Confers the ability to view (not edit) a document, tag, etc.<br/>Users without 'view' (or higher) permissions will be shown _'Private'_ in place of the object name for example when viewing a document with a tag for which the user doesn't have permissions. |
|
||||
| Edit | Confers the ability to edit (and view) a document, tag, etc. |
|
||||
|
||||
For related metadata such as tags, correspondents, document types, and storage paths, object visibility and document assignment are intentionally distinct. A user may still retain or submit a known object ID when editing a document even if that related object is displayed as _Private_ or omitted from search and selection results. This allows documents to preserve existing assignments that the current user cannot necessarily inspect in detail.
|
||||
|
||||
### Password reset
|
||||
|
||||
In order to enable the password reset feature you will need to setup an SMTP backend, see
|
||||
@@ -565,7 +572,7 @@ applied. You can use the following placeholders in the template with any trigger
|
||||
- `{{added_day}}`: added day
|
||||
- `{{added_time}}`: added time in HH:MM format
|
||||
- `{{original_filename}}`: original file name without extension
|
||||
- `{{filename}}`: current file name without extension
|
||||
- `{{filename}}`: current file name without extension (for "added" workflows this may not be final yet; you can use `{{original_filename}}` instead)
|
||||
- `{{doc_title}}`: current document title
|
||||
|
||||
The following placeholders are only available for "added" or "updated" triggers
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "paperless-ngx"
|
||||
version = "2.20.8"
|
||||
version = "2.20.15"
|
||||
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
|
||||
+1
-1
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "paperless-ngx-ui",
|
||||
"version": "2.20.8",
|
||||
"version": "2.20.15",
|
||||
"scripts": {
|
||||
"preinstall": "npx only-allow pnpm",
|
||||
"ng": "ng",
|
||||
|
||||
@@ -19,13 +19,18 @@
|
||||
<div class="col">
|
||||
<div class="card bg-light">
|
||||
<div class="card-body">
|
||||
<div class="card-title">
|
||||
<h6>
|
||||
{{option.title}}
|
||||
<a class="btn btn-sm btn-link" title="Read the documentation about this setting" i18n-title [href]="getDocsUrl(option.config_key)" target="_blank" referrerpolicy="no-referrer">
|
||||
<i-bs name="info-circle"></i-bs>
|
||||
</a>
|
||||
<div class="card-title d-flex align-items-center">
|
||||
<h6 class="mb-0">
|
||||
{{option.title}}
|
||||
</h6>
|
||||
<a class="btn btn-sm btn-link" title="Read the documentation about this setting" i18n-title [href]="getDocsUrl(option.config_key)" target="_blank" referrerpolicy="no-referrer">
|
||||
<i-bs name="info-circle"></i-bs>
|
||||
</a>
|
||||
@if (isSet(option.key)) {
|
||||
<button type="button" class="btn btn-sm btn-link text-danger ms-auto pe-0" title="Reset" i18n-title (click)="resetOption(option.key)">
|
||||
<i-bs class="me-1" name="x"></i-bs><ng-container i18n>Reset</ng-container>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
<div class="mb-n3">
|
||||
@switch (option.type) {
|
||||
|
||||
@@ -144,4 +144,18 @@ describe('ConfigComponent', () => {
|
||||
component.uploadFile(new File([], 'test.png'), 'app_logo')
|
||||
expect(initSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should reset option to null', () => {
|
||||
component.configForm.patchValue({ output_type: OutputTypeConfig.PDF_A })
|
||||
expect(component.isSet('output_type')).toBeTruthy()
|
||||
component.resetOption('output_type')
|
||||
expect(component.configForm.get('output_type').value).toBeNull()
|
||||
expect(component.isSet('output_type')).toBeFalsy()
|
||||
component.configForm.patchValue({ app_title: 'Test Title' })
|
||||
component.resetOption('app_title')
|
||||
expect(component.configForm.get('app_title').value).toBeNull()
|
||||
component.configForm.patchValue({ barcodes_enabled: true })
|
||||
component.resetOption('barcodes_enabled')
|
||||
expect(component.configForm.get('barcodes_enabled').value).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -208,4 +208,12 @@ export class ConfigComponent
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
public isSet(key: string): boolean {
|
||||
return this.configForm.get(key).value != null
|
||||
}
|
||||
|
||||
public resetOption(key: string) {
|
||||
this.configForm.get(key).setValue(null)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,6 +26,7 @@ import {
|
||||
} from 'src/app/data/matching-model'
|
||||
import { Tag } from 'src/app/data/tag'
|
||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||
import { TagService } from 'src/app/services/rest/tag.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
@@ -87,6 +88,7 @@ describe('EditDialogComponent', () => {
|
||||
let component: TestComponent
|
||||
let fixture: ComponentFixture<TestComponent>
|
||||
let tagService: TagService
|
||||
let permissionsService: PermissionsService
|
||||
let settingsService: SettingsService
|
||||
let activeModal: NgbActiveModal
|
||||
let httpTestingController: HttpTestingController
|
||||
@@ -118,8 +120,10 @@ describe('EditDialogComponent', () => {
|
||||
}).compileComponents()
|
||||
|
||||
tagService = TestBed.inject(TagService)
|
||||
permissionsService = TestBed.inject(PermissionsService)
|
||||
settingsService = TestBed.inject(SettingsService)
|
||||
settingsService.currentUser = currentUser
|
||||
permissionsService.initialize([], currentUser as any)
|
||||
activeModal = TestBed.inject(NgbActiveModal)
|
||||
httpTestingController = TestBed.inject(HttpTestingController)
|
||||
|
||||
@@ -226,6 +230,25 @@ describe('EditDialogComponent', () => {
|
||||
expect(updateSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should not submit owner or permissions for non-owner edits', () => {
|
||||
component.object = tag
|
||||
component.dialogMode = EditDialogMode.EDIT
|
||||
component.ngOnInit()
|
||||
|
||||
component.objectForm.get('name').setValue('Updated tag')
|
||||
component.save()
|
||||
|
||||
const req = httpTestingController.expectOne(
|
||||
`${environment.apiBaseUrl}tags/${tag.id}/`
|
||||
)
|
||||
expect(req.request.method).toEqual('PUT')
|
||||
expect(req.request.body.name).toEqual('Updated tag')
|
||||
expect(req.request.body.owner).toEqual(tag.owner)
|
||||
expect(req.request.body.set_permissions).toBeUndefined()
|
||||
|
||||
req.flush({})
|
||||
})
|
||||
|
||||
it('should create an object on save in edit mode', () => {
|
||||
const createSpy = jest.spyOn(tagService, 'create')
|
||||
component.dialogMode = EditDialogMode.CREATE
|
||||
|
||||
@@ -18,6 +18,7 @@ import { ObjectWithId } from 'src/app/data/object-with-id'
|
||||
import { ObjectWithPermissions } from 'src/app/data/object-with-permissions'
|
||||
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
|
||||
import { User } from 'src/app/data/user'
|
||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||
import { AbstractPaperlessService } from 'src/app/services/rest/abstract-paperless-service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
@@ -42,6 +43,7 @@ export abstract class EditDialogComponent<
|
||||
protected activeModal = inject(NgbActiveModal)
|
||||
protected userService = inject(UserService)
|
||||
protected settingsService = inject(SettingsService)
|
||||
protected permissionsService = inject(PermissionsService)
|
||||
|
||||
users: User[]
|
||||
|
||||
@@ -69,10 +71,6 @@ export abstract class EditDialogComponent<
|
||||
|
||||
ngOnInit(): void {
|
||||
if (this.object != null && this.dialogMode !== EditDialogMode.CREATE) {
|
||||
if ((this.object as ObjectWithPermissions).permissions) {
|
||||
this.object['set_permissions'] = this.object['permissions']
|
||||
}
|
||||
|
||||
this.object['permissions_form'] = {
|
||||
owner: (this.object as ObjectWithPermissions).owner,
|
||||
set_permissions: (this.object as ObjectWithPermissions).permissions,
|
||||
@@ -151,18 +149,28 @@ export abstract class EditDialogComponent<
|
||||
return Object.assign({}, this.objectForm.value)
|
||||
}
|
||||
|
||||
protected shouldSubmitPermissions(): boolean {
|
||||
return (
|
||||
this.dialogMode === EditDialogMode.CREATE ||
|
||||
this.permissionsService.currentUserOwnsObject(this.object)
|
||||
)
|
||||
}
|
||||
|
||||
save() {
|
||||
this.error = null
|
||||
const formValues = this.getFormValues()
|
||||
const permissionsObject: PermissionsFormObject =
|
||||
this.objectForm.get('permissions_form')?.value
|
||||
if (permissionsObject) {
|
||||
if (permissionsObject && this.shouldSubmitPermissions()) {
|
||||
formValues.owner = permissionsObject.owner
|
||||
formValues.set_permissions = permissionsObject.set_permissions
|
||||
delete formValues.permissions_form
|
||||
}
|
||||
delete formValues.permissions_form
|
||||
|
||||
var newObject = Object.assign(Object.assign({}, this.object), formValues)
|
||||
if (!this.shouldSubmitPermissions()) {
|
||||
delete newObject['set_permissions']
|
||||
}
|
||||
var serverResponse: Observable<T>
|
||||
switch (this.dialogMode) {
|
||||
case EditDialogMode.CREATE:
|
||||
|
||||
-2
@@ -9,7 +9,6 @@ import { first } from 'rxjs'
|
||||
import { EditDialogComponent } from 'src/app/components/common/edit-dialog/edit-dialog.component'
|
||||
import { Group } from 'src/app/data/group'
|
||||
import { User } from 'src/app/data/user'
|
||||
import { PermissionsService } from 'src/app/services/permissions.service'
|
||||
import { GroupService } from 'src/app/services/rest/group.service'
|
||||
import { UserService } from 'src/app/services/rest/user.service'
|
||||
import { SettingsService } from 'src/app/services/settings.service'
|
||||
@@ -39,7 +38,6 @@ export class UserEditDialogComponent
|
||||
implements OnInit
|
||||
{
|
||||
private toastService = inject(ToastService)
|
||||
private permissionsService = inject(PermissionsService)
|
||||
private groupsService: GroupService
|
||||
|
||||
groups: Group[]
|
||||
|
||||
+53
@@ -631,6 +631,59 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
|
||||
])
|
||||
})
|
||||
|
||||
it('deselecting a parent clears selected descendants', () => {
|
||||
const root: Tag = { id: 100, name: 'Root Tag' }
|
||||
const child: Tag = { id: 101, name: 'Child Tag', parent: root.id }
|
||||
const grandchild: Tag = {
|
||||
id: 102,
|
||||
name: 'Grandchild Tag',
|
||||
parent: child.id,
|
||||
}
|
||||
const other: Tag = { id: 103, name: 'Other Tag' }
|
||||
|
||||
selectionModel.items = [root, child, grandchild, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(grandchild.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Selected, false)
|
||||
|
||||
selectionModel.toggle(root.id, false)
|
||||
|
||||
expect(selectionModel.getSelectedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('un-excluding a parent clears excluded descendants', () => {
|
||||
const root: Tag = { id: 110, name: 'Root Tag' }
|
||||
const child: Tag = { id: 111, name: 'Child Tag', parent: root.id }
|
||||
const other: Tag = { id: 112, name: 'Other Tag' }
|
||||
|
||||
selectionModel.items = [root, child, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Excluded, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Excluded, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Excluded, false)
|
||||
|
||||
selectionModel.exclude(root.id, false)
|
||||
|
||||
expect(selectionModel.getExcludedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('excluding a selected parent clears selected descendants', () => {
|
||||
const root: Tag = { id: 120, name: 'Root Tag' }
|
||||
const child: Tag = { id: 121, name: 'Child Tag', parent: root.id }
|
||||
const other: Tag = { id: 122, name: 'Other Tag' }
|
||||
|
||||
selectionModel.manyToOne = true
|
||||
selectionModel.items = [root, child, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Selected, false)
|
||||
|
||||
selectionModel.exclude(root.id, false)
|
||||
|
||||
expect(selectionModel.getExcludedItems()).toEqual([root])
|
||||
expect(selectionModel.getSelectedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('resorts items immediately when document count sorting enabled', () => {
|
||||
const apple: Tag = { id: 55, name: 'Apple' }
|
||||
const zebra: Tag = { id: 56, name: 'Zebra' }
|
||||
|
||||
@@ -231,6 +231,7 @@ export class FilterableDropdownSelectionModel {
|
||||
state == ToggleableItemState.Excluded
|
||||
) {
|
||||
this.temporarySelectionStates.delete(id)
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
@@ -257,6 +258,7 @@ export class FilterableDropdownSelectionModel {
|
||||
|
||||
if (this.manyToOne || this.singleSelect) {
|
||||
this.temporarySelectionStates.set(id, ToggleableItemState.Excluded)
|
||||
this.clearDescendantSelections(id)
|
||||
|
||||
if (this.singleSelect) {
|
||||
for (let key of this.temporarySelectionStates.keys()) {
|
||||
@@ -277,9 +279,15 @@ export class FilterableDropdownSelectionModel {
|
||||
newState = ToggleableItemState.NotSelected
|
||||
}
|
||||
this.temporarySelectionStates.set(id, newState)
|
||||
if (newState == ToggleableItemState.Excluded) {
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
}
|
||||
} else if (!id || state == ToggleableItemState.Excluded) {
|
||||
this.temporarySelectionStates.delete(id)
|
||||
if (id) {
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
}
|
||||
|
||||
if (fireEvent) {
|
||||
@@ -291,6 +299,33 @@ export class FilterableDropdownSelectionModel {
|
||||
return this.selectionStates.get(id) || ToggleableItemState.NotSelected
|
||||
}
|
||||
|
||||
private clearDescendantSelections(id: number) {
|
||||
for (const descendantID of this.getDescendantIDs(id)) {
|
||||
this.temporarySelectionStates.delete(descendantID)
|
||||
}
|
||||
}
|
||||
|
||||
private getDescendantIDs(id: number): number[] {
|
||||
const descendants: number[] = []
|
||||
const queue: number[] = [id]
|
||||
|
||||
while (queue.length) {
|
||||
const parentID = queue.shift()
|
||||
for (const item of this._items) {
|
||||
if (
|
||||
typeof item?.id === 'number' &&
|
||||
typeof (item as any)['parent'] === 'number' &&
|
||||
(item as any)['parent'] === parentID
|
||||
) {
|
||||
descendants.push(item.id)
|
||||
queue.push(item.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return descendants
|
||||
}
|
||||
|
||||
get logicalOperator(): LogicalOperator {
|
||||
return this.temporaryLogicalOperator
|
||||
}
|
||||
|
||||
@@ -205,6 +205,20 @@ describe('TagsComponent', () => {
|
||||
expect(component.value).toEqual([2, 1])
|
||||
})
|
||||
|
||||
it('should not duplicate parents when adding sibling nested tags', () => {
|
||||
const root: Tag = { id: 1, name: 'root' }
|
||||
const parent: Tag = { id: 2, name: 'parent', parent: 1 }
|
||||
const leafA: Tag = { id: 3, name: 'leaf-a', parent: 2 }
|
||||
const leafB: Tag = { id: 4, name: 'leaf-b', parent: 2 }
|
||||
component.tags = [root, parent, leafA, leafB]
|
||||
|
||||
component.value = []
|
||||
component.addTag(3)
|
||||
component.addTag(4)
|
||||
|
||||
expect(component.value).toEqual([3, 2, 1, 4])
|
||||
})
|
||||
|
||||
it('should return ancestors from root to parent using getParentChain', () => {
|
||||
const root: Tag = { id: 1, name: 'root' }
|
||||
const mid: Tag = { id: 2, name: 'mid', parent: 1 }
|
||||
|
||||
@@ -153,11 +153,13 @@ export class TagsComponent implements OnInit, ControlValueAccessor {
|
||||
}
|
||||
|
||||
public onAdd(tag: Tag) {
|
||||
if (tag.parent) {
|
||||
if (tag?.parent) {
|
||||
// add all parents recursively
|
||||
const parent = this.getTag(tag.parent)
|
||||
this.value = [...this.value, parent.id]
|
||||
this.onAdd(parent)
|
||||
if (parent && !this.value.includes(parent.id)) {
|
||||
this.value = [...this.value, parent.id]
|
||||
this.onAdd(parent)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
+1
-1
@@ -15,7 +15,7 @@
|
||||
}
|
||||
|
||||
@if (document && displayFields?.includes(DisplayField.TAGS)) {
|
||||
<div class="tags d-flex flex-column text-end position-absolute me-1 fs-6">
|
||||
<div class="tags d-flex flex-column text-end position-absolute me-1 fs-6" [class.tags-no-wrap]="document.tags.length > 3">
|
||||
@for (tagID of tagIDs; track tagID) {
|
||||
<pngx-tag [tagID]="tagID" (click)="clickTag.emit(tagID);$event.stopPropagation()" [clickable]="true" linkTitle="Toggle tag filter" i18n-linkTitle></pngx-tag>
|
||||
}
|
||||
|
||||
+10
@@ -72,4 +72,14 @@ a {
|
||||
max-width: 80%;
|
||||
row-gap: .2rem;
|
||||
line-height: 1;
|
||||
|
||||
&.tags-no-wrap {
|
||||
::ng-deep .badge {
|
||||
display: inline-block;
|
||||
max-width: 100%;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
+10
@@ -82,6 +82,16 @@ describe('DocumentCardSmallComponent', () => {
|
||||
).toHaveLength(6)
|
||||
})
|
||||
|
||||
it('should clear hidden tag counter when tag count falls below the limit', () => {
|
||||
expect(component.moreTags).toEqual(3)
|
||||
|
||||
component.document.tags = [1, 2, 3, 4, 5, 6]
|
||||
fixture.detectChanges()
|
||||
|
||||
expect(component.moreTags).toBeNull()
|
||||
expect(fixture.nativeElement.textContent).not.toContain('+ 3')
|
||||
})
|
||||
|
||||
it('should try to close the preview on mouse leave', () => {
|
||||
component.popupPreview = {
|
||||
close: jest.fn(),
|
||||
|
||||
+1
@@ -126,6 +126,7 @@ export class DocumentCardSmallComponent
|
||||
this.moreTags = this.document.tags.length - (limit - 1)
|
||||
return this.document.tags.slice(0, limit - 1)
|
||||
} else {
|
||||
this.moreTags = null
|
||||
return this.document.tags
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,9 +62,9 @@
|
||||
|
||||
@if (!loading) {
|
||||
<div class="d-flex mb-2">
|
||||
@if (collectionSize > 0) {
|
||||
@if (displayCollectionSize > 0) {
|
||||
<div>
|
||||
<ng-container i18n>{collectionSize, plural, =1 {One {{typeName}}} other {{{collectionSize || 0}} total {{typeNamePlural}}}}</ng-container>
|
||||
<ng-container i18n>{displayCollectionSize, plural, =1 {One {{typeName}}} other {{{displayCollectionSize || 0}} total {{typeNamePlural}}}}</ng-container>
|
||||
@if (selectedObjects.size > 0) {
|
||||
({{selectedObjects.size}} selected)
|
||||
}
|
||||
|
||||
@@ -229,7 +229,7 @@ describe('ManagementListComponent', () => {
|
||||
expect(reloadSpy).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should use the all list length for collection size when provided', fakeAsync(() => {
|
||||
it('should use API count for pagination and all ids for displayed total', fakeAsync(() => {
|
||||
jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
|
||||
of({
|
||||
count: 1,
|
||||
@@ -241,7 +241,8 @@ describe('ManagementListComponent', () => {
|
||||
component.reloadData()
|
||||
tick(100)
|
||||
|
||||
expect(component.collectionSize).toBe(3)
|
||||
expect(component.collectionSize).toBe(1)
|
||||
expect(component.displayCollectionSize).toBe(3)
|
||||
}))
|
||||
|
||||
it('should support quick filter for objects', () => {
|
||||
|
||||
@@ -23,6 +23,7 @@ import {
|
||||
MatchingModel,
|
||||
} from 'src/app/data/matching-model'
|
||||
import { ObjectWithPermissions } from 'src/app/data/object-with-permissions'
|
||||
import { Results } from 'src/app/data/results'
|
||||
import {
|
||||
SortableDirective,
|
||||
SortEvent,
|
||||
@@ -88,6 +89,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
public page = 1
|
||||
|
||||
public collectionSize = 0
|
||||
public displayCollectionSize = 0
|
||||
|
||||
public sortField: string
|
||||
public sortReverse: boolean
|
||||
@@ -141,6 +143,14 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
return data
|
||||
}
|
||||
|
||||
protected getCollectionSize(results: Results<T>): number {
|
||||
return results.all?.length ?? results.count
|
||||
}
|
||||
|
||||
protected getDisplayCollectionSize(results: Results<T>): number {
|
||||
return this.getCollectionSize(results)
|
||||
}
|
||||
|
||||
getDocumentCount(object: MatchingModel): number {
|
||||
return (
|
||||
object.document_count ??
|
||||
@@ -171,7 +181,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>
|
||||
tap((c) => {
|
||||
this.unfilteredData = c.results
|
||||
this.data = this.filterData(c.results)
|
||||
this.collectionSize = c.all?.length ?? c.count
|
||||
this.collectionSize = this.getCollectionSize(c)
|
||||
this.displayCollectionSize = this.getDisplayCollectionSize(c)
|
||||
}),
|
||||
delay(100)
|
||||
)
|
||||
|
||||
@@ -7,6 +7,7 @@ import {
|
||||
} from '@ng-bootstrap/ng-bootstrap'
|
||||
import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
|
||||
import { FILTER_HAS_TAGS_ALL } from 'src/app/data/filter-rule-type'
|
||||
import { Results } from 'src/app/data/results'
|
||||
import { Tag } from 'src/app/data/tag'
|
||||
import { IfPermissionsDirective } from 'src/app/directives/if-permissions.directive'
|
||||
import { SortableDirective } from 'src/app/directives/sortable.directive'
|
||||
@@ -77,6 +78,16 @@ export class TagListComponent extends ManagementListComponent<Tag> {
|
||||
return data.filter((tag) => !tag.parent || !availableIds.has(tag.parent))
|
||||
}
|
||||
|
||||
protected override getCollectionSize(results: Results<Tag>): number {
|
||||
// Tag list pages are requested with is_root=true (when unfiltered), so
|
||||
// pagination must follow root count even though `all` includes descendants
|
||||
return results.count
|
||||
}
|
||||
|
||||
protected override getDisplayCollectionSize(results: Results<Tag>): number {
|
||||
return super.getCollectionSize(results)
|
||||
}
|
||||
|
||||
protected override getSelectableIDs(tags: Tag[]): number[] {
|
||||
const ids: number[] = []
|
||||
for (const tag of tags.filter(Boolean)) {
|
||||
|
||||
@@ -0,0 +1,154 @@
|
||||
import { HttpErrorResponse, HttpRequest } from '@angular/common/http'
|
||||
import { TestBed } from '@angular/core/testing'
|
||||
import { throwError } from 'rxjs'
|
||||
import * as navUtils from '../utils/navigation'
|
||||
import { AuthExpiryInterceptor } from './auth-expiry.interceptor'
|
||||
|
||||
describe('AuthExpiryInterceptor', () => {
|
||||
let interceptor: AuthExpiryInterceptor
|
||||
let dateNowSpy: jest.SpiedFunction<typeof Date.now>
|
||||
|
||||
beforeEach(() => {
|
||||
TestBed.configureTestingModule({
|
||||
providers: [AuthExpiryInterceptor],
|
||||
})
|
||||
|
||||
interceptor = TestBed.inject(AuthExpiryInterceptor)
|
||||
dateNowSpy = jest.spyOn(Date, 'now').mockReturnValue(1000)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
jest.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('reloads when an API request returns 401', () => {
|
||||
const reloadSpy = jest
|
||||
.spyOn(navUtils, 'locationReload')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
interceptor
|
||||
.intercept(new HttpRequest('GET', '/api/documents/'), {
|
||||
handle: (_request) =>
|
||||
throwError(
|
||||
() =>
|
||||
new HttpErrorResponse({
|
||||
status: 401,
|
||||
url: '/api/documents/',
|
||||
})
|
||||
),
|
||||
})
|
||||
.subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
|
||||
expect(reloadSpy).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('does not reload for non-401 errors', () => {
|
||||
const reloadSpy = jest
|
||||
.spyOn(navUtils, 'locationReload')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
interceptor
|
||||
.intercept(new HttpRequest('GET', '/api/documents/'), {
|
||||
handle: (_request) =>
|
||||
throwError(
|
||||
() =>
|
||||
new HttpErrorResponse({
|
||||
status: 500,
|
||||
url: '/api/documents/',
|
||||
})
|
||||
),
|
||||
})
|
||||
.subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
|
||||
expect(reloadSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not reload for non-api 401 responses', () => {
|
||||
const reloadSpy = jest
|
||||
.spyOn(navUtils, 'locationReload')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
interceptor
|
||||
.intercept(new HttpRequest('GET', '/accounts/profile/'), {
|
||||
handle: (_request) =>
|
||||
throwError(
|
||||
() =>
|
||||
new HttpErrorResponse({
|
||||
status: 401,
|
||||
url: '/accounts/profile/',
|
||||
})
|
||||
),
|
||||
})
|
||||
.subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
|
||||
expect(reloadSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('reloads only once even with multiple API 401 responses', () => {
|
||||
const reloadSpy = jest
|
||||
.spyOn(navUtils, 'locationReload')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
const request = new HttpRequest('GET', '/api/documents/')
|
||||
const handler = {
|
||||
handle: (_request) =>
|
||||
throwError(
|
||||
() =>
|
||||
new HttpErrorResponse({
|
||||
status: 401,
|
||||
url: '/api/documents/',
|
||||
})
|
||||
),
|
||||
}
|
||||
|
||||
interceptor.intercept(request, handler).subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
interceptor.intercept(request, handler).subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
|
||||
expect(reloadSpy).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('retries reload after cooldown for repeated API 401 responses', () => {
|
||||
const reloadSpy = jest
|
||||
.spyOn(navUtils, 'locationReload')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
dateNowSpy
|
||||
.mockReturnValueOnce(1000)
|
||||
.mockReturnValueOnce(2500)
|
||||
.mockReturnValueOnce(3501)
|
||||
|
||||
const request = new HttpRequest('GET', '/api/documents/')
|
||||
const handler = {
|
||||
handle: (_request) =>
|
||||
throwError(
|
||||
() =>
|
||||
new HttpErrorResponse({
|
||||
status: 401,
|
||||
url: '/api/documents/',
|
||||
})
|
||||
),
|
||||
}
|
||||
|
||||
interceptor.intercept(request, handler).subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
interceptor.intercept(request, handler).subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
interceptor.intercept(request, handler).subscribe({
|
||||
error: () => undefined,
|
||||
})
|
||||
|
||||
expect(reloadSpy).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,38 @@
|
||||
import {
|
||||
HttpErrorResponse,
|
||||
HttpEvent,
|
||||
HttpHandler,
|
||||
HttpInterceptor,
|
||||
HttpRequest,
|
||||
} from '@angular/common/http'
|
||||
import { Injectable } from '@angular/core'
|
||||
import { catchError, Observable, throwError } from 'rxjs'
|
||||
import { locationReload } from '../utils/navigation'
|
||||
|
||||
@Injectable()
|
||||
export class AuthExpiryInterceptor implements HttpInterceptor {
|
||||
private lastReloadAttempt = Number.NEGATIVE_INFINITY
|
||||
|
||||
intercept(
|
||||
request: HttpRequest<unknown>,
|
||||
next: HttpHandler
|
||||
): Observable<HttpEvent<unknown>> {
|
||||
return next.handle(request).pipe(
|
||||
catchError((error: unknown) => {
|
||||
if (
|
||||
error instanceof HttpErrorResponse &&
|
||||
error.status === 401 &&
|
||||
request.url.includes('/api/')
|
||||
) {
|
||||
const now = Date.now()
|
||||
if (now - this.lastReloadAttempt >= 2000) {
|
||||
this.lastReloadAttempt = now
|
||||
locationReload()
|
||||
}
|
||||
}
|
||||
|
||||
return throwError(() => error)
|
||||
})
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -142,6 +142,21 @@ describe('CustomFieldQueryAtom', () => {
|
||||
atom.value = [1, 3]
|
||||
expect(changeSpy).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should emit one changed event when operator change coerces value', () => {
|
||||
const atom = new CustomFieldQueryAtom([
|
||||
1,
|
||||
CustomFieldQueryOperator.In,
|
||||
[1, 2],
|
||||
])
|
||||
const changeSpy = jest.fn()
|
||||
atom.changed.subscribe(changeSpy)
|
||||
|
||||
atom.operator = CustomFieldQueryOperator.Exact
|
||||
|
||||
expect(changeSpy).toHaveBeenCalledTimes(1)
|
||||
expect(atom.serialize()).toEqual([1, CustomFieldQueryOperator.Exact, ''])
|
||||
})
|
||||
})
|
||||
|
||||
describe('CustomFieldQueryExpression', () => {
|
||||
|
||||
@@ -70,29 +70,29 @@ export class CustomFieldQueryAtom extends CustomFieldQueryElement {
|
||||
const newTypes: string[] =
|
||||
CUSTOM_FIELD_QUERY_VALUE_TYPES_BY_OPERATOR[operator]?.split('|')
|
||||
if (!newTypes) {
|
||||
this.value = null
|
||||
this._value = null
|
||||
} else {
|
||||
if (!newTypes.includes(typeof this.value)) {
|
||||
switch (newTypes[0]) {
|
||||
case 'string':
|
||||
this.value = ''
|
||||
this._value = ''
|
||||
break
|
||||
case 'boolean':
|
||||
this.value = 'true'
|
||||
this._value = 'true'
|
||||
break
|
||||
case 'array':
|
||||
this.value = []
|
||||
this._value = []
|
||||
break
|
||||
case 'number':
|
||||
const num = parseFloat(this.value as string)
|
||||
this.value = isNaN(num) ? null : num.toString()
|
||||
this._value = isNaN(num) ? null : num.toString()
|
||||
break
|
||||
}
|
||||
} else if (
|
||||
['true', 'false'].includes(this.value as string) &&
|
||||
newTypes.includes('string')
|
||||
) {
|
||||
this.value = ''
|
||||
this._value = ''
|
||||
}
|
||||
}
|
||||
super.operator = operator
|
||||
@@ -103,7 +103,13 @@ export class CustomFieldQueryAtom extends CustomFieldQueryElement {
|
||||
return super.operator
|
||||
}
|
||||
|
||||
constructor(queryArray: [number, string, string] = [null, null, null]) {
|
||||
constructor(
|
||||
queryArray: [number, string, string | string[] | number[]] = [
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
]
|
||||
) {
|
||||
super(CustomFieldQueryElementType.Atom)
|
||||
;[this._field, this._operator, this._value] = queryArray
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ export const environment = {
|
||||
apiVersion: '9', // match src/paperless/settings.py
|
||||
appTitle: 'Paperless-ngx',
|
||||
tag: 'prod',
|
||||
version: '2.20.8',
|
||||
version: '2.20.15',
|
||||
webSocketHost: window.location.host,
|
||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||
|
||||
@@ -147,6 +147,7 @@ import { DirtyDocGuard } from './app/guards/dirty-doc.guard'
|
||||
import { DirtySavedViewGuard } from './app/guards/dirty-saved-view.guard'
|
||||
import { PermissionsGuard } from './app/guards/permissions.guard'
|
||||
import { ApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
|
||||
import { AuthExpiryInterceptor } from './app/interceptors/auth-expiry.interceptor'
|
||||
import { CsrfInterceptor } from './app/interceptors/csrf.interceptor'
|
||||
import { DocumentTitlePipe } from './app/pipes/document-title.pipe'
|
||||
import { FilterPipe } from './app/pipes/filter.pipe'
|
||||
@@ -390,6 +391,11 @@ bootstrapApplication(AppComponent, {
|
||||
useClass: ApiVersionInterceptor,
|
||||
multi: true,
|
||||
},
|
||||
{
|
||||
provide: HTTP_INTERCEPTORS,
|
||||
useClass: AuthExpiryInterceptor,
|
||||
multi: true,
|
||||
},
|
||||
FilterPipe,
|
||||
DocumentTitlePipe,
|
||||
{ provide: NgbDateAdapter, useClass: ISODateAdapter },
|
||||
|
||||
@@ -149,6 +149,15 @@ $form-check-radio-checked-bg-image-dark: url("data:image/svg+xml,<svg xmlns='htt
|
||||
background-color: var(--pngx-body-color-accent);
|
||||
}
|
||||
|
||||
.list-group-item-action:not(.active):active {
|
||||
--bs-list-group-action-active-color: var(--bs-body-color);
|
||||
--bs-list-group-action-active-bg: var(--pngx-bg-darker);
|
||||
}
|
||||
|
||||
.form-control:hover::file-selector-button {
|
||||
background-color:var(--pngx-bg-dark) !important
|
||||
}
|
||||
|
||||
.search-container {
|
||||
input, input:focus, i-bs[name="search"] , ::placeholder {
|
||||
color: var(--pngx-primary-text-contrast) !important;
|
||||
|
||||
@@ -19,6 +19,7 @@ from documents.classifier import load_classifier
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.file_handling import create_source_path_directory
|
||||
from documents.file_handling import generate_filename
|
||||
from documents.file_handling import generate_unique_filename
|
||||
from documents.loggers import LoggingMixin
|
||||
from documents.models import Correspondent
|
||||
@@ -493,7 +494,19 @@ class ConsumerPlugin(
|
||||
# After everything is in the database, copy the files into
|
||||
# place. If this fails, we'll also rollback the transaction.
|
||||
with FileLock(settings.MEDIA_LOCK):
|
||||
document.filename = generate_unique_filename(document)
|
||||
generated_filename = generate_unique_filename(document)
|
||||
if (
|
||||
len(str(generated_filename))
|
||||
> Document.MAX_STORED_FILENAME_LENGTH
|
||||
):
|
||||
self.log.warning(
|
||||
"Generated source filename exceeds db path limit, falling back to default naming",
|
||||
)
|
||||
generated_filename = generate_filename(
|
||||
document,
|
||||
use_format=False,
|
||||
)
|
||||
document.filename = generated_filename
|
||||
create_source_path_directory(document.source_path)
|
||||
|
||||
self._write(
|
||||
@@ -511,10 +524,23 @@ class ConsumerPlugin(
|
||||
)
|
||||
|
||||
if archive_path and Path(archive_path).is_file():
|
||||
document.archive_filename = generate_unique_filename(
|
||||
generated_archive_filename = generate_unique_filename(
|
||||
document,
|
||||
archive_filename=True,
|
||||
)
|
||||
if (
|
||||
len(str(generated_archive_filename))
|
||||
> Document.MAX_STORED_FILENAME_LENGTH
|
||||
):
|
||||
self.log.warning(
|
||||
"Generated archive filename exceeds db path limit, falling back to default naming",
|
||||
)
|
||||
generated_archive_filename = generate_filename(
|
||||
document,
|
||||
archive_filename=True,
|
||||
use_format=False,
|
||||
)
|
||||
document.archive_filename = generated_archive_filename
|
||||
create_source_path_directory(document.archive_path)
|
||||
self._write(
|
||||
document.storage_type,
|
||||
|
||||
@@ -128,17 +128,21 @@ def generate_filename(
|
||||
counter=0,
|
||||
append_gpg=True,
|
||||
archive_filename=False,
|
||||
use_format=True,
|
||||
) -> Path:
|
||||
base_path: Path | None = None
|
||||
|
||||
# Determine the source of the format string
|
||||
if doc.storage_path is not None:
|
||||
filename_format = doc.storage_path.path
|
||||
elif settings.FILENAME_FORMAT is not None:
|
||||
# Maybe convert old to new style
|
||||
filename_format = convert_format_str_to_template_format(
|
||||
settings.FILENAME_FORMAT,
|
||||
)
|
||||
if use_format:
|
||||
if doc.storage_path is not None:
|
||||
filename_format = doc.storage_path.path
|
||||
elif settings.FILENAME_FORMAT is not None:
|
||||
# Maybe convert old to new style
|
||||
filename_format = convert_format_str_to_template_format(
|
||||
settings.FILENAME_FORMAT,
|
||||
)
|
||||
else:
|
||||
filename_format = None
|
||||
else:
|
||||
filename_format = None
|
||||
|
||||
|
||||
@@ -470,7 +470,14 @@ class DelayedFullTextQuery(DelayedQuery):
|
||||
try:
|
||||
corrected = self.searcher.correct_query(q, q_str)
|
||||
if corrected.string != q_str:
|
||||
suggested_correction = corrected.string
|
||||
corrected_results = self.searcher.search(
|
||||
corrected.query,
|
||||
limit=1,
|
||||
filter=MappedDocIdSet(self.filter_queryset, self.searcher.ixreader),
|
||||
scored=False,
|
||||
)
|
||||
if len(corrected_results) > 0:
|
||||
suggested_correction = corrected.string
|
||||
except Exception as e:
|
||||
logger.info(
|
||||
"Error while correcting query %s: %s",
|
||||
|
||||
@@ -160,6 +160,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
(STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
|
||||
(STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard")),
|
||||
)
|
||||
MAX_STORED_FILENAME_LENGTH: Final[int] = 1024
|
||||
|
||||
correspondent = models.ForeignKey(
|
||||
Correspondent,
|
||||
@@ -267,7 +268,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
filename = models.FilePathField(
|
||||
_("filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=True,
|
||||
@@ -277,7 +278,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
archive_filename = models.FilePathField(
|
||||
_("archive filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=True,
|
||||
@@ -287,7 +288,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
original_filename = models.CharField(
|
||||
_("original filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=False,
|
||||
|
||||
@@ -75,6 +75,7 @@ from documents.parsers import is_mime_type_supported
|
||||
from documents.permissions import get_document_count_filter_for_user
|
||||
from documents.permissions import get_groups_with_only_permission
|
||||
from documents.permissions import get_objects_for_user_owner_aware
|
||||
from documents.permissions import has_perms_owner_aware
|
||||
from documents.permissions import set_permissions_for_object
|
||||
from documents.regex import validate_regex_pattern
|
||||
from documents.templating.filepath import validate_filepath_template_and_render
|
||||
@@ -852,6 +853,25 @@ class ReadWriteSerializerMethodField(serializers.SerializerMethodField):
|
||||
return {self.field_name: data}
|
||||
|
||||
|
||||
def validate_documentlink_targets(user, doc_ids):
|
||||
if Document.objects.filter(id__in=doc_ids).count() != len(doc_ids):
|
||||
raise serializers.ValidationError(
|
||||
"Some documents in value don't exist or were specified twice.",
|
||||
)
|
||||
|
||||
if user is None:
|
||||
return
|
||||
|
||||
target_documents = Document.objects.filter(id__in=doc_ids).select_related("owner")
|
||||
if not all(
|
||||
has_perms_owner_aware(user, "change_document", document)
|
||||
for document in target_documents
|
||||
):
|
||||
raise PermissionDenied(
|
||||
_("Insufficient permissions."),
|
||||
)
|
||||
|
||||
|
||||
class CustomFieldInstanceSerializer(serializers.ModelSerializer):
|
||||
field = serializers.PrimaryKeyRelatedField(queryset=CustomField.objects.all())
|
||||
value = ReadWriteSerializerMethodField(allow_null=True)
|
||||
@@ -942,12 +962,13 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer):
|
||||
"Value must be a list",
|
||||
)
|
||||
doc_ids = data["value"]
|
||||
if Document.objects.filter(id__in=doc_ids).count() != len(
|
||||
data["value"],
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Some documents in value don't exist or were specified twice.",
|
||||
)
|
||||
request = self.context.get("request")
|
||||
validate_documentlink_targets(
|
||||
getattr(request, "user", None) if request is not None else None,
|
||||
doc_ids,
|
||||
)
|
||||
elif field.data_type == CustomField.FieldDataType.DATE:
|
||||
data["value"] = serializers.DateField().to_internal_value(data["value"])
|
||||
|
||||
return data
|
||||
|
||||
@@ -1497,6 +1518,19 @@ class BulkEditSerializer(
|
||||
f"Some custom fields in {name} don't exist or were specified twice.",
|
||||
)
|
||||
|
||||
if isinstance(custom_fields, dict):
|
||||
custom_field_map = CustomField.objects.in_bulk(ids)
|
||||
for raw_field_id, value in custom_fields.items():
|
||||
field = custom_field_map.get(int(raw_field_id))
|
||||
if (
|
||||
field is not None
|
||||
and field.data_type == CustomField.FieldDataType.DOCUMENTLINK
|
||||
and value is not None
|
||||
):
|
||||
if not isinstance(value, list):
|
||||
raise serializers.ValidationError("Value must be a list")
|
||||
validate_documentlink_targets(self.user, value)
|
||||
|
||||
def validate_method(self, method):
|
||||
if method == "set_correspondent":
|
||||
return bulk_edit.set_correspondent
|
||||
@@ -2179,6 +2213,17 @@ class ShareLinkSerializer(OwnedObjectSerializer):
|
||||
validated_data["slug"] = get_random_string(50)
|
||||
return super().create(validated_data)
|
||||
|
||||
def validate_document(self, document):
|
||||
if self.user is not None and has_perms_owner_aware(
|
||||
self.user,
|
||||
"view_document",
|
||||
document,
|
||||
):
|
||||
return document
|
||||
raise PermissionDenied(
|
||||
_("Insufficient permissions."),
|
||||
)
|
||||
|
||||
|
||||
class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
|
||||
objects = serializers.ListField(
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
@@ -391,6 +392,14 @@ class CannotMoveFilesException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
def _path_matches_checksum(path: Path, checksum: str | None) -> bool:
|
||||
if checksum is None or not path.is_file():
|
||||
return False
|
||||
|
||||
with path.open("rb") as f:
|
||||
return hashlib.md5(f.read()).hexdigest() == checksum
|
||||
|
||||
|
||||
def _filename_template_uses_custom_fields(doc: Document) -> bool:
|
||||
template = None
|
||||
if doc.storage_path is not None:
|
||||
@@ -460,8 +469,24 @@ def update_filename_and_move_files(
|
||||
|
||||
old_filename = instance.filename
|
||||
old_source_path = instance.source_path
|
||||
move_original = False
|
||||
original_already_moved = False
|
||||
|
||||
old_archive_filename = instance.archive_filename
|
||||
old_archive_path = instance.archive_path
|
||||
move_archive = False
|
||||
archive_already_moved = False
|
||||
|
||||
candidate_filename = generate_filename(instance)
|
||||
if len(str(candidate_filename)) > Document.MAX_STORED_FILENAME_LENGTH:
|
||||
msg = (
|
||||
f"Document {instance!s}: Generated filename exceeds db path "
|
||||
f"limit ({len(str(candidate_filename))} > "
|
||||
f"{Document.MAX_STORED_FILENAME_LENGTH}): {candidate_filename!s}"
|
||||
)
|
||||
logger.warning(msg)
|
||||
raise CannotMoveFilesException(msg)
|
||||
|
||||
candidate_source_path = (
|
||||
settings.ORIGINALS_DIR / candidate_filename
|
||||
).resolve()
|
||||
@@ -471,20 +496,34 @@ def update_filename_and_move_files(
|
||||
candidate_source_path.exists()
|
||||
and candidate_source_path != old_source_path
|
||||
):
|
||||
# Only fall back to unique search when there is an actual conflict
|
||||
new_filename = generate_unique_filename(instance)
|
||||
if not old_source_path.is_file() and _path_matches_checksum(
|
||||
candidate_source_path,
|
||||
instance.checksum,
|
||||
):
|
||||
new_filename = candidate_filename
|
||||
original_already_moved = True
|
||||
else:
|
||||
# Only fall back to unique search when there is an actual conflict
|
||||
new_filename = generate_unique_filename(instance)
|
||||
else:
|
||||
new_filename = candidate_filename
|
||||
|
||||
# Need to convert to string to be able to save it to the db
|
||||
instance.filename = str(new_filename)
|
||||
move_original = old_filename != instance.filename
|
||||
|
||||
old_archive_filename = instance.archive_filename
|
||||
old_archive_path = instance.archive_path
|
||||
move_original = (
|
||||
old_filename != instance.filename and not original_already_moved
|
||||
)
|
||||
|
||||
if instance.has_archive_version:
|
||||
archive_candidate = generate_filename(instance, archive_filename=True)
|
||||
if len(str(archive_candidate)) > Document.MAX_STORED_FILENAME_LENGTH:
|
||||
msg = (
|
||||
f"Document {instance!s}: Generated archive filename exceeds "
|
||||
f"db path limit ({len(str(archive_candidate))} > "
|
||||
f"{Document.MAX_STORED_FILENAME_LENGTH}): {archive_candidate!s}"
|
||||
)
|
||||
logger.warning(msg)
|
||||
raise CannotMoveFilesException(msg)
|
||||
archive_candidate_path = (
|
||||
settings.ARCHIVE_DIR / archive_candidate
|
||||
).resolve()
|
||||
@@ -494,24 +533,38 @@ def update_filename_and_move_files(
|
||||
archive_candidate_path.exists()
|
||||
and archive_candidate_path != old_archive_path
|
||||
):
|
||||
new_archive_filename = generate_unique_filename(
|
||||
instance,
|
||||
archive_filename=True,
|
||||
)
|
||||
if not old_archive_path.is_file() and _path_matches_checksum(
|
||||
archive_candidate_path,
|
||||
instance.archive_checksum,
|
||||
):
|
||||
new_archive_filename = archive_candidate
|
||||
archive_already_moved = True
|
||||
else:
|
||||
new_archive_filename = generate_unique_filename(
|
||||
instance,
|
||||
archive_filename=True,
|
||||
)
|
||||
else:
|
||||
new_archive_filename = archive_candidate
|
||||
|
||||
instance.archive_filename = str(new_archive_filename)
|
||||
|
||||
move_archive = old_archive_filename != instance.archive_filename
|
||||
move_archive = (
|
||||
old_archive_filename != instance.archive_filename
|
||||
and not archive_already_moved
|
||||
)
|
||||
else:
|
||||
move_archive = False
|
||||
|
||||
if not move_original and not move_archive:
|
||||
# Just update modified. Also, don't save() here to prevent infinite recursion.
|
||||
Document.objects.filter(pk=instance.pk).update(
|
||||
modified=timezone.now(),
|
||||
)
|
||||
updates = {"modified": timezone.now()}
|
||||
if old_filename != instance.filename:
|
||||
updates["filename"] = instance.filename
|
||||
if old_archive_filename != instance.archive_filename:
|
||||
updates["archive_filename"] = instance.archive_filename
|
||||
|
||||
# Don't save() here to prevent infinite recursion.
|
||||
Document.objects.filter(pk=instance.pk).update(**updates)
|
||||
return
|
||||
|
||||
if move_original:
|
||||
@@ -765,7 +818,6 @@ def run_workflows(
|
||||
# Refresh this so the matching data is fresh and instance fields are re-freshed
|
||||
# Otherwise, this instance might be behind and overwrite the work another process did
|
||||
document.refresh_from_db()
|
||||
doc_tag_ids = list(document.tags.values_list("pk", flat=True))
|
||||
|
||||
if matching.document_matches_workflow(document, workflow, trigger_type):
|
||||
action: WorkflowAction
|
||||
@@ -783,14 +835,13 @@ def run_workflows(
|
||||
apply_assignment_to_document(
|
||||
action,
|
||||
document,
|
||||
doc_tag_ids,
|
||||
logging_group,
|
||||
)
|
||||
elif action.type == WorkflowAction.WorkflowActionType.REMOVAL:
|
||||
if use_overrides and overrides:
|
||||
apply_removal_to_overrides(action, overrides)
|
||||
else:
|
||||
apply_removal_to_document(action, document, doc_tag_ids)
|
||||
apply_removal_to_document(action, document)
|
||||
elif action.type == WorkflowAction.WorkflowActionType.EMAIL:
|
||||
context = build_workflow_action_context(document, overrides)
|
||||
execute_email_action(
|
||||
@@ -814,9 +865,25 @@ def run_workflows(
|
||||
if not use_overrides:
|
||||
# limit title to 128 characters
|
||||
document.title = document.title[:128]
|
||||
# save first before setting tags
|
||||
document.save()
|
||||
document.tags.set(doc_tag_ids)
|
||||
# Save only the fields that workflow actions can set directly.
|
||||
# Deliberately excludes filename and archive_filename — those are
|
||||
# managed exclusively by update_filename_and_move_files via the
|
||||
# post_save signal. Writing stale in-memory values here would revert
|
||||
# a concurrent update_filename_and_move_files DB write, leaving the
|
||||
# DB pointing at the old path while the file is already at the new
|
||||
# one (see: https://github.com/paperless-ngx/paperless-ngx/issues/12386).
|
||||
# modified has auto_now=True but is not auto-added when update_fields
|
||||
# is specified, so it must be listed explicitly.
|
||||
document.save(
|
||||
update_fields=[
|
||||
"title",
|
||||
"correspondent",
|
||||
"document_type",
|
||||
"storage_path",
|
||||
"owner",
|
||||
"modified",
|
||||
],
|
||||
)
|
||||
|
||||
WorkflowRun.objects.create(
|
||||
workflow=workflow,
|
||||
|
||||
@@ -79,6 +79,23 @@ class PlaceholderString(str):
|
||||
NO_VALUE_PLACEHOLDER = PlaceholderString("-none-")
|
||||
|
||||
|
||||
class MatchingModelContext:
|
||||
"""
|
||||
Safe template context for related objects.
|
||||
|
||||
Keeps legacy behavior where including the object in a template yields the related object's
|
||||
name as a string, while still exposing limited attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, *, id: int, name: str, path: str | None = None):
|
||||
self.id = id
|
||||
self.name = name
|
||||
self.path = path
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
|
||||
_template_environment.undefined = _LogStrictUndefined
|
||||
|
||||
_template_environment.filters["get_cf_value"] = get_cf_value
|
||||
@@ -221,19 +238,26 @@ def get_safe_document_context(
|
||||
else None,
|
||||
"tags": [{"name": tag.name, "id": tag.id} for tag in tags],
|
||||
"correspondent": (
|
||||
{"name": document.correspondent.name, "id": document.correspondent.id}
|
||||
MatchingModelContext(
|
||||
name=document.correspondent.name,
|
||||
id=document.correspondent.id,
|
||||
)
|
||||
if document.correspondent
|
||||
else None
|
||||
),
|
||||
"document_type": (
|
||||
{"name": document.document_type.name, "id": document.document_type.id}
|
||||
MatchingModelContext(
|
||||
name=document.document_type.name,
|
||||
id=document.document_type.id,
|
||||
)
|
||||
if document.document_type
|
||||
else None
|
||||
),
|
||||
"storage_path": {
|
||||
"path": document.storage_path.path,
|
||||
"id": document.storage_path.id,
|
||||
}
|
||||
"storage_path": MatchingModelContext(
|
||||
name=document.storage_path.name,
|
||||
path=document.storage_path.path,
|
||||
id=document.storage_path.id,
|
||||
)
|
||||
if document.storage_path
|
||||
else None,
|
||||
}
|
||||
|
||||
@@ -160,3 +160,28 @@ class TestPaperlessAdmin(DirectoriesMixin, TestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
superuser.refresh_from_db()
|
||||
self.assertEqual(superuser.first_name, "Updated")
|
||||
|
||||
def test_superuser_can_only_be_deleted_by_superuser(self):
|
||||
superuser = User.objects.create_superuser(username="superuser", password="test")
|
||||
user = User.objects.create(
|
||||
username="test",
|
||||
is_superuser=False,
|
||||
is_staff=True,
|
||||
)
|
||||
delete_user_perm = Permission.objects.get(codename="delete_user")
|
||||
user.user_permissions.add(delete_user_perm)
|
||||
|
||||
self.client.force_login(user)
|
||||
response = self.client.delete(f"/api/users/{superuser.pk}/")
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
self.assertEqual(
|
||||
response.content.decode(),
|
||||
"Superusers can only be deleted by other superusers",
|
||||
)
|
||||
self.assertTrue(User.objects.filter(pk=superuser.pk).exists())
|
||||
|
||||
self.client.logout()
|
||||
self.client.force_login(superuser)
|
||||
response = self.client.delete(f"/api/users/{superuser.pk}/")
|
||||
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
|
||||
self.assertFalse(User.objects.filter(pk=superuser.pk).exists())
|
||||
|
||||
@@ -262,6 +262,50 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(kwargs["add_custom_fields"], [self.cf1.id])
|
||||
self.assertEqual(kwargs["remove_custom_fields"], [self.cf2.id])
|
||||
|
||||
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
|
||||
def test_api_modify_custom_fields_documentlink_forbidden_for_unpermitted_target(
|
||||
self,
|
||||
m,
|
||||
):
|
||||
self.setup_mock(m, "modify_custom_fields")
|
||||
user = User.objects.create_user(username="doc-owner")
|
||||
user.user_permissions.add(Permission.objects.get(codename="change_document"))
|
||||
other_user = User.objects.create_user(username="other-user")
|
||||
source_doc = Document.objects.create(
|
||||
checksum="source",
|
||||
title="Source",
|
||||
owner=user,
|
||||
)
|
||||
target_doc = Document.objects.create(
|
||||
checksum="target",
|
||||
title="Target",
|
||||
owner=other_user,
|
||||
)
|
||||
doclink_field = CustomField.objects.create(
|
||||
name="doclink",
|
||||
data_type=CustomField.FieldDataType.DOCUMENTLINK,
|
||||
)
|
||||
|
||||
self.client.force_authenticate(user=user)
|
||||
|
||||
response = self.client.post(
|
||||
"/api/documents/bulk_edit/",
|
||||
json.dumps(
|
||||
{
|
||||
"documents": [source_doc.id],
|
||||
"method": "modify_custom_fields",
|
||||
"parameters": {
|
||||
"add_custom_fields": {doclink_field.id: [target_doc.id]},
|
||||
"remove_custom_fields": [],
|
||||
},
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
m.assert_not_called()
|
||||
|
||||
@mock.patch("documents.serialisers.bulk_edit.modify_custom_fields")
|
||||
def test_api_modify_custom_fields_with_values(self, m):
|
||||
self.setup_mock(m, "modify_custom_fields")
|
||||
@@ -773,6 +817,22 @@ class TestBulkEditAPI(DirectoriesMixin, APITestCase):
|
||||
],
|
||||
)
|
||||
|
||||
def test_api_selection_data_requires_view_permission(self):
|
||||
self.doc2.owner = self.user
|
||||
self.doc2.save()
|
||||
|
||||
user1 = User.objects.create(username="user1")
|
||||
self.client.force_authenticate(user=user1)
|
||||
|
||||
response = self.client.post(
|
||||
"/api/documents/selection_data/",
|
||||
json.dumps({"documents": [self.doc2.id]}),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
self.assertEqual(response.content, b"Insufficient permissions")
|
||||
|
||||
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
|
||||
def test_set_permissions(self, m):
|
||||
self.setup_mock(m, "set_permissions")
|
||||
|
||||
@@ -6,6 +6,7 @@ from unittest.mock import ANY
|
||||
from django.contrib.auth.models import Permission
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import override_settings
|
||||
from guardian.shortcuts import assign_perm
|
||||
from rest_framework import status
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
@@ -1247,6 +1248,100 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(doc5.custom_fields.first().value, [1])
|
||||
|
||||
def test_documentlink_patch_requires_change_permission_on_target_documents(self):
|
||||
source_owner = User.objects.create_user(username="source-owner")
|
||||
source_owner.user_permissions.add(
|
||||
Permission.objects.get(codename="change_document"),
|
||||
)
|
||||
other_user = User.objects.create_user(username="other-user")
|
||||
|
||||
source_doc = Document.objects.create(
|
||||
title="Source",
|
||||
checksum="source",
|
||||
mime_type="application/pdf",
|
||||
owner=source_owner,
|
||||
)
|
||||
target_doc = Document.objects.create(
|
||||
title="Target",
|
||||
checksum="target",
|
||||
mime_type="application/pdf",
|
||||
owner=other_user,
|
||||
)
|
||||
custom_field_doclink = CustomField.objects.create(
|
||||
name="Test Custom Field Doc Link",
|
||||
data_type=CustomField.FieldDataType.DOCUMENTLINK,
|
||||
)
|
||||
|
||||
self.client.force_authenticate(user=source_owner)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{source_doc.id}/",
|
||||
data={
|
||||
"custom_fields": [
|
||||
{
|
||||
"field": custom_field_doclink.id,
|
||||
"value": [target_doc.id],
|
||||
},
|
||||
],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
self.assertEqual(
|
||||
CustomFieldInstance.objects.filter(field=custom_field_doclink).count(),
|
||||
0,
|
||||
)
|
||||
|
||||
def test_documentlink_patch_allowed_with_change_permission_on_target_documents(
|
||||
self,
|
||||
):
|
||||
source_owner = User.objects.create_user(username="source-owner")
|
||||
source_owner.user_permissions.add(
|
||||
Permission.objects.get(codename="change_document"),
|
||||
)
|
||||
other_user = User.objects.create_user(username="other-user")
|
||||
|
||||
source_doc = Document.objects.create(
|
||||
title="Source",
|
||||
checksum="source",
|
||||
mime_type="application/pdf",
|
||||
owner=source_owner,
|
||||
)
|
||||
target_doc = Document.objects.create(
|
||||
title="Target",
|
||||
checksum="target",
|
||||
mime_type="application/pdf",
|
||||
owner=other_user,
|
||||
)
|
||||
custom_field_doclink = CustomField.objects.create(
|
||||
name="Test Custom Field Doc Link",
|
||||
data_type=CustomField.FieldDataType.DOCUMENTLINK,
|
||||
)
|
||||
|
||||
assign_perm("change_document", source_owner, target_doc)
|
||||
self.client.force_authenticate(user=source_owner)
|
||||
|
||||
resp = self.client.patch(
|
||||
f"/api/documents/{source_doc.id}/",
|
||||
data={
|
||||
"custom_fields": [
|
||||
{
|
||||
"field": custom_field_doclink.id,
|
||||
"value": [target_doc.id],
|
||||
},
|
||||
],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
target_doc.refresh_from_db()
|
||||
self.assertEqual(
|
||||
target_doc.custom_fields.get(field=custom_field_doclink).value,
|
||||
[source_doc.id],
|
||||
)
|
||||
|
||||
def test_custom_field_filters(self):
|
||||
custom_field_string = CustomField.objects.create(
|
||||
name="Test Custom Field String",
|
||||
@@ -1330,3 +1425,41 @@ class TestCustomFieldsAPI(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
results = response.data["results"]
|
||||
self.assertEqual(results[0]["document_count"], 0)
|
||||
|
||||
def test_patch_document_invalid_date_custom_field_returns_validation_error(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A date custom field
|
||||
- A document
|
||||
WHEN:
|
||||
- Patching the document with a date string in the wrong format
|
||||
THEN:
|
||||
- HTTP 400 is returned instead of an internal server error
|
||||
- No custom field instance is created
|
||||
"""
|
||||
cf_date = CustomField.objects.create(
|
||||
name="datefield",
|
||||
data_type=CustomField.FieldDataType.DATE,
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="Doc",
|
||||
checksum="123",
|
||||
mime_type="application/pdf",
|
||||
)
|
||||
|
||||
response = self.client.patch(
|
||||
f"/api/documents/{doc.pk}/",
|
||||
{
|
||||
"custom_fields": [
|
||||
{
|
||||
"field": cf_date.pk,
|
||||
"value": "10.03.2026",
|
||||
},
|
||||
],
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
self.assertIn("custom_fields", response.data)
|
||||
self.assertEqual(CustomFieldInstance.objects.count(), 0)
|
||||
|
||||
@@ -1087,6 +1087,43 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
self.assertEqual(len(response.data["all"]), 50)
|
||||
self.assertCountEqual(response.data["all"], [d.id for d in docs])
|
||||
|
||||
def test_default_ordering_uses_id_as_tiebreaker(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Documents sharing the same created date
|
||||
WHEN:
|
||||
- API request for documents without an explicit ordering
|
||||
THEN:
|
||||
- Results are correctly ordered by created > id
|
||||
"""
|
||||
older_doc = Document.objects.create(
|
||||
checksum="older",
|
||||
content="older",
|
||||
created=date(2024, 1, 1),
|
||||
)
|
||||
first_same_date_doc = Document.objects.create(
|
||||
checksum="same-date-1",
|
||||
content="same-date-1",
|
||||
created=date(2024, 1, 2),
|
||||
)
|
||||
second_same_date_doc = Document.objects.create(
|
||||
checksum="same-date-2",
|
||||
content="same-date-2",
|
||||
created=date(2024, 1, 2),
|
||||
)
|
||||
|
||||
response = self.client.get("/api/documents/")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(
|
||||
[result["id"] for result in response.data["results"]],
|
||||
[
|
||||
second_same_date_doc.id,
|
||||
first_same_date_doc.id,
|
||||
older_doc.id,
|
||||
],
|
||||
)
|
||||
|
||||
def test_statistics(self):
|
||||
doc1 = Document.objects.create(
|
||||
title="none1",
|
||||
@@ -2683,6 +2720,77 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
# modified was updated to today
|
||||
self.assertEqual(doc.modified.day, timezone.now().day)
|
||||
|
||||
def test_delete_note_missing_id(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document
|
||||
WHEN:
|
||||
- API DELETE request to notes endpoint without an id query param
|
||||
- API DELETE request to notes endpoint with an empty id query param
|
||||
THEN:
|
||||
- HTTP 400 is returned
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
mime_type="application/pdf",
|
||||
content="this is a document",
|
||||
)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{doc.pk}/notes/",
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{doc.pk}/notes/?id=",
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def test_delete_note_invalid_id(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document
|
||||
WHEN:
|
||||
- API DELETE request to notes endpoint with a non-integer note id
|
||||
THEN:
|
||||
- HTTP 400 is returned
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
mime_type="application/pdf",
|
||||
content="this is a document",
|
||||
)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{doc.pk}/notes/?id=notaninteger",
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def test_delete_note_nonexistent_id(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document, no notes
|
||||
WHEN:
|
||||
- API DELETE request to notes endpoint with a non-existent note id
|
||||
THEN:
|
||||
- HTTP 404 is returned
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
mime_type="application/pdf",
|
||||
content="this is a document",
|
||||
)
|
||||
|
||||
response = self.client.delete(
|
||||
f"/api/documents/{doc.pk}/notes/?id=99999",
|
||||
format="json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
|
||||
|
||||
def test_get_notes_no_doc(self):
|
||||
"""
|
||||
GIVEN:
|
||||
@@ -2905,6 +3013,106 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
|
||||
)
|
||||
self.assertEqual(resp.status_code, status.HTTP_200_OK)
|
||||
|
||||
def test_create_share_link_requires_view_permission_for_document(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A user with add_sharelink but without view permission on a document
|
||||
WHEN:
|
||||
- API request is made to create a share link for that document
|
||||
THEN:
|
||||
- Share link creation is denied until view permission is granted
|
||||
"""
|
||||
user1 = User.objects.create_user(username="test1")
|
||||
user1.user_permissions.add(*Permission.objects.filter(codename="add_sharelink"))
|
||||
user1.save()
|
||||
|
||||
user2 = User.objects.create_user(username="test2")
|
||||
user2.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
mime_type="application/pdf",
|
||||
content="this is a document which will be protected",
|
||||
owner=user2,
|
||||
)
|
||||
|
||||
self.client.force_authenticate(user1)
|
||||
|
||||
create_resp = self.client.post(
|
||||
"/api/share_links/",
|
||||
data={
|
||||
"document": doc.pk,
|
||||
"file_version": "original",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(create_resp.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
assign_perm("view_document", user1, doc)
|
||||
|
||||
create_resp = self.client.post(
|
||||
"/api/share_links/",
|
||||
data={
|
||||
"document": doc.pk,
|
||||
"file_version": "original",
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(create_resp.status_code, status.HTTP_201_CREATED)
|
||||
self.assertEqual(create_resp.data["document"], doc.pk)
|
||||
|
||||
def test_share_link_update_methods_not_allowed(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- An existing share link
|
||||
WHEN:
|
||||
- PUT and PATCH requests are made to its detail endpoint
|
||||
THEN:
|
||||
- The API rejects them with 405 and the link is unchanged
|
||||
"""
|
||||
doc = Document.objects.create(
|
||||
title="test",
|
||||
mime_type="application/pdf",
|
||||
content="share link content",
|
||||
)
|
||||
expiration = timezone.now() + timedelta(days=7)
|
||||
create_resp = self.client.post(
|
||||
"/api/share_links/",
|
||||
data={
|
||||
"document": doc.pk,
|
||||
"expiration": expiration.isoformat(),
|
||||
"file_version": ShareLink.FileVersion.ORIGINAL,
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(create_resp.status_code, status.HTTP_201_CREATED)
|
||||
share_link_id = create_resp.data["id"]
|
||||
|
||||
patch_resp = self.client.patch(
|
||||
f"/api/share_links/{share_link_id}/",
|
||||
data={
|
||||
"expiration": None,
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(patch_resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
|
||||
|
||||
put_resp = self.client.put(
|
||||
f"/api/share_links/{share_link_id}/",
|
||||
data={
|
||||
"document": doc.pk,
|
||||
"expiration": None,
|
||||
"file_version": ShareLink.FileVersion.ARCHIVE,
|
||||
},
|
||||
format="json",
|
||||
)
|
||||
self.assertEqual(put_resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
|
||||
|
||||
share_link = ShareLink.objects.get(pk=share_link_id)
|
||||
self.assertEqual(share_link.file_version, ShareLink.FileVersion.ORIGINAL)
|
||||
self.assertIsNotNone(share_link.expiration)
|
||||
|
||||
def test_next_asn(self):
|
||||
"""
|
||||
GIVEN:
|
||||
|
||||
@@ -888,6 +888,19 @@ class TestApiUser(DirectoriesMixin, APITestCase):
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
response = self.client.post(
|
||||
f"{self.ENDPOINT}",
|
||||
json.dumps(
|
||||
{
|
||||
"username": "user4",
|
||||
"is_superuser": "true",
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
self.client.force_authenticate(user2)
|
||||
|
||||
response = self.client.patch(
|
||||
@@ -920,6 +933,65 @@ class TestApiUser(DirectoriesMixin, APITestCase):
|
||||
returned_user1 = User.objects.get(pk=user1.pk)
|
||||
self.assertEqual(returned_user1.is_superuser, False)
|
||||
|
||||
def test_only_superusers_can_create_or_alter_staff_status(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing user account
|
||||
WHEN:
|
||||
- API request is made to add a user account with staff status
|
||||
- API request is made to change staff status
|
||||
THEN:
|
||||
- Only superusers can change staff status
|
||||
"""
|
||||
|
||||
user1 = User.objects.create_user(username="user1")
|
||||
user1.user_permissions.add(*Permission.objects.all())
|
||||
user2 = User.objects.create_superuser(username="user2")
|
||||
|
||||
self.client.force_authenticate(user1)
|
||||
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}{user1.pk}/",
|
||||
json.dumps(
|
||||
{
|
||||
"is_staff": "true",
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
response = self.client.post(
|
||||
f"{self.ENDPOINT}",
|
||||
json.dumps(
|
||||
{
|
||||
"username": "user3",
|
||||
"is_staff": 1,
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
self.client.force_authenticate(user2)
|
||||
|
||||
response = self.client.patch(
|
||||
f"{self.ENDPOINT}{user1.pk}/",
|
||||
json.dumps(
|
||||
{
|
||||
"is_staff": True,
|
||||
},
|
||||
),
|
||||
content_type="application/json",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
|
||||
returned_user1 = User.objects.get(pk=user1.pk)
|
||||
self.assertEqual(returned_user1.is_staff, True)
|
||||
|
||||
|
||||
class TestApiGroup(DirectoriesMixin, APITestCase):
|
||||
ENDPOINT = "/api/groups/"
|
||||
|
||||
@@ -702,6 +702,40 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
|
||||
self.assertEqual(correction, None)
|
||||
|
||||
def test_search_spelling_suggestion_suppressed_for_private_terms(self):
|
||||
owner = User.objects.create_user("owner")
|
||||
attacker = User.objects.create_user("attacker")
|
||||
attacker.user_permissions.add(
|
||||
Permission.objects.get(codename="view_document"),
|
||||
)
|
||||
|
||||
with AsyncWriter(index.open_index()) as writer:
|
||||
for i in range(55):
|
||||
private_doc = Document.objects.create(
|
||||
checksum=f"p{i}",
|
||||
pk=100 + i,
|
||||
title=f"Private Document {i + 1}",
|
||||
content=f"treasury document {i + 1}",
|
||||
owner=owner,
|
||||
)
|
||||
visible_doc = Document.objects.create(
|
||||
checksum=f"v{i}",
|
||||
pk=200 + i,
|
||||
title=f"Visible Document {i + 1}",
|
||||
content=f"public ledger {i + 1}",
|
||||
owner=attacker,
|
||||
)
|
||||
index.update_document(writer, private_doc)
|
||||
index.update_document(writer, visible_doc)
|
||||
|
||||
self.client.force_authenticate(user=attacker)
|
||||
|
||||
response = self.client.get("/api/documents/?query=treasurx")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertEqual(response.data["count"], 0)
|
||||
self.assertIsNone(response.data["corrected_query"])
|
||||
|
||||
@mock.patch(
|
||||
"whoosh.searching.Searcher.correct_query",
|
||||
side_effect=Exception("Test error"),
|
||||
@@ -772,6 +806,58 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(results[0]["id"], d3.id)
|
||||
self.assertEqual(results[1]["id"], d1.id)
|
||||
|
||||
def test_search_more_like_requires_view_permission_on_seed_document(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A user can search documents they own
|
||||
- Another user's private document exists with similar content
|
||||
WHEN:
|
||||
- The user requests more-like-this for the private seed document
|
||||
THEN:
|
||||
- The request is rejected
|
||||
"""
|
||||
owner = User.objects.create_user("owner")
|
||||
attacker = User.objects.create_user("attacker")
|
||||
attacker.user_permissions.add(
|
||||
Permission.objects.get(codename="view_document"),
|
||||
)
|
||||
|
||||
private_seed = Document.objects.create(
|
||||
title="private bank statement",
|
||||
content="quarterly treasury bank statement wire transfer",
|
||||
checksum="seed",
|
||||
owner=owner,
|
||||
pk=10,
|
||||
)
|
||||
visible_doc = Document.objects.create(
|
||||
title="attacker-visible match",
|
||||
content="quarterly treasury bank statement wire transfer summary",
|
||||
checksum="visible",
|
||||
owner=attacker,
|
||||
pk=11,
|
||||
)
|
||||
other_doc = Document.objects.create(
|
||||
title="unrelated",
|
||||
content="completely different topic",
|
||||
checksum="other",
|
||||
owner=attacker,
|
||||
pk=12,
|
||||
)
|
||||
|
||||
with AsyncWriter(index.open_index()) as writer:
|
||||
index.update_document(writer, private_seed)
|
||||
index.update_document(writer, visible_doc)
|
||||
index.update_document(writer, other_doc)
|
||||
|
||||
self.client.force_authenticate(user=attacker)
|
||||
|
||||
response = self.client.get(
|
||||
f"/api/documents/?more_like_id={private_seed.id}",
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
self.assertEqual(response.content, b"Insufficient permissions.")
|
||||
|
||||
def test_search_filtering(self):
|
||||
t = Tag.objects.create(name="tag")
|
||||
t2 = Tag.objects.create(name="tag2")
|
||||
@@ -1357,6 +1443,83 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
|
||||
self.assertEqual(results["custom_fields"][0]["id"], custom_field1.id)
|
||||
self.assertEqual(results["workflows"][0]["id"], workflow1.id)
|
||||
|
||||
def test_global_search_filters_owned_mail_objects(self):
|
||||
user1 = User.objects.create_user("mail-search-user")
|
||||
user2 = User.objects.create_user("other-mail-search-user")
|
||||
user1.user_permissions.add(
|
||||
Permission.objects.get(codename="view_mailaccount"),
|
||||
Permission.objects.get(codename="view_mailrule"),
|
||||
)
|
||||
|
||||
own_account = MailAccount.objects.create(
|
||||
name="bank owned account",
|
||||
username="owner@example.com",
|
||||
password="secret",
|
||||
imap_server="imap.owner.example.com",
|
||||
imap_port=993,
|
||||
imap_security=MailAccount.ImapSecurity.SSL,
|
||||
character_set="UTF-8",
|
||||
owner=user1,
|
||||
)
|
||||
other_account = MailAccount.objects.create(
|
||||
name="bank other account",
|
||||
username="other@example.com",
|
||||
password="secret",
|
||||
imap_server="imap.other.example.com",
|
||||
imap_port=993,
|
||||
imap_security=MailAccount.ImapSecurity.SSL,
|
||||
character_set="UTF-8",
|
||||
owner=user2,
|
||||
)
|
||||
unowned_account = MailAccount.objects.create(
|
||||
name="bank shared account",
|
||||
username="shared@example.com",
|
||||
password="secret",
|
||||
imap_server="imap.shared.example.com",
|
||||
imap_port=993,
|
||||
imap_security=MailAccount.ImapSecurity.SSL,
|
||||
character_set="UTF-8",
|
||||
)
|
||||
own_rule = MailRule.objects.create(
|
||||
name="bank owned rule",
|
||||
account=own_account,
|
||||
action=MailRule.MailAction.MOVE,
|
||||
owner=user1,
|
||||
)
|
||||
other_rule = MailRule.objects.create(
|
||||
name="bank other rule",
|
||||
account=other_account,
|
||||
action=MailRule.MailAction.MOVE,
|
||||
owner=user2,
|
||||
)
|
||||
unowned_rule = MailRule.objects.create(
|
||||
name="bank shared rule",
|
||||
account=unowned_account,
|
||||
action=MailRule.MailAction.MOVE,
|
||||
)
|
||||
|
||||
self.client.force_authenticate(user1)
|
||||
|
||||
response = self.client.get("/api/search/?query=bank")
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertCountEqual(
|
||||
[account["id"] for account in response.data["mail_accounts"]],
|
||||
[own_account.id, unowned_account.id],
|
||||
)
|
||||
self.assertCountEqual(
|
||||
[rule["id"] for rule in response.data["mail_rules"]],
|
||||
[own_rule.id, unowned_rule.id],
|
||||
)
|
||||
self.assertNotIn(
|
||||
other_account.id,
|
||||
[account["id"] for account in response.data["mail_accounts"]],
|
||||
)
|
||||
self.assertNotIn(
|
||||
other_rule.id,
|
||||
[rule["id"] for rule in response.data["mail_rules"]],
|
||||
)
|
||||
|
||||
def test_global_search_bad_request(self):
|
||||
"""
|
||||
WHEN:
|
||||
|
||||
@@ -57,11 +57,18 @@ class TestSystemStatus(APITestCase):
|
||||
"""
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
||||
self.assertEqual(response["WWW-Authenticate"], "Token")
|
||||
normal_user = User.objects.create_user(username="normal_user")
|
||||
self.client.force_login(normal_user)
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
|
||||
|
||||
def test_system_status_with_bad_basic_auth_challenges(self) -> None:
|
||||
self.client.credentials(HTTP_AUTHORIZATION="Basic invalid")
|
||||
response = self.client.get(self.ENDPOINT)
|
||||
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
|
||||
self.assertEqual(response["WWW-Authenticate"], 'Basic realm="api"')
|
||||
|
||||
def test_system_status_container_detection(self):
|
||||
"""
|
||||
GIVEN:
|
||||
|
||||
@@ -633,6 +633,33 @@ class TestConsumer(
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@mock.patch("documents.consumer.generate_unique_filename")
|
||||
def testFilenameHandlingFallsBackWhenGeneratedPathExceedsDbLimit(self, m):
|
||||
m.side_effect = lambda doc, archive_filename=False: Path(
|
||||
("a" * 1100 + ".pdf") if not archive_filename else ("b" * 1100 + ".pdf"),
|
||||
)
|
||||
|
||||
with self.get_consumer(
|
||||
self.get_test_file(),
|
||||
DocumentMetadataOverrides(title="new docs"),
|
||||
) as consumer:
|
||||
consumer.run()
|
||||
|
||||
document = Document.objects.first()
|
||||
self.assertIsNotNone(document)
|
||||
assert document is not None
|
||||
|
||||
self.assertEqual(document.filename, f"{document.pk:07d}.pdf")
|
||||
self.assertLessEqual(len(document.filename), 1024)
|
||||
self.assertLessEqual(
|
||||
len(document.archive_filename),
|
||||
1024,
|
||||
)
|
||||
self.assertIsFile(document.source_path)
|
||||
self.assertIsFile(document.archive_path)
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
||||
@mock.patch("documents.signals.handlers.generate_unique_filename")
|
||||
def testFilenameHandlingUnstableFormat(self, m):
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import datetime
|
||||
import hashlib
|
||||
import logging
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
@@ -166,6 +167,52 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
|
||||
)
|
||||
self.assertEqual(document.filename, "none/none.pdf")
|
||||
|
||||
@override_settings(FILENAME_FORMAT=None)
|
||||
def test_stale_save_recovers_already_moved_files(self) -> None:
|
||||
old_storage_path = StoragePath.objects.create(
|
||||
name="old-path",
|
||||
path="old/{{title}}",
|
||||
)
|
||||
new_storage_path = StoragePath.objects.create(
|
||||
name="new-path",
|
||||
path="new/{{title}}",
|
||||
)
|
||||
original_bytes = b"original"
|
||||
archive_bytes = b"archive"
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="document",
|
||||
mime_type="application/pdf",
|
||||
checksum=hashlib.md5(original_bytes).hexdigest(),
|
||||
archive_checksum=hashlib.md5(archive_bytes).hexdigest(),
|
||||
filename="old/document.pdf",
|
||||
archive_filename="old/document.pdf",
|
||||
storage_path=old_storage_path,
|
||||
)
|
||||
create_source_path_directory(doc.source_path)
|
||||
doc.source_path.write_bytes(original_bytes)
|
||||
create_source_path_directory(doc.archive_path)
|
||||
doc.archive_path.write_bytes(archive_bytes)
|
||||
|
||||
stale_doc = Document.objects.get(pk=doc.pk)
|
||||
fresh_doc = Document.objects.get(pk=doc.pk)
|
||||
fresh_doc.storage_path = new_storage_path
|
||||
fresh_doc.save()
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.filename, "new/document.pdf")
|
||||
self.assertEqual(doc.archive_filename, "new/document.pdf")
|
||||
|
||||
stale_doc.storage_path = new_storage_path
|
||||
stale_doc.save()
|
||||
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.filename, "new/document.pdf")
|
||||
self.assertEqual(doc.archive_filename, "new/document.pdf")
|
||||
self.assertIsFile(doc.source_path)
|
||||
self.assertIsFile(doc.archive_path)
|
||||
self.assertIsNotFile(settings.ORIGINALS_DIR / "old" / "document.pdf")
|
||||
self.assertIsNotFile(settings.ARCHIVE_DIR / "old" / "document.pdf")
|
||||
|
||||
@override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
|
||||
def test_document_delete(self):
|
||||
document = Document()
|
||||
@@ -1341,6 +1388,41 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
|
||||
Path("somepath/asn-201-400/asn-3xx/Does Matter.pdf"),
|
||||
)
|
||||
|
||||
def test_template_related_context_keeps_legacy_string_coercion(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A storage path template that uses related objects directly as strings
|
||||
WHEN:
|
||||
- Filepath for a document with this format is called
|
||||
THEN:
|
||||
- Related objects coerce to their names (legacy behavior)
|
||||
- Explicit attribute access remains available for new templates
|
||||
"""
|
||||
sp = StoragePath.objects.create(
|
||||
name="PARTNER",
|
||||
path=(
|
||||
"{{ document.storage_path|lower }} / "
|
||||
"{{ document.correspondent|lower|replace('mi:', 'mieter/') }} / "
|
||||
"{{ document_type|lower }} / "
|
||||
"{{ title|lower }}"
|
||||
),
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="scan_017562",
|
||||
created=datetime.date(2025, 7, 2),
|
||||
added=timezone.make_aware(datetime.datetime(2026, 3, 3, 11, 53, 16)),
|
||||
mime_type="application/pdf",
|
||||
checksum="test-checksum",
|
||||
storage_path=sp,
|
||||
correspondent=Correspondent.objects.create(name="mi:kochkach"),
|
||||
document_type=DocumentType.objects.create(name="Mietvertrag"),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
generate_filename(doc),
|
||||
Path("partner/mieter/kochkach/mietvertrag/scan_017562.pdf"),
|
||||
)
|
||||
|
||||
@override_settings(
|
||||
FILENAME_FORMAT="{{creation_date}}/{{ title_name_str }}",
|
||||
)
|
||||
@@ -1699,6 +1781,21 @@ class TestCustomFieldFilenameUpdates(
|
||||
self.assertTrue(Path(self.doc.source_path).is_file())
|
||||
self.assertLessEqual(m.call_count, 1)
|
||||
|
||||
@override_settings(FILENAME_FORMAT=None)
|
||||
def test_overlong_storage_path_keeps_existing_filename(self):
|
||||
initial_filename = generate_filename(self.doc)
|
||||
Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
|
||||
self.doc.refresh_from_db()
|
||||
Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
|
||||
Path(self.doc.source_path).touch()
|
||||
|
||||
self.doc.storage_path = StoragePath.objects.create(path="a" * 1100)
|
||||
self.doc.save()
|
||||
|
||||
self.doc.refresh_from_db()
|
||||
self.assertEqual(Path(self.doc.filename), initial_filename)
|
||||
self.assertTrue(Path(self.doc.source_path).is_file())
|
||||
|
||||
|
||||
class TestPathDateLocalization:
|
||||
"""
|
||||
|
||||
@@ -147,6 +147,16 @@ class TestTagHierarchy(APITestCase):
|
||||
assert serializer.data # triggers serialization
|
||||
assert "document_count_filter" in context
|
||||
|
||||
def test_tag_list_can_order_by_document_count_with_children(self) -> None:
|
||||
self.document.tags.add(self.child)
|
||||
|
||||
response = self.client.get(
|
||||
"/api/tags/",
|
||||
{"ordering": "document_count"},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_cannot_set_parent_to_self(self):
|
||||
tag = Tag.objects.create(name="Selfie")
|
||||
resp = self.client.patch(
|
||||
|
||||
@@ -25,6 +25,7 @@ from rest_framework.test import APIClient
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from documents.file_handling import create_source_path_directory
|
||||
from documents.file_handling import generate_filename
|
||||
from documents.file_handling import generate_unique_filename
|
||||
from documents.signals.handlers import run_workflows
|
||||
from documents.workflows.webhooks import send_webhook
|
||||
@@ -898,6 +899,121 @@ class TestWorkflows(
|
||||
expected_str = f"Document matched {trigger} from {w}"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
|
||||
def test_workflow_assign_custom_field_keeps_storage_filename_in_sync(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document with a storage path template that depends on a custom field
|
||||
- Existing workflow triggered on document update assigning that custom field
|
||||
WHEN:
|
||||
- Workflow runs for the document
|
||||
THEN:
|
||||
- The database filename remains aligned with the moved file on disk
|
||||
"""
|
||||
storage_path = StoragePath.objects.create(
|
||||
name="workflow-custom-field-path",
|
||||
path="{{ custom_fields|get_cf_value('Custom Field 1', 'none') }}/{{ title }}",
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="workflow custom field sync",
|
||||
mime_type="application/pdf",
|
||||
checksum="workflow-custom-field-sync",
|
||||
storage_path=storage_path,
|
||||
original_filename="workflow-custom-field-sync.pdf",
|
||||
)
|
||||
CustomFieldInstance.objects.create(
|
||||
document=doc,
|
||||
field=self.cf1,
|
||||
value_text="initial",
|
||||
)
|
||||
|
||||
generated = generate_unique_filename(doc)
|
||||
destination = (settings.ORIGINALS_DIR / generated).resolve()
|
||||
create_source_path_directory(destination)
|
||||
shutil.copy(self.SAMPLE_DIR / "simple.pdf", destination)
|
||||
Document.objects.filter(pk=doc.pk).update(filename=generated.as_posix())
|
||||
doc.refresh_from_db()
|
||||
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
assign_custom_fields_values={self.cf1.pk: "cars"},
|
||||
)
|
||||
action.assign_custom_fields.add(self.cf1.pk)
|
||||
workflow = Workflow.objects.create(
|
||||
name="Workflow custom field filename sync",
|
||||
order=0,
|
||||
)
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
workflow.save()
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
doc.refresh_from_db()
|
||||
expected_filename = generate_filename(doc)
|
||||
self.assertEqual(Path(doc.filename), expected_filename)
|
||||
self.assertTrue(doc.source_path.is_file())
|
||||
|
||||
def test_workflow_document_updated_does_not_overwrite_filename(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- A document whose filename has been updated in the DB by a concurrent
|
||||
bulk_update_documents task (simulating update_filename_and_move_files
|
||||
completing and writing the new filename to the DB)
|
||||
- A stale in-memory document instance still holding the old filename
|
||||
- An active DOCUMENT_UPDATED workflow
|
||||
WHEN:
|
||||
- run_workflows is called with the stale in-memory instance
|
||||
(as would happen in the second concurrent bulk_update_documents task)
|
||||
THEN:
|
||||
- The DB filename is NOT overwritten with the stale in-memory value
|
||||
(regression test for GH #12386 — the race window between
|
||||
refresh_from_db and document.save in run_workflows)
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
assign_title="Updated by workflow",
|
||||
)
|
||||
workflow = Workflow.objects.create(name="Race condition test workflow", order=0)
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
workflow.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="race condition test",
|
||||
mime_type="application/pdf",
|
||||
checksum="racecondition123",
|
||||
original_filename="old.pdf",
|
||||
filename="old/path/old.pdf",
|
||||
)
|
||||
|
||||
# Simulate BUD-1 completing update_filename_and_move_files:
|
||||
# the DB now holds the new filename while BUD-2's in-memory instance is stale.
|
||||
new_filename = "new/path/new.pdf"
|
||||
Document.global_objects.filter(pk=doc.pk).update(filename=new_filename)
|
||||
|
||||
# The stale instance still has filename="old/path/old.pdf" in memory.
|
||||
# Mock refresh_from_db so the stale value persists through run_workflows,
|
||||
# replicating the race window between refresh and save.
|
||||
# Mock update_filename_and_move_files to prevent file-not-found errors
|
||||
# since we are only testing DB state here.
|
||||
with (
|
||||
mock.patch(
|
||||
"documents.signals.handlers.update_filename_and_move_files",
|
||||
),
|
||||
mock.patch.object(Document, "refresh_from_db"),
|
||||
):
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
# The DB filename must not have been reverted to the stale old value.
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.filename, new_filename)
|
||||
|
||||
def test_document_added_workflow(self):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
|
||||
@@ -3396,6 +3512,124 @@ class TestWorkflows(
|
||||
as_json=False,
|
||||
)
|
||||
|
||||
@mock.patch("documents.signals.handlers.execute_webhook_action")
|
||||
def test_workflow_webhook_action_does_not_overwrite_concurrent_tags(
|
||||
self,
|
||||
mock_execute_webhook_action,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- A document updated workflow with only a webhook action
|
||||
- A tag update that happens after run_workflows
|
||||
WHEN:
|
||||
- The workflow runs
|
||||
THEN:
|
||||
- The concurrent tag update is preserved
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
webhook_action = WorkflowActionWebhook.objects.create(
|
||||
use_params=False,
|
||||
body="Test message: {{doc_url}}",
|
||||
url="http://paperless-ngx.com",
|
||||
include_document=False,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.WEBHOOK,
|
||||
webhook=webhook_action,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Webhook workflow",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(action)
|
||||
w.save()
|
||||
|
||||
inbox_tag = Tag.objects.create(name="inbox")
|
||||
error_tag = Tag.objects.create(name="error")
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
)
|
||||
doc.tags.add(inbox_tag)
|
||||
|
||||
def add_error_tag(*args, **kwargs):
|
||||
Document.objects.get(pk=doc.pk).tags.add(error_tag)
|
||||
|
||||
mock_execute_webhook_action.side_effect = add_error_tag
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
doc.refresh_from_db()
|
||||
self.assertCountEqual(doc.tags.all(), [inbox_tag, error_tag])
|
||||
|
||||
@mock.patch("documents.signals.handlers.execute_webhook_action")
|
||||
def test_workflow_tag_actions_do_not_overwrite_concurrent_tags(
|
||||
self,
|
||||
mock_execute_webhook_action,
|
||||
):
|
||||
"""
|
||||
GIVEN:
|
||||
- A document updated workflow that clears tags and assigns an inbox tag
|
||||
- A later tag update that happens before the workflow finishes
|
||||
WHEN:
|
||||
- The workflow runs
|
||||
THEN:
|
||||
- The later tag update is preserved
|
||||
"""
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
removal_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.REMOVAL,
|
||||
remove_all_tags=True,
|
||||
)
|
||||
assign_action = WorkflowAction.objects.create(
|
||||
assign_owner=self.user2,
|
||||
)
|
||||
assign_action.assign_tags.add(self.t1)
|
||||
webhook_action = WorkflowActionWebhook.objects.create(
|
||||
use_params=False,
|
||||
body="Test message: {{doc_url}}",
|
||||
url="http://paperless-ngx.com",
|
||||
include_document=False,
|
||||
)
|
||||
notify_action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.WEBHOOK,
|
||||
webhook=webhook_action,
|
||||
)
|
||||
w = Workflow.objects.create(
|
||||
name="Workflow tag race",
|
||||
order=0,
|
||||
)
|
||||
w.triggers.add(trigger)
|
||||
w.actions.add(removal_action)
|
||||
w.actions.add(assign_action)
|
||||
w.actions.add(notify_action)
|
||||
w.save()
|
||||
|
||||
doc = Document.objects.create(
|
||||
title="sample test",
|
||||
correspondent=self.c,
|
||||
original_filename="sample.pdf",
|
||||
owner=self.user3,
|
||||
)
|
||||
doc.tags.add(self.t2, self.t3)
|
||||
|
||||
def add_error_tag(*args, **kwargs):
|
||||
Document.objects.get(pk=doc.pk).tags.add(self.t2)
|
||||
|
||||
mock_execute_webhook_action.side_effect = add_error_tag
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
doc.refresh_from_db()
|
||||
self.assertEqual(doc.owner, self.user2)
|
||||
self.assertCountEqual(doc.tags.all(), [self.t1, self.t2])
|
||||
|
||||
@override_settings(
|
||||
PAPERLESS_URL="http://localhost:8000",
|
||||
)
|
||||
|
||||
@@ -49,6 +49,7 @@ from django.utils import timezone
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.utils.timezone import make_aware
|
||||
from django.utils.translation import get_language
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views import View
|
||||
from django.views.decorators.cache import cache_control
|
||||
from django.views.decorators.http import condition
|
||||
@@ -70,10 +71,12 @@ from rest_framework import parsers
|
||||
from rest_framework import serializers
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import NotFound
|
||||
from rest_framework.exceptions import PermissionDenied
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.filters import OrderingFilter
|
||||
from rest_framework.filters import SearchFilter
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.mixins import CreateModelMixin
|
||||
from rest_framework.mixins import DestroyModelMixin
|
||||
from rest_framework.mixins import ListModelMixin
|
||||
from rest_framework.mixins import RetrieveModelMixin
|
||||
@@ -487,13 +490,13 @@ class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
|
||||
user = getattr(getattr(self, "request", None), "user", None)
|
||||
children_source = list(
|
||||
annotate_document_count_for_related_queryset(
|
||||
Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
|
||||
.select_related("owner")
|
||||
.order_by(*ordering),
|
||||
Tag.objects.filter(
|
||||
pk__in=descendant_pks | {t.pk for t in all_tags},
|
||||
).select_related("owner"),
|
||||
through_model=self.document_count_through,
|
||||
related_object_field=self.document_count_source_field,
|
||||
user=user,
|
||||
),
|
||||
).order_by(*ordering),
|
||||
)
|
||||
else:
|
||||
children_source = all_tags
|
||||
@@ -783,7 +786,7 @@ class DocumentViewSet(
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Document.objects.distinct()
|
||||
.order_by("-created")
|
||||
.order_by("-created", "-id")
|
||||
.annotate(num_notes=Count("notes"))
|
||||
.select_related("correspondent", "storage_path", "document_type", "owner")
|
||||
.prefetch_related("tags", "custom_fields", "notes")
|
||||
@@ -1038,6 +1041,7 @@ class DocumentViewSet(
|
||||
methods=["get", "post", "delete"],
|
||||
detail=True,
|
||||
permission_classes=[PaperlessNotePermissions],
|
||||
pagination_class=None,
|
||||
filter_backends=[],
|
||||
)
|
||||
def notes(self, request, pk=None):
|
||||
@@ -1121,7 +1125,14 @@ class DocumentViewSet(
|
||||
):
|
||||
return HttpResponseForbidden("Insufficient permissions to delete notes")
|
||||
|
||||
note = Note.objects.get(id=int(request.GET.get("id")), document=doc)
|
||||
note_id = request.GET.get("id")
|
||||
if not note_id:
|
||||
raise ValidationError({"id": "This field is required."})
|
||||
try:
|
||||
note_id_int = int(note_id)
|
||||
except ValueError:
|
||||
raise ValidationError({"id": "A valid integer is required."})
|
||||
note = get_object_or_404(Note, id=note_id_int, document=doc)
|
||||
if settings.AUDIT_LOG_ENABLED:
|
||||
LogEntry.objects.log_create(
|
||||
instance=doc,
|
||||
@@ -1368,11 +1379,28 @@ class UnifiedSearchViewSet(DocumentViewSet):
|
||||
filtered_queryset = super().filter_queryset(queryset)
|
||||
|
||||
if self._is_search_request():
|
||||
from documents import index
|
||||
|
||||
if "query" in self.request.query_params:
|
||||
from documents import index
|
||||
|
||||
query_class = index.DelayedFullTextQuery
|
||||
elif "more_like_id" in self.request.query_params:
|
||||
try:
|
||||
more_like_doc_id = int(self.request.query_params["more_like_id"])
|
||||
more_like_doc = Document.objects.select_related("owner").get(
|
||||
pk=more_like_doc_id,
|
||||
)
|
||||
except (TypeError, ValueError, Document.DoesNotExist):
|
||||
raise PermissionDenied(_("Invalid more_like_id"))
|
||||
|
||||
if not has_perms_owner_aware(
|
||||
self.request.user,
|
||||
"view_document",
|
||||
more_like_doc,
|
||||
):
|
||||
raise PermissionDenied(_("Insufficient permissions."))
|
||||
|
||||
from documents import index
|
||||
|
||||
query_class = index.DelayedMoreLikeThisQuery
|
||||
else:
|
||||
raise ValueError
|
||||
@@ -1408,6 +1436,8 @@ class UnifiedSearchViewSet(DocumentViewSet):
|
||||
return response
|
||||
except NotFound:
|
||||
raise
|
||||
except PermissionDenied as e:
|
||||
return HttpResponseForbidden(str(e.detail))
|
||||
except Exception as e:
|
||||
logger.warning(f"An error occurred listing search results: {e!s}")
|
||||
return HttpResponseBadRequest(
|
||||
@@ -1850,6 +1880,13 @@ class SelectionDataView(GenericAPIView):
|
||||
serializer.is_valid(raise_exception=True)
|
||||
|
||||
ids = serializer.validated_data.get("documents")
|
||||
permitted_documents = get_objects_for_user_owner_aware(
|
||||
request.user,
|
||||
"documents.view_document",
|
||||
Document,
|
||||
)
|
||||
if permitted_documents.filter(pk__in=ids).count() != len(ids):
|
||||
return HttpResponseForbidden("Insufficient permissions")
|
||||
|
||||
correspondents = Correspondent.objects.annotate(
|
||||
document_count=Count(
|
||||
@@ -2104,13 +2141,21 @@ class GlobalSearchView(PassUserMixin):
|
||||
)
|
||||
groups = groups[:OBJECT_LIMIT]
|
||||
mail_rules = (
|
||||
MailRule.objects.filter(name__icontains=query)
|
||||
get_objects_for_user_owner_aware(
|
||||
request.user,
|
||||
"view_mailrule",
|
||||
MailRule,
|
||||
).filter(name__icontains=query)
|
||||
if request.user.has_perm("paperless_mail.view_mailrule")
|
||||
else []
|
||||
)
|
||||
mail_rules = mail_rules[:OBJECT_LIMIT]
|
||||
mail_accounts = (
|
||||
MailAccount.objects.filter(name__icontains=query)
|
||||
get_objects_for_user_owner_aware(
|
||||
request.user,
|
||||
"view_mailaccount",
|
||||
MailAccount,
|
||||
).filter(name__icontains=query)
|
||||
if request.user.has_perm("paperless_mail.view_mailaccount")
|
||||
else []
|
||||
)
|
||||
@@ -2665,7 +2710,14 @@ class TasksViewSet(ReadOnlyModelViewSet):
|
||||
)
|
||||
|
||||
|
||||
class ShareLinkViewSet(ModelViewSet, PassUserMixin):
|
||||
class ShareLinkViewSet(
|
||||
PassUserMixin,
|
||||
CreateModelMixin,
|
||||
RetrieveModelMixin,
|
||||
DestroyModelMixin,
|
||||
ListModelMixin,
|
||||
GenericViewSet,
|
||||
):
|
||||
model = ShareLink
|
||||
|
||||
queryset = ShareLink.objects.all()
|
||||
|
||||
@@ -16,7 +16,6 @@ logger = logging.getLogger("paperless.workflows.mutations")
|
||||
def apply_assignment_to_document(
|
||||
action: WorkflowAction,
|
||||
document: Document,
|
||||
doc_tag_ids: list[int],
|
||||
logging_group,
|
||||
):
|
||||
"""
|
||||
@@ -25,12 +24,7 @@ def apply_assignment_to_document(
|
||||
action: WorkflowAction, annotated with 'has_assign_*' boolean fields
|
||||
"""
|
||||
if action.has_assign_tags:
|
||||
tag_ids_to_add: set[int] = set()
|
||||
for tag in action.assign_tags.all():
|
||||
tag_ids_to_add.add(tag.pk)
|
||||
tag_ids_to_add.update(int(pk) for pk in tag.get_ancestors_pks())
|
||||
|
||||
doc_tag_ids[:] = list(set(doc_tag_ids) | tag_ids_to_add)
|
||||
document.add_nested_tags(action.assign_tags.all())
|
||||
|
||||
if action.assign_correspondent:
|
||||
document.correspondent = action.assign_correspondent
|
||||
@@ -197,7 +191,6 @@ def apply_assignment_to_overrides(
|
||||
def apply_removal_to_document(
|
||||
action: WorkflowAction,
|
||||
document: Document,
|
||||
doc_tag_ids: list[int],
|
||||
):
|
||||
"""
|
||||
Apply removal actions to a Document instance.
|
||||
@@ -206,14 +199,15 @@ def apply_removal_to_document(
|
||||
"""
|
||||
|
||||
if action.remove_all_tags:
|
||||
doc_tag_ids.clear()
|
||||
document.tags.clear()
|
||||
else:
|
||||
tag_ids_to_remove: set[int] = set()
|
||||
for tag in action.remove_tags.all():
|
||||
tag_ids_to_remove.add(tag.pk)
|
||||
tag_ids_to_remove.update(int(pk) for pk in tag.get_descendants_pks())
|
||||
|
||||
doc_tag_ids[:] = [t for t in doc_tag_ids if t not in tag_ids_to_remove]
|
||||
if tag_ids_to_remove:
|
||||
document.tags.remove(*tag_ids_to_remove)
|
||||
|
||||
if action.remove_all_correspondents or (
|
||||
document.correspondent
|
||||
|
||||
@@ -83,3 +83,11 @@ class PaperlessBasicAuthentication(authentication.BasicAuthentication):
|
||||
raise exceptions.AuthenticationFailed("MFA required")
|
||||
|
||||
return user_tuple
|
||||
|
||||
def authenticate_header(self, request):
|
||||
auth_header = request.META.get("HTTP_AUTHORIZATION", "")
|
||||
if auth_header.lower().startswith("basic "):
|
||||
return super().authenticate_header(request)
|
||||
|
||||
# Still 401 for anonymous API access
|
||||
return authentication.TokenAuthentication.keyword
|
||||
|
||||
@@ -0,0 +1,48 @@
|
||||
import uuid
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
from django.test import TestCase
|
||||
from django.test import override_settings
|
||||
from django.urls import resolve
|
||||
from django.urls import reverse
|
||||
from rest_framework import status
|
||||
|
||||
|
||||
class TestApiAuthViews(TestCase):
|
||||
def test_api_auth_login_uses_allauth_login_view(self):
|
||||
response = self.client.get(reverse("rest_framework:login"))
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertTemplateUsed(response, "account/login.html")
|
||||
|
||||
def test_api_auth_login_uses_same_view_as_account_login(self):
|
||||
api_match = resolve("/api/auth/login/")
|
||||
account_match = resolve("/accounts/login/")
|
||||
|
||||
self.assertIs(api_match.func.view_class, account_match.func.view_class)
|
||||
|
||||
@override_settings(DISABLE_REGULAR_LOGIN=True)
|
||||
def test_api_auth_login_respects_disable_regular_login(self):
|
||||
username = f"testuser-{uuid.uuid4().hex}"
|
||||
User.objects.create_user(
|
||||
username=username,
|
||||
password="testpassword",
|
||||
)
|
||||
|
||||
response = self.client.post(
|
||||
reverse("rest_framework:login"),
|
||||
data={
|
||||
"login": username,
|
||||
"password": "testpassword",
|
||||
"next": "/api/documents/",
|
||||
},
|
||||
)
|
||||
|
||||
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
||||
self.assertTemplateUsed(response, "account/login.html")
|
||||
self.assertContains(response, "Regular login is disabled")
|
||||
self.assertNotIn("_auth_user_id", self.client.session)
|
||||
|
||||
def test_api_auth_logout_uses_named_route(self):
|
||||
self.assertEqual(reverse("rest_framework:login"), "/api/auth/login/")
|
||||
self.assertEqual(reverse("rest_framework:logout"), "/api/auth/logout/")
|
||||
@@ -89,7 +89,21 @@ urlpatterns = [
|
||||
re_path(
|
||||
"^auth/",
|
||||
include(
|
||||
("rest_framework.urls", "rest_framework"),
|
||||
(
|
||||
[
|
||||
path(
|
||||
"login/",
|
||||
allauth_account_views.login,
|
||||
name="login",
|
||||
),
|
||||
path(
|
||||
"logout/",
|
||||
allauth_account_views.logout,
|
||||
name="logout",
|
||||
),
|
||||
],
|
||||
"rest_framework",
|
||||
),
|
||||
namespace="rest_framework",
|
||||
),
|
||||
),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Final
|
||||
|
||||
__version__: Final[tuple[int, int, int]] = (2, 20, 8)
|
||||
__version__: Final[tuple[int, int, int]] = (2, 20, 15)
|
||||
# Version string like X.Y.Z
|
||||
__full_version_str__: Final[str] = ".".join(map(str, __version__))
|
||||
# Version string like X.Y
|
||||
|
||||
@@ -25,6 +25,8 @@ from drf_spectacular.utils import extend_schema_view
|
||||
from rest_framework.authtoken.models import Token
|
||||
from rest_framework.authtoken.views import ObtainAuthToken
|
||||
from rest_framework.decorators import action
|
||||
from rest_framework.exceptions import ValidationError
|
||||
from rest_framework.fields import BooleanField
|
||||
from rest_framework.filters import OrderingFilter
|
||||
from rest_framework.generics import GenericAPIView
|
||||
from rest_framework.pagination import PageNumberPagination
|
||||
@@ -103,6 +105,7 @@ class FaviconView(View):
|
||||
|
||||
|
||||
class UserViewSet(ModelViewSet):
|
||||
_BOOL_NOT_PROVIDED = object()
|
||||
model = User
|
||||
|
||||
queryset = User.objects.exclude(
|
||||
@@ -116,29 +119,77 @@ class UserViewSet(ModelViewSet):
|
||||
filterset_class = UserFilterSet
|
||||
ordering_fields = ("username",)
|
||||
|
||||
@staticmethod
|
||||
def _parse_requested_bool(data, key: str):
|
||||
if key not in data:
|
||||
return UserViewSet._BOOL_NOT_PROVIDED
|
||||
try:
|
||||
return BooleanField().to_internal_value(data.get(key))
|
||||
except ValidationError:
|
||||
# Let serializer validation report invalid values as 400 responses
|
||||
return UserViewSet._BOOL_NOT_PROVIDED
|
||||
|
||||
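# Hedged sketch of the parsing above: DRF's BooleanField accepts the usual JSON and
# form spellings, while an unrecognised value raises ValidationError and is treated
# here as "not provided" (serializer validation then reports it as a 400):
#
#   from rest_framework.fields import BooleanField
#   BooleanField().to_internal_value("true")   # -> True
#   BooleanField().to_internal_value(0)        # -> False
#   BooleanField().to_internal_value("maybe")  # raises ValidationError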
def create(self, request, *args, **kwargs):
|
||||
if not request.user.is_superuser and request.data.get("is_superuser") is True:
|
||||
return HttpResponseForbidden(
|
||||
"Superuser status can only be granted by a superuser",
|
||||
)
|
||||
requested_is_superuser = self._parse_requested_bool(
|
||||
request.data,
|
||||
"is_superuser",
|
||||
)
|
||||
requested_is_staff = self._parse_requested_bool(request.data, "is_staff")
|
||||
|
||||
if not request.user.is_superuser:
|
||||
if requested_is_superuser is True:
|
||||
return HttpResponseForbidden(
|
||||
"Superuser status can only be granted by a superuser",
|
||||
)
|
||||
if requested_is_staff is True:
|
||||
return HttpResponseForbidden(
|
||||
"Staff status can only be granted by a superuser",
|
||||
)
|
||||
|
||||
return super().create(request, *args, **kwargs)
|
||||
|
||||
def update(self, request, *args, **kwargs):
|
||||
user_to_update: User = self.get_object()
|
||||
|
||||
if not request.user.is_superuser and user_to_update.is_superuser:
|
||||
return HttpResponseForbidden(
|
||||
"Superusers can only be modified by other superusers",
|
||||
)
|
||||
|
||||
requested_is_superuser = self._parse_requested_bool(
|
||||
request.data,
|
||||
"is_superuser",
|
||||
)
|
||||
requested_is_staff = self._parse_requested_bool(request.data, "is_staff")
|
||||
|
||||
if (
|
||||
not request.user.is_superuser
|
||||
and request.data.get("is_superuser") is not None
|
||||
and request.data.get("is_superuser") != user_to_update.is_superuser
|
||||
and requested_is_superuser is not self._BOOL_NOT_PROVIDED
|
||||
and requested_is_superuser != user_to_update.is_superuser
|
||||
):
|
||||
return HttpResponseForbidden(
|
||||
"Superuser status can only be changed by a superuser",
|
||||
)
|
||||
if (
|
||||
not request.user.is_superuser
|
||||
and requested_is_staff is not self._BOOL_NOT_PROVIDED
|
||||
and requested_is_staff != user_to_update.is_staff
|
||||
):
|
||||
return HttpResponseForbidden(
|
||||
"Staff status can only be changed by a superuser",
|
||||
)
|
||||
return super().update(request, *args, **kwargs)
|
||||
|
||||
def destroy(self, request, *args, **kwargs):
|
||||
user_to_delete: User = self.get_object()
|
||||
|
||||
if not request.user.is_superuser and user_to_delete.is_superuser:
|
||||
return HttpResponseForbidden(
|
||||
"Superusers can only be deleted by other superusers",
|
||||
)
|
||||
|
||||
return super().destroy(request, *args, **kwargs)
|
||||
|
||||
@extend_schema(
|
||||
request=None,
|
||||
responses={
|
||||
|
||||
@@ -472,6 +472,7 @@ class MailAccountHandler(LoggingMixin):
|
||||
name=name,
|
||||
defaults={
|
||||
"match": name,
|
||||
"matching_algorithm": Correspondent.MATCH_LITERAL,
|
||||
},
|
||||
)[0]
|
||||
except DatabaseError as e:
|
||||
|
||||
@@ -1,5 +1,9 @@
from django.utils.translation import gettext as _
from rest_framework import serializers
from rest_framework.exceptions import PermissionDenied

from documents.permissions import get_objects_for_user_owner_aware
from documents.permissions import has_perms_owner_aware
from documents.serialisers import CorrespondentField
from documents.serialisers import DocumentTypeField
from documents.serialisers import OwnedObjectSerializer
@@ -56,7 +60,18 @@ class MailAccountSerializer(OwnedObjectSerializer):

class AccountField(serializers.PrimaryKeyRelatedField):
    def get_queryset(self):
        return MailAccount.objects.all().order_by("-id")
        user = getattr(self.context.get("request"), "user", None)
        if user is None:
            user = getattr(self.root, "user", None)

        if user is None:
            return MailAccount.objects.none()

        return get_objects_for_user_owner_aware(
            user,
            "change_mailaccount",
            MailAccount,
        ).order_by("-id")


class MailRuleSerializer(OwnedObjectSerializer):
@@ -127,6 +142,18 @@ class MailRuleSerializer(OwnedObjectSerializer):

        return attrs

    def validate_account(self, account):
        if self.user is not None and has_perms_owner_aware(
            self.user,
            "change_mailaccount",
            account,
        ):
            return account

        raise PermissionDenied(
            _("Insufficient permissions."),
        )

    def validate_maximum_age(self, value):
        if value > 36500:  # ~100 years
            raise serializers.ValidationError("Maximum mail age is unreasonably large.")
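Because `AccountField` resolves the requesting user from the serializer context (falling back to `self.root.user`), any code instantiating the serializer manually has to pass the request along for the per-user account scoping to apply. A rough sketch, where `request` and `payload` are hypothetical stand-ins for a DRF request object and the rule data:

```python
from paperless_mail.serialisers import MailRuleSerializer

# The account queryset is scoped to accounts the requesting user may change,
# so the serializer needs to know who is asking.
serializer = MailRuleSerializer(
    data=payload,                   # hypothetical dict of mail rule fields
    context={"request": request},   # request.user drives the account scoping
)
serializer.is_valid(raise_exception=True)  # forbidden/unknown accounts fail as does_not_exist
rule = serializer.save()
```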
@@ -632,6 +632,132 @@ class TestAPIMailRules(DirectoriesMixin, APITestCase):
        self.assertEqual(returned_rule1.name, "Updated Name 1")
        self.assertEqual(returned_rule1.action, MailRule.MailAction.DELETE)

    def test_create_mail_rule_scopes_accounts(self):
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )

        response = self.client.post(
            self.ENDPOINT,
            data={
                "name": "Rule1",
                "account": foreign_account.pk,
                "folder": "INBOX",
                "filter_from": "from@example.com",
                "maximum_age": 30,
                "action": MailRule.MailAction.MARK_READ,
                "assign_title_from": MailRule.TitleSource.FROM_SUBJECT,
                "assign_correspondent_from": MailRule.CorrespondentSource.FROM_NOTHING,
                "order": 0,
                "attachment_type": MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
            },
        )
        missing_response = self.client.post(
            self.ENDPOINT,
            data={
                "name": "Rule1",
                "account": foreign_account.pk + 1000,
                "folder": "INBOX",
                "filter_from": "from@example.com",
                "maximum_age": 30,
                "action": MailRule.MailAction.MARK_READ,
                "assign_title_from": MailRule.TitleSource.FROM_SUBJECT,
                "assign_correspondent_from": MailRule.CorrespondentSource.FROM_NOTHING,
                "order": 0,
                "attachment_type": MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
            },
        )

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(missing_response.status_code, status.HTTP_400_BAD_REQUEST)
        self.assertEqual(response.data["account"][0].code, "does_not_exist")
        self.assertEqual(missing_response.data["account"][0].code, "does_not_exist")
        self.assertEqual(MailRule.objects.count(), 0)

    def test_create_mail_rule_allowed_for_granted_account_change_permission(self):
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )
        assign_perm("change_mailaccount", self.user, foreign_account)

        response = self.client.post(
            self.ENDPOINT,
            data={
                "name": "Rule1",
                "account": foreign_account.pk,
                "folder": "INBOX",
                "filter_from": "from@example.com",
                "maximum_age": 30,
                "action": MailRule.MailAction.MARK_READ,
                "assign_title_from": MailRule.TitleSource.FROM_SUBJECT,
                "assign_correspondent_from": MailRule.CorrespondentSource.FROM_NOTHING,
                "order": 0,
                "attachment_type": MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
            },
        )

        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(MailRule.objects.get().account, foreign_account)

    def test_update_mail_rule_forbidden_for_unpermitted_account(self):
        own_account = MailAccount.objects.create(
            name="Email1",
            username="username1",
            password="password1",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
        )
        other_user = User.objects.create_user(username="mail-owner")
        foreign_account = MailAccount.objects.create(
            name="ForeignEmail",
            username="username2",
            password="password2",
            imap_server="server.example.com",
            imap_port=443,
            imap_security=MailAccount.ImapSecurity.SSL,
            character_set="UTF-8",
            owner=other_user,
        )
        rule1 = MailRule.objects.create(
            name="Rule1",
            account=own_account,
            folder="INBOX",
            filter_from="from@example.com",
            maximum_age=30,
            action=MailRule.MailAction.MARK_READ,
            assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
            assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
            order=0,
            attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
        )

        response = self.client.patch(
            f"{self.ENDPOINT}{rule1.pk}/",
            data={"account": foreign_account.pk},
        )

        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        rule1.refresh_from_db()
        self.assertEqual(rule1.account, own_account)

    def test_get_mail_rules_owner_aware(self):
        """
        GIVEN:
@@ -448,7 +448,7 @@ class TestMail(
        c = handler._get_correspondent(message, rule)
        self.assertIsNotNone(c)
        self.assertEqual(c.name, "someone@somewhere.com")
        self.assertEqual(c.matching_algorithm, MatchingModel.MATCH_ANY)
        self.assertEqual(c.matching_algorithm, MatchingModel.MATCH_LITERAL)
        self.assertEqual(c.match, "someone@somewhere.com")
        c = handler._get_correspondent(message2, rule)
        self.assertIsNotNone(c)
@@ -1991,7 +1991,7 @@ wheels = [

[[package]]
name = "paperless-ngx"
version = "2.20.8"
version = "2.20.15"
source = { virtual = "." }
dependencies = [
    { name = "babel", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -2645,11 +2645,11 @@ wheels = [

[[package]]
name = "pygments"
version = "2.19.2"
version = "2.20.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
    { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" },
]

[[package]]
@@ -2668,15 +2668,15 @@ crypto = [

[[package]]
name = "pymdown-extensions"
version = "10.20.1"
version = "10.21.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "markdown", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/1e/6c/9e370934bfa30e889d12e61d0dae009991294f40055c238980066a7fbd83/pymdown_extensions-10.20.1.tar.gz", hash = "sha256:e7e39c865727338d434b55f1dd8da51febcffcaebd6e1a0b9c836243f660740a", size = 852860, upload-time = "2026-01-24T05:56:56.758Z" }
sdist = { url = "https://files.pythonhosted.org/packages/df/08/f1c908c581fd11913da4711ea7ba32c0eee40b0190000996bb863b0c9349/pymdown_extensions-10.21.2.tar.gz", hash = "sha256:c3f55a5b8a1d0edf6699e35dcbea71d978d34ff3fa79f3d807b8a5b3fa90fbdc", size = 853922, upload-time = "2026-03-29T15:01:55.233Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/40/6d/b6ee155462a0156b94312bdd82d2b92ea56e909740045a87ccb98bf52405/pymdown_extensions-10.20.1-py3-none-any.whl", hash = "sha256:24af7feacbca56504b313b7b418c4f5e1317bb5fea60f03d57be7fcc40912aa0", size = 268768, upload-time = "2026-01-24T05:56:54.537Z" },
    { url = "https://files.pythonhosted.org/packages/f7/27/a2fc51a4a122dfd1015e921ae9d22fee3d20b0b8080d9a704578bf9deece/pymdown_extensions-10.21.2-py3-none-any.whl", hash = "sha256:5c0fd2a2bea14eb39af8ff284f1066d898ab2187d81b889b75d46d4348c01638", size = 268901, upload-time = "2026-03-29T15:01:53.244Z" },
]

[[package]]
@@ -3696,35 +3696,41 @@ wheels = [

[[package]]
name = "tomli"
version = "2.2.1"
version = "2.4.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" }
sdist = { url = "https://files.pythonhosted.org/packages/22/de/48c59722572767841493b26183a0d1cc411d54fd759c5607c4590b6563a6/tomli-2.4.1.tar.gz", hash = "sha256:7c7e1a961a0b2f2472c1ac5b69affa0ae1132c39adcb67aba98568702b9cc23f", size = 17543, upload-time = "2026-03-25T20:22:03.828Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" },
    { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" },
    { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" },
    { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" },
    { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" },
    { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" },
    { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" },
    { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" },
    { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" },
    { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" },
    { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" },
    { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" },
    { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" },
    { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" },
    { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" },
    { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" },
    { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" },
    { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" },
    { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" },
    { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" },
    { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" },
    { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" },
    { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" },
    { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" },
    { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" },
    { url = "https://files.pythonhosted.org/packages/f4/11/db3d5885d8528263d8adc260bb2d28ebf1270b96e98f0e0268d32b8d9900/tomli-2.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f8f0fc26ec2cc2b965b7a3b87cd19c5c6b8c5e5f436b984e85f486d652285c30", size = 154704, upload-time = "2026-03-25T20:21:10.473Z" },
    { url = "https://files.pythonhosted.org/packages/6d/f7/675db52c7e46064a9aa928885a9b20f4124ecb9bc2e1ce74c9106648d202/tomli-2.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4ab97e64ccda8756376892c53a72bd1f964e519c77236368527f758fbc36a53a", size = 149454, upload-time = "2026-03-25T20:21:12.036Z" },
    { url = "https://files.pythonhosted.org/packages/61/71/81c50943cf953efa35bce7646caab3cf457a7d8c030b27cfb40d7235f9ee/tomli-2.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96481a5786729fd470164b47cdb3e0e58062a496f455ee41b4403be77cb5a076", size = 237561, upload-time = "2026-03-25T20:21:13.098Z" },
    { url = "https://files.pythonhosted.org/packages/48/c1/f41d9cb618acccca7df82aaf682f9b49013c9397212cb9f53219e3abac37/tomli-2.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a881ab208c0baf688221f8cecc5401bd291d67e38a1ac884d6736cbcd8247e9", size = 243824, upload-time = "2026-03-25T20:21:14.569Z" },
    { url = "https://files.pythonhosted.org/packages/22/e4/5a816ecdd1f8ca51fb756ef684b90f2780afc52fc67f987e3c61d800a46d/tomli-2.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47149d5bd38761ac8be13a84864bf0b7b70bc051806bc3669ab1cbc56216b23c", size = 242227, upload-time = "2026-03-25T20:21:15.712Z" },
    { url = "https://files.pythonhosted.org/packages/6b/49/2b2a0ef529aa6eec245d25f0c703e020a73955ad7edf73e7f54ddc608aa5/tomli-2.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ec9bfaf3ad2df51ace80688143a6a4ebc09a248f6ff781a9945e51937008fcbc", size = 247859, upload-time = "2026-03-25T20:21:17.001Z" },
    { url = "https://files.pythonhosted.org/packages/c1/ba/42f134a3fe2b370f555f44b1d72feebb94debcab01676bf918d0cb70e9aa/tomli-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c742f741d58a28940ce01d58f0ab2ea3ced8b12402f162f4d534dfe18ba1cd6a", size = 155924, upload-time = "2026-03-25T20:21:21.626Z" },
    { url = "https://files.pythonhosted.org/packages/dc/c7/62d7a17c26487ade21c5422b646110f2162f1fcc95980ef7f63e73c68f14/tomli-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7f86fd587c4ed9dd76f318225e7d9b29cfc5a9d43de44e5754db8d1128487085", size = 150018, upload-time = "2026-03-25T20:21:23.002Z" },
    { url = "https://files.pythonhosted.org/packages/5c/05/79d13d7c15f13bdef410bdd49a6485b1c37d28968314eabee452c22a7fda/tomli-2.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ff18e6a727ee0ab0388507b89d1bc6a22b138d1e2fa56d1ad494586d61d2eae9", size = 244948, upload-time = "2026-03-25T20:21:24.04Z" },
    { url = "https://files.pythonhosted.org/packages/10/90/d62ce007a1c80d0b2c93e02cab211224756240884751b94ca72df8a875ca/tomli-2.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:136443dbd7e1dee43c68ac2694fde36b2849865fa258d39bf822c10e8068eac5", size = 253341, upload-time = "2026-03-25T20:21:25.177Z" },
    { url = "https://files.pythonhosted.org/packages/1a/7e/caf6496d60152ad4ed09282c1885cca4eea150bfd007da84aea07bcc0a3e/tomli-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5e262d41726bc187e69af7825504c933b6794dc3fbd5945e41a79bb14c31f585", size = 248159, upload-time = "2026-03-25T20:21:26.364Z" },
    { url = "https://files.pythonhosted.org/packages/99/e7/c6f69c3120de34bbd882c6fba7975f3d7a746e9218e56ab46a1bc4b42552/tomli-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5cb41aa38891e073ee49d55fbc7839cfdb2bc0e600add13874d048c94aadddd1", size = 253290, upload-time = "2026-03-25T20:21:27.46Z" },
    { url = "https://files.pythonhosted.org/packages/07/06/b823a7e818c756d9a7123ba2cda7d07bc2dd32835648d1a7b7b7a05d848d/tomli-2.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36d2bd2ad5fb9eaddba5226aa02c8ec3fa4f192631e347b3ed28186d43be6b54", size = 155866, upload-time = "2026-03-25T20:21:31.65Z" },
    { url = "https://files.pythonhosted.org/packages/14/6f/12645cf7f08e1a20c7eb8c297c6f11d31c1b50f316a7e7e1e1de6e2e7b7e/tomli-2.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:eb0dc4e38e6a1fd579e5d50369aa2e10acfc9cace504579b2faabb478e76941a", size = 149887, upload-time = "2026-03-25T20:21:33.028Z" },
    { url = "https://files.pythonhosted.org/packages/5c/e0/90637574e5e7212c09099c67ad349b04ec4d6020324539297b634a0192b0/tomli-2.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7f2c7f2b9ca6bdeef8f0fa897f8e05085923eb091721675170254cbc5b02897", size = 243704, upload-time = "2026-03-25T20:21:34.51Z" },
    { url = "https://files.pythonhosted.org/packages/10/8f/d3ddb16c5a4befdf31a23307f72828686ab2096f068eaf56631e136c1fdd/tomli-2.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f3c6818a1a86dd6dca7ddcaaf76947d5ba31aecc28cb1b67009a5877c9a64f3f", size = 251628, upload-time = "2026-03-25T20:21:36.012Z" },
    { url = "https://files.pythonhosted.org/packages/e3/f1/dbeeb9116715abee2485bf0a12d07a8f31af94d71608c171c45f64c0469d/tomli-2.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d312ef37c91508b0ab2cee7da26ec0b3ed2f03ce12bd87a588d771ae15dcf82d", size = 247180, upload-time = "2026-03-25T20:21:37.136Z" },
    { url = "https://files.pythonhosted.org/packages/d3/74/16336ffd19ed4da28a70959f92f506233bd7cfc2332b20bdb01591e8b1d1/tomli-2.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:51529d40e3ca50046d7606fa99ce3956a617f9b36380da3b7f0dd3dd28e68cb5", size = 251674, upload-time = "2026-03-25T20:21:38.298Z" },
    { url = "https://files.pythonhosted.org/packages/3c/fb/9a5c8d27dbab540869f7c1f8eb0abb3244189ce780ba9cd73f3770662072/tomli-2.4.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fd0409a3653af6c147209d267a0e4243f0ae46b011aa978b1080359fddc9b6cf", size = 155726, upload-time = "2026-03-25T20:21:42.23Z" },
    { url = "https://files.pythonhosted.org/packages/62/05/d2f816630cc771ad836af54f5001f47a6f611d2d39535364f148b6a92d6b/tomli-2.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a120733b01c45e9a0c34aeef92bf0cf1d56cfe81ed9d47d562f9ed591a9828ac", size = 149859, upload-time = "2026-03-25T20:21:43.386Z" },
    { url = "https://files.pythonhosted.org/packages/ce/48/66341bdb858ad9bd0ceab5a86f90eddab127cf8b046418009f2125630ecb/tomli-2.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:559db847dc486944896521f68d8190be1c9e719fced785720d2216fe7022b662", size = 244713, upload-time = "2026-03-25T20:21:44.474Z" },
    { url = "https://files.pythonhosted.org/packages/df/6d/c5fad00d82b3c7a3ab6189bd4b10e60466f22cfe8a08a9394185c8a8111c/tomli-2.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01f520d4f53ef97964a240a035ec2a869fe1a37dde002b57ebc4417a27ccd853", size = 252084, upload-time = "2026-03-25T20:21:45.62Z" },
    { url = "https://files.pythonhosted.org/packages/00/71/3a69e86f3eafe8c7a59d008d245888051005bd657760e96d5fbfb0b740c2/tomli-2.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7f94b27a62cfad8496c8d2513e1a222dd446f095fca8987fceef261225538a15", size = 247973, upload-time = "2026-03-25T20:21:46.937Z" },
    { url = "https://files.pythonhosted.org/packages/67/50/361e986652847fec4bd5e4a0208752fbe64689c603c7ae5ea7cb16b1c0ca/tomli-2.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ede3e6487c5ef5d28634ba3f31f989030ad6af71edfb0055cbbd14189ff240ba", size = 256223, upload-time = "2026-03-25T20:21:48.467Z" },
    { url = "https://files.pythonhosted.org/packages/45/4b/b877b05c8ba62927d9865dd980e34a755de541eb65fffba52b4cc495d4d2/tomli-2.4.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:d4d8fe59808a54658fcc0160ecfb1b30f9089906c50b23bcb4c69eddc19ec2b4", size = 164263, upload-time = "2026-03-25T20:21:52.543Z" },
    { url = "https://files.pythonhosted.org/packages/24/79/6ab420d37a270b89f7195dec5448f79400d9e9c1826df982f3f8e97b24fd/tomli-2.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7008df2e7655c495dd12d2a4ad038ff878d4ca4b81fccaf82b714e07eae4402c", size = 160736, upload-time = "2026-03-25T20:21:53.674Z" },
    { url = "https://files.pythonhosted.org/packages/02/e0/3630057d8eb170310785723ed5adcdfb7d50cb7e6455f85ba8a3deed642b/tomli-2.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1d8591993e228b0c930c4bb0db464bdad97b3289fb981255d6c9a41aedc84b2d", size = 270717, upload-time = "2026-03-25T20:21:55.129Z" },
    { url = "https://files.pythonhosted.org/packages/7a/b4/1613716072e544d1a7891f548d8f9ec6ce2faf42ca65acae01d76ea06bb0/tomli-2.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:734e20b57ba95624ecf1841e72b53f6e186355e216e5412de414e3c51e5e3c41", size = 278461, upload-time = "2026-03-25T20:21:56.228Z" },
    { url = "https://files.pythonhosted.org/packages/05/38/30f541baf6a3f6df77b3df16b01ba319221389e2da59427e221ef417ac0c/tomli-2.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8a650c2dbafa08d42e51ba0b62740dae4ecb9338eefa093aa5c78ceb546fcd5c", size = 274855, upload-time = "2026-03-25T20:21:57.653Z" },
    { url = "https://files.pythonhosted.org/packages/77/a3/ec9dd4fd2c38e98de34223b995a3b34813e6bdadf86c75314c928350ed14/tomli-2.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:504aa796fe0569bb43171066009ead363de03675276d2d121ac1a4572397870f", size = 283144, upload-time = "2026-03-25T20:21:59.089Z" },
    { url = "https://files.pythonhosted.org/packages/7b/61/cceae43728b7de99d9b847560c262873a1f6c98202171fd5ed62640b494b/tomli-2.4.1-py3-none-any.whl", hash = "sha256:0d85819802132122da43cb86656f8d1f8c6587d54ae7dcaf30e90533028b49fe", size = 14583, upload-time = "2026-03-25T20:22:03.012Z" },
]

[[package]]
@@ -4198,7 +4204,7 @@ wheels = [

[[package]]
name = "zensical"
version = "0.0.21"
version = "0.0.36"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -4207,20 +4213,20 @@ dependencies = [
    { name = "pygments", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "pymdown-extensions", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "pyyaml", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
    { name = "tomli", marker = "(python_full_version < '3.11' and sys_platform == 'darwin') or (python_full_version < '3.11' and sys_platform == 'linux')" },
    { name = "tomli", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8a/50/2655b5f72d0c72f4366be580f5e2354ff05280d047ea986fe89570e44589/zensical-0.0.21.tar.gz", hash = "sha256:c13563836fa63a3cabeffd83fe3a770ca740cfa5ae7b85df85d89837e31b3b4a", size = 3819731, upload-time = "2026-02-04T17:47:59.396Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/e9/8d0e66ad113e702d7f5eed2cc5ad0f035cb212c49b0415553473f2da900b/zensical-0.0.36.tar.gz", hash = "sha256:32126c57fd241267e55c863f2bdd31bfe4422c376280e74e4a1036a89c0d513c", size = 3897092, upload-time = "2026-04-23T15:37:46.892Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1d/98/90710d232cb35b633815fa7b493da542391b89283b6103a5bb4ae9fc0dd9/zensical-0.0.21-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:67404cc70c330246dfb7269bcdb60a25be0bb60a212a09c9c50229a1341b1f84", size = 12237120, upload-time = "2026-02-04T17:47:28.615Z" },
    { url = "https://files.pythonhosted.org/packages/97/fb/4280b3781157e8f051711732192f949bf29beeafd0df3e33c1c8bf9b7a1a/zensical-0.0.21-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:d4fd253ccfbf5af56434124f13bac01344e456c020148369b18d8836b6537c3c", size = 12118047, upload-time = "2026-02-04T17:47:31.369Z" },
    { url = "https://files.pythonhosted.org/packages/74/b3/b7f85ae9cf920cf9f17bf157ae6c274919477148feb7716bf735636caa0e/zensical-0.0.21-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:440e40cdc30a29bf7466bcd6f43ed7bd1c54ea3f1a0fefca65619358b481a5bc", size = 12473440, upload-time = "2026-02-04T17:47:33.577Z" },
    { url = "https://files.pythonhosted.org/packages/d8/ac/1dc6e98f79ed19b9f103c88a0bd271f9140565d7d26b64bc1542b3ef6d91/zensical-0.0.21-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:368e832fc8068e75dc45cab59379db4cefcd81eb116f48d058db8fb7b7aa8d14", size = 12412588, upload-time = "2026-02-04T17:47:36.491Z" },
    { url = "https://files.pythonhosted.org/packages/bd/76/16a580f6dd32b387caa4a41615451e7dddd1917a2ff2e5b08744f41b4e11/zensical-0.0.21-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4ab962d47f9dd73510eed168469326c7a452554dfbfdb9cdf85efc7140244df", size = 12749438, upload-time = "2026-02-04T17:47:38.969Z" },
    { url = "https://files.pythonhosted.org/packages/95/30/4baaa1c910eee61db5f49d0d45f2e550a0027218c618f3dd7f8da966a019/zensical-0.0.21-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b846d53dfce007f056ff31848f87f3f2a388228e24d4851c0cafdce0fa204c9b", size = 12514504, upload-time = "2026-02-04T17:47:41.31Z" },
    { url = "https://files.pythonhosted.org/packages/76/77/931fccae5580b94409a0448a26106f922dcfa7822e7b93cacd2876dd63a8/zensical-0.0.21-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:daac1075552d230d52d621d2e4754ba24d5afcaa201a7a991f1a8d57e320c9de", size = 12647832, upload-time = "2026-02-04T17:47:44.073Z" },
    { url = "https://files.pythonhosted.org/packages/5b/82/3cf75de64340829d55c87c36704f4d1d8c952bd2cdc8a7bc48cbfb8ab333/zensical-0.0.21-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:7b380f545adb6d40896f9bd698eb0e1540ed4258d35b83f55f91658d0fdae312", size = 12678537, upload-time = "2026-02-04T17:47:46.899Z" },
    { url = "https://files.pythonhosted.org/packages/77/91/6f4938dceeaa241f78bbfaf58a94acef10ba18be3468795173e3087abeb6/zensical-0.0.21-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:5c2227fdab64616bea94b40b8340bafe00e2e23631cc58eeea1e7267167e6ac5", size = 12822164, upload-time = "2026-02-04T17:47:49.231Z" },
    { url = "https://files.pythonhosted.org/packages/a2/4e/a9c9d25ef0766f767db7b4f09da68da9b3d8a28c3d68cfae01f8e3f9e297/zensical-0.0.21-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2e0f5154d236ed0f98662ee68785b67e8cd2138ea9d5e26070649e93c22eeee0", size = 12785632, upload-time = "2026-02-04T17:47:52.613Z" },
    { url = "https://files.pythonhosted.org/packages/f2/ff/2846737502a9ae783570b32aac4f20f5232512fbf245bbf1c0398728c7ed/zensical-0.0.36-cp310-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3d42312267c4124ed67ddfd2809167bdd3ea4f71892c8a20897be98b66da8b73", size = 12515534, upload-time = "2026-04-23T15:37:07.815Z" },
    { url = "https://files.pythonhosted.org/packages/84/e9/443b561793ed6626cb46c328fd8fd916a7b18e5af5349934c5346438548c/zensical-0.0.36-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:8462c133c8da5234cd301ad3c722d52d66a0092a51b7b93e2ce12f217976b29b", size = 12384874, upload-time = "2026-04-23T15:37:11.617Z" },
    { url = "https://files.pythonhosted.org/packages/7a/f0/faecf0a5dff381ff331b7b87d385c8335ca0b7297a33d85abc3313cfa598/zensical-0.0.36-cp310-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0a6dc86dc0d8488b18c6501d62b63989a538350a33173347da8b9f1f54bed2c", size = 12764889, upload-time = "2026-04-23T15:37:14.512Z" },
    { url = "https://files.pythonhosted.org/packages/b0/56/1ddee63d323d779733e5bf00e99c878f03e50b77f294711a850c1e1ceddb/zensical-0.0.36-cp310-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d31c726d7f13601a568a2a9e80592472da24657ff5428ef15c2c95bc458cb65b", size = 12705679, upload-time = "2026-04-23T15:37:18.038Z" },
    { url = "https://files.pythonhosted.org/packages/9b/61/4b264b1466251450856ed4768fa9a793f7c24172039f47f562cd899e0744/zensical-0.0.36-cp310-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a7e8b32e41784d19122cb16a0bd6fcb53852177ce689ceba1ba7a8bb20fe3a0", size = 13057470, upload-time = "2026-04-23T15:37:21.594Z" },
    { url = "https://files.pythonhosted.org/packages/17/9b/c44a1ebc2fe8daadecbd9ea41c498e545c494204e239314347fbcec51159/zensical-0.0.36-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe5d24716107edb033c2326c816b891952b98b9637c5308f5320712a2e70aac", size = 12792788, upload-time = "2026-04-23T15:37:24.784Z" },
    { url = "https://files.pythonhosted.org/packages/97/94/4d0e345f75f892fce029b513a26f4491b6dd39ff73c5bee3f8fbb9305e8c/zensical-0.0.36-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9ed7a54465b497d1548aeb6b38a99ac6f45c8f191a5cf2a180902af28c0cd58a", size = 12940940, upload-time = "2026-04-23T15:37:27.975Z" },
    { url = "https://files.pythonhosted.org/packages/de/2e/4612b97d8d493a6ac591ebb28a6b3a592eb4d969bbb8a92311125fe0b874/zensical-0.0.36-cp310-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:282eb4eaf7cd3bd389a4b826c1c13a30136e5c6fcfcafce26fc27cd05acc660f", size = 12980355, upload-time = "2026-04-23T15:37:30.998Z" },
    { url = "https://files.pythonhosted.org/packages/c1/90/c1a91b503aec105cdb7ccf4d466e8612c113186f090c61d795272cecce27/zensical-0.0.36-cp310-abi3-musllinux_1_2_i686.whl", hash = "sha256:36d5719df268697dbcf7aa5bbea9eea353501c80b1c6c17d6c7f2c69405be9af", size = 13124220, upload-time = "2026-04-23T15:37:34.506Z" },
    { url = "https://files.pythonhosted.org/packages/ac/e0/b9ffadaff0b80498699aaf0f2bcc0b659db074fd94071520d22f035e5125/zensical-0.0.36-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7771aaf33f7d06f779e041930812fe65f5f97a6f4fbd1c7e51924ce1a27c0c66", size = 13070894, upload-time = "2026-04-23T15:37:38.092Z" },
]

[[package]]
@@ -126,3 +126,5 @@ custom_fences = [
[project.markdown_extensions.pymdownx.emoji]
emoji_index = "zensical.extensions.emoji.twemoji"
emoji_generator = "zensical.extensions.emoji.to_svg"

[project.markdown_extensions.zensical.extensions.glightbox]