Mirror of https://github.com/paperless-ngx/paperless-ngx.git, synced 2026-03-20 16:05:56 +00:00

Compare commits: feature-ma...release/v2 (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 0f7c02de5e | |
| | 95dea787f2 | |
| | b6501b0c47 | |
| | 87ebd13abc | |
| | a86c9d32fe | |
| | 7942edfdf4 | |
```diff
@@ -2,6 +2,17 @@
 # shellcheck shell=bash

 declare -r log_prefix="[init-user]"

+# When the container is started as a non-root user (e.g. via `user: 999:999`
+# in Docker Compose), usermod/groupmod require root and are meaningless.
+# USERMAP_* variables only apply to the root-started path.
+if [[ -n "${USER_IS_NON_ROOT}" ]]; then
+  if [[ -n "${USERMAP_UID}" || -n "${USERMAP_GID}" ]]; then
+    echo "${log_prefix} WARNING: USERMAP_UID/USERMAP_GID are set but have no effect when the container is started as a non-root user"
+  fi
+  echo "${log_prefix} Running as non-root user ($(id --user):$(id --group)), skipping UID/GID remapping"
+  exit 0
+fi
+
 declare -r usermap_original_uid=$(id -u paperless)
 declare -r usermap_original_gid=$(id -g paperless)
 declare -r usermap_new_uid=${USERMAP_UID:-$usermap_original_uid}
```
````diff
@@ -140,24 +140,17 @@ a [superuser](usage.md#superusers) account.

 !!! warning

-    It is currently not possible to run the container rootless if additional languages are specified via `PAPERLESS_OCR_LANGUAGES`.
+    It is not possible to run the container rootless if additional languages are specified via `PAPERLESS_OCR_LANGUAGES`.

-If you want to run Paperless as a rootless container, make this
-change in `docker-compose.yml`:
+If you want to run Paperless as a rootless container, set `user:` in `docker-compose.yml` to the UID and GID of your host user (use `id -u` and `id -g` to find these values). The container process starts directly as that user with no internal privilege remapping:

-- Set the `user` running the container to map to the `paperless`
-  user in the container. This value (`user_id` below) should be
-  the same ID that `USERMAP_UID` and `USERMAP_GID` are set to in
-  `docker-compose.env`. See `USERMAP_UID` and `USERMAP_GID`
-  [here](configuration.md#docker).
-
-Your entry for Paperless should contain something like:
-
-> ```
-> webserver:
->   image: ghcr.io/paperless-ngx/paperless-ngx:latest
->   user: <user_id>
-> ```
+```yaml
+webserver:
+  image: ghcr.io/paperless-ngx/paperless-ngx:latest
+  user: '1000:1000'
+```
+
+Do not combine this with `USERMAP_UID` or `USERMAP_GID`, which are intended for the non-rootless case described in step 3.

 **File systems without inotify support (e.g. NFS)**
````
src-ui/src/app/interceptors/auth-expiry.interceptor.spec.ts (new file, 154 lines)

```typescript
import { HttpErrorResponse, HttpRequest } from '@angular/common/http'
import { TestBed } from '@angular/core/testing'
import { throwError } from 'rxjs'
import * as navUtils from '../utils/navigation'
import { AuthExpiryInterceptor } from './auth-expiry.interceptor'

describe('AuthExpiryInterceptor', () => {
  let interceptor: AuthExpiryInterceptor
  let dateNowSpy: jest.SpiedFunction<typeof Date.now>

  beforeEach(() => {
    TestBed.configureTestingModule({
      providers: [AuthExpiryInterceptor],
    })

    interceptor = TestBed.inject(AuthExpiryInterceptor)
    dateNowSpy = jest.spyOn(Date, 'now').mockReturnValue(1000)
  })

  afterEach(() => {
    jest.restoreAllMocks()
  })

  it('reloads when an API request returns 401', () => {
    const reloadSpy = jest
      .spyOn(navUtils, 'locationReload')
      .mockImplementation(() => {})

    interceptor
      .intercept(new HttpRequest('GET', '/api/documents/'), {
        handle: (_request) =>
          throwError(
            () =>
              new HttpErrorResponse({
                status: 401,
                url: '/api/documents/',
              })
          ),
      })
      .subscribe({
        error: () => undefined,
      })

    expect(reloadSpy).toHaveBeenCalledTimes(1)
  })

  it('does not reload for non-401 errors', () => {
    const reloadSpy = jest
      .spyOn(navUtils, 'locationReload')
      .mockImplementation(() => {})

    interceptor
      .intercept(new HttpRequest('GET', '/api/documents/'), {
        handle: (_request) =>
          throwError(
            () =>
              new HttpErrorResponse({
                status: 500,
                url: '/api/documents/',
              })
          ),
      })
      .subscribe({
        error: () => undefined,
      })

    expect(reloadSpy).not.toHaveBeenCalled()
  })

  it('does not reload for non-api 401 responses', () => {
    const reloadSpy = jest
      .spyOn(navUtils, 'locationReload')
      .mockImplementation(() => {})

    interceptor
      .intercept(new HttpRequest('GET', '/accounts/profile/'), {
        handle: (_request) =>
          throwError(
            () =>
              new HttpErrorResponse({
                status: 401,
                url: '/accounts/profile/',
              })
          ),
      })
      .subscribe({
        error: () => undefined,
      })

    expect(reloadSpy).not.toHaveBeenCalled()
  })

  it('reloads only once even with multiple API 401 responses', () => {
    const reloadSpy = jest
      .spyOn(navUtils, 'locationReload')
      .mockImplementation(() => {})

    const request = new HttpRequest('GET', '/api/documents/')
    const handler = {
      handle: (_request) =>
        throwError(
          () =>
            new HttpErrorResponse({
              status: 401,
              url: '/api/documents/',
            })
        ),
    }

    interceptor.intercept(request, handler).subscribe({
      error: () => undefined,
    })
    interceptor.intercept(request, handler).subscribe({
      error: () => undefined,
    })

    expect(reloadSpy).toHaveBeenCalledTimes(1)
  })

  it('retries reload after cooldown for repeated API 401 responses', () => {
    const reloadSpy = jest
      .spyOn(navUtils, 'locationReload')
      .mockImplementation(() => {})

    dateNowSpy
      .mockReturnValueOnce(1000)
      .mockReturnValueOnce(2500)
      .mockReturnValueOnce(3501)

    const request = new HttpRequest('GET', '/api/documents/')
    const handler = {
      handle: (_request) =>
        throwError(
          () =>
            new HttpErrorResponse({
              status: 401,
              url: '/api/documents/',
            })
        ),
    }

    interceptor.intercept(request, handler).subscribe({
      error: () => undefined,
    })
    interceptor.intercept(request, handler).subscribe({
      error: () => undefined,
    })
    interceptor.intercept(request, handler).subscribe({
      error: () => undefined,
    })

    expect(reloadSpy).toHaveBeenCalledTimes(2)
  })
})
```
src-ui/src/app/interceptors/auth-expiry.interceptor.ts (new file, 38 lines)

```typescript
import {
  HttpErrorResponse,
  HttpEvent,
  HttpHandler,
  HttpInterceptor,
  HttpRequest,
} from '@angular/common/http'
import { Injectable } from '@angular/core'
import { catchError, Observable, throwError } from 'rxjs'
import { locationReload } from '../utils/navigation'

@Injectable()
export class AuthExpiryInterceptor implements HttpInterceptor {
  private lastReloadAttempt = Number.NEGATIVE_INFINITY

  intercept(
    request: HttpRequest<unknown>,
    next: HttpHandler
  ): Observable<HttpEvent<unknown>> {
    return next.handle(request).pipe(
      catchError((error: unknown) => {
        if (
          error instanceof HttpErrorResponse &&
          error.status === 401 &&
          request.url.includes('/api/')
        ) {
          const now = Date.now()
          if (now - this.lastReloadAttempt >= 2000) {
            this.lastReloadAttempt = now
            locationReload()
          }
        }

        return throwError(() => error)
      })
    )
  }
}
```
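The interceptor imports `locationReload` from `../utils/navigation`, which is not part of this compare; presumably it is a thin wrapper around `window.location.reload()` kept in its own module so the spec above can spy on it. The throttling itself is a generic cooldown gate. Here is a minimal, runnable Python restatement of that logic; the 2-second window and the timestamps in the asserts come from the code and spec above, everything else is illustrative:

```python
import time


class ReloadGate:
    """Allow at most one reload per cooldown window (2 s in the interceptor)."""

    def __init__(self, cooldown: float = 2.0) -> None:
        self._last_attempt = float("-inf")  # mirrors Number.NEGATIVE_INFINITY
        self._cooldown = cooldown

    def should_reload(self, now: float | None = None) -> bool:
        now = time.monotonic() if now is None else now
        if now - self._last_attempt >= self._cooldown:
            self._last_attempt = now
            return True
        return False


gate = ReloadGate()
assert gate.should_reload(now=1.000)      # first 401: reload
assert not gate.should_reload(now=2.500)  # 1.5 s later: suppressed
assert gate.should_reload(now=3.501)      # past the 2 s cooldown: retried
```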
```diff
@@ -147,6 +147,7 @@ import { DirtyDocGuard } from './app/guards/dirty-doc.guard'
 import { DirtySavedViewGuard } from './app/guards/dirty-saved-view.guard'
 import { PermissionsGuard } from './app/guards/permissions.guard'
 import { ApiVersionInterceptor } from './app/interceptors/api-version.interceptor'
+import { AuthExpiryInterceptor } from './app/interceptors/auth-expiry.interceptor'
 import { CsrfInterceptor } from './app/interceptors/csrf.interceptor'
 import { DocumentTitlePipe } from './app/pipes/document-title.pipe'
 import { FilterPipe } from './app/pipes/filter.pipe'
```
```diff
@@ -390,6 +391,11 @@ bootstrapApplication(AppComponent, {
       useClass: ApiVersionInterceptor,
       multi: true,
     },
+    {
+      provide: HTTP_INTERCEPTORS,
+      useClass: AuthExpiryInterceptor,
+      multi: true,
+    },
     FilterPipe,
     DocumentTitlePipe,
     { provide: NgbDateAdapter, useClass: ISODateAdapter },
```
```diff
@@ -153,6 +153,11 @@ $form-check-radio-checked-bg-image-dark: url("data:image/svg+xml,<svg xmlns='htt
   --bs-list-group-action-active-color: var(--bs-body-color);
   --bs-list-group-action-active-bg: var(--pngx-bg-darker);
 }
+
+.form-control:hover::file-selector-button {
+  background-color:var(--pngx-bg-dark) !important
+}
+
 .search-container {
   input, input:focus, i-bs[name="search"] , ::placeholder {
     color: var(--pngx-primary-text-contrast) !important;
```
```diff
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import hashlib
 import logging
 import shutil
 from pathlib import Path
```
```diff
@@ -391,6 +392,14 @@ class CannotMoveFilesException(Exception):
     pass


+def _path_matches_checksum(path: Path, checksum: str | None) -> bool:
+    if checksum is None or not path.is_file():
+        return False
+
+    with path.open("rb") as f:
+        return hashlib.md5(f.read()).hexdigest() == checksum
+
+
 def _filename_template_uses_custom_fields(doc: Document) -> bool:
     template = None
     if doc.storage_path is not None:
```
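Outside Django, the recovery check reduces to: a file already sitting at the candidate path counts as "already moved" only if its bytes hash to the document's stored checksum. A self-contained sketch of that idea, with paths and contents invented for the demo:

```python
import hashlib
import tempfile
from pathlib import Path


def path_matches_checksum(path: Path, checksum: str | None) -> bool:
    # Same shape as the helper above: a missing checksum or file never matches.
    if checksum is None or not path.is_file():
        return False
    with path.open("rb") as f:
        return hashlib.md5(f.read()).hexdigest() == checksum


with tempfile.TemporaryDirectory() as tmp:
    candidate = Path(tmp) / "new" / "document.pdf"
    candidate.parent.mkdir(parents=True)
    candidate.write_bytes(b"original")

    doc_checksum = hashlib.md5(b"original").hexdigest()
    # A concurrent worker already moved the file: the old path is gone, but the
    # candidate path holds exactly the expected bytes, so no unique-suffix
    # fallback is needed.
    assert path_matches_checksum(candidate, doc_checksum)
    assert not path_matches_checksum(candidate, hashlib.md5(b"other").hexdigest())
```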
```diff
@@ -461,10 +470,12 @@ def update_filename_and_move_files(
     old_filename = instance.filename
     old_source_path = instance.source_path
     move_original = False
+    original_already_moved = False

     old_archive_filename = instance.archive_filename
     old_archive_path = instance.archive_path
     move_archive = False
+    archive_already_moved = False

     candidate_filename = generate_filename(instance)
     if len(str(candidate_filename)) > Document.MAX_STORED_FILENAME_LENGTH:
```
```diff
@@ -485,14 +496,23 @@ def update_filename_and_move_files(
         candidate_source_path.exists()
         and candidate_source_path != old_source_path
     ):
-        # Only fall back to unique search when there is an actual conflict
-        new_filename = generate_unique_filename(instance)
+        if not old_source_path.is_file() and _path_matches_checksum(
+            candidate_source_path,
+            instance.checksum,
+        ):
+            new_filename = candidate_filename
+            original_already_moved = True
+        else:
+            # Only fall back to unique search when there is an actual conflict
+            new_filename = generate_unique_filename(instance)
     else:
         new_filename = candidate_filename

     # Need to convert to string to be able to save it to the db
     instance.filename = str(new_filename)
-    move_original = old_filename != instance.filename
+    move_original = (
+        old_filename != instance.filename and not original_already_moved
+    )

     if instance.has_archive_version:
         archive_candidate = generate_filename(instance, archive_filename=True)
```
```diff
@@ -513,24 +533,38 @@ def update_filename_and_move_files(
             archive_candidate_path.exists()
             and archive_candidate_path != old_archive_path
         ):
-            new_archive_filename = generate_unique_filename(
-                instance,
-                archive_filename=True,
-            )
+            if not old_archive_path.is_file() and _path_matches_checksum(
+                archive_candidate_path,
+                instance.archive_checksum,
+            ):
+                new_archive_filename = archive_candidate
+                archive_already_moved = True
+            else:
+                new_archive_filename = generate_unique_filename(
+                    instance,
+                    archive_filename=True,
+                )
         else:
             new_archive_filename = archive_candidate

         instance.archive_filename = str(new_archive_filename)

-        move_archive = old_archive_filename != instance.archive_filename
+        move_archive = (
+            old_archive_filename != instance.archive_filename
+            and not archive_already_moved
+        )
     else:
         move_archive = False

     if not move_original and not move_archive:
-        # Just update modified. Also, don't save() here to prevent infinite recursion.
-        Document.objects.filter(pk=instance.pk).update(
-            modified=timezone.now(),
-        )
+        updates = {"modified": timezone.now()}
+        if old_filename != instance.filename:
+            updates["filename"] = instance.filename
+        if old_archive_filename != instance.archive_filename:
+            updates["archive_filename"] = instance.archive_filename
+
+        # Don't save() here to prevent infinite recursion.
+        Document.objects.filter(pk=instance.pk).update(**updates)
         return

     if move_original:
```
```diff
@@ -833,10 +867,25 @@ def run_workflows(
         if not use_overrides:
             # limit title to 128 characters
             document.title = document.title[:128]
-            # Make sure the filename and archive filename are accurate
-            document.refresh_from_db(fields=["filename", "archive_filename"])
-            # save first before setting tags
-            document.save()
+            # Save only the fields that workflow actions can set directly.
+            # Deliberately excludes filename and archive_filename — those are
+            # managed exclusively by update_filename_and_move_files via the
+            # post_save signal. Writing stale in-memory values here would revert
+            # a concurrent update_filename_and_move_files DB write, leaving the
+            # DB pointing at the old path while the file is already at the new
+            # one (see: https://github.com/paperless-ngx/paperless-ngx/issues/12386).
+            # modified has auto_now=True but is not auto-added when update_fields
+            # is specified, so it must be listed explicitly.
+            document.save(
+                update_fields=[
+                    "title",
+                    "correspondent",
+                    "document_type",
+                    "storage_path",
+                    "owner",
+                    "modified",
+                ],
+            )
             document.tags.set(doc_tag_ids)

         WorkflowRun.objects.create(
```
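Restricting `save()` to `update_fields` is the standard fix for a read-modify-write lost update: saving a stale instance with all fields clobbers columns a concurrent writer just changed, while a field-restricted save touches only what the workflow actually set. A minimal sketch with plain dicts standing in for DB rows (illustrative, not paperless-ngx code):

```python
# DB row after a concurrent worker renamed the file.
db_row = {"filename": "new/path/new.pdf", "title": "old title"}

# Stale in-memory copy loaded before the rename; the workflow sets only the title.
stale = {"filename": "old/path/old.pdf", "title": "old title"}
stale["title"] = "Updated by workflow"

# Full-row save (the old document.save()): reverts the concurrent rename.
clobbered = {**db_row, **stale}
assert clobbered["filename"] == "old/path/old.pdf"  # lost update

# Field-restricted save (save(update_fields=["title"])): the rename survives.
db_row.update({field: stale[field] for field in ["title"]})
assert db_row == {"filename": "new/path/new.pdf", "title": "Updated by workflow"}
```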
```diff
@@ -57,11 +57,18 @@ class TestSystemStatus(APITestCase):
         """
         response = self.client.get(self.ENDPOINT)
         self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
+        self.assertEqual(response["WWW-Authenticate"], "Token")
         normal_user = User.objects.create_user(username="normal_user")
         self.client.force_login(normal_user)
         response = self.client.get(self.ENDPOINT)
         self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

+    def test_system_status_with_bad_basic_auth_challenges(self) -> None:
+        self.client.credentials(HTTP_AUTHORIZATION="Basic invalid")
+        response = self.client.get(self.ENDPOINT)
+        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
+        self.assertEqual(response["WWW-Authenticate"], 'Basic realm="api"')
+
     def test_system_status_container_detection(self):
         """
         GIVEN:
```
```diff
@@ -1,4 +1,5 @@
 import datetime
+import hashlib
 import logging
 import tempfile
 from pathlib import Path
```
```diff
@@ -166,6 +167,52 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         )
         self.assertEqual(document.filename, "none/none.pdf")

+    @override_settings(FILENAME_FORMAT=None)
+    def test_stale_save_recovers_already_moved_files(self) -> None:
+        old_storage_path = StoragePath.objects.create(
+            name="old-path",
+            path="old/{{title}}",
+        )
+        new_storage_path = StoragePath.objects.create(
+            name="new-path",
+            path="new/{{title}}",
+        )
+        original_bytes = b"original"
+        archive_bytes = b"archive"
+
+        doc = Document.objects.create(
+            title="document",
+            mime_type="application/pdf",
+            checksum=hashlib.md5(original_bytes).hexdigest(),
+            archive_checksum=hashlib.md5(archive_bytes).hexdigest(),
+            filename="old/document.pdf",
+            archive_filename="old/document.pdf",
+            storage_path=old_storage_path,
+        )
+        create_source_path_directory(doc.source_path)
+        doc.source_path.write_bytes(original_bytes)
+        create_source_path_directory(doc.archive_path)
+        doc.archive_path.write_bytes(archive_bytes)
+
+        stale_doc = Document.objects.get(pk=doc.pk)
+        fresh_doc = Document.objects.get(pk=doc.pk)
+        fresh_doc.storage_path = new_storage_path
+        fresh_doc.save()
+        doc.refresh_from_db()
+        self.assertEqual(doc.filename, "new/document.pdf")
+        self.assertEqual(doc.archive_filename, "new/document.pdf")
+
+        stale_doc.storage_path = new_storage_path
+        stale_doc.save()
+
+        doc.refresh_from_db()
+        self.assertEqual(doc.filename, "new/document.pdf")
+        self.assertEqual(doc.archive_filename, "new/document.pdf")
+        self.assertIsFile(doc.source_path)
+        self.assertIsFile(doc.archive_path)
+        self.assertIsNotFile(settings.ORIGINALS_DIR / "old" / "document.pdf")
+        self.assertIsNotFile(settings.ARCHIVE_DIR / "old" / "document.pdf")
+
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
     def test_document_delete(self):
         document = Document()
```
```diff
@@ -956,6 +956,64 @@ class TestWorkflows(
         self.assertEqual(Path(doc.filename), expected_filename)
         self.assertTrue(doc.source_path.is_file())

+    def test_workflow_document_updated_does_not_overwrite_filename(self) -> None:
+        """
+        GIVEN:
+            - A document whose filename has been updated in the DB by a concurrent
+              bulk_update_documents task (simulating update_filename_and_move_files
+              completing and writing the new filename to the DB)
+            - A stale in-memory document instance still holding the old filename
+            - An active DOCUMENT_UPDATED workflow
+        WHEN:
+            - run_workflows is called with the stale in-memory instance
+              (as would happen in the second concurrent bulk_update_documents task)
+        THEN:
+            - The DB filename is NOT overwritten with the stale in-memory value
+              (regression test for GH #12386 — the race window between
+              refresh_from_db and document.save in run_workflows)
+        """
+        trigger = WorkflowTrigger.objects.create(
+            type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
+        )
+        action = WorkflowAction.objects.create(
+            type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
+            assign_title="Updated by workflow",
+        )
+        workflow = Workflow.objects.create(name="Race condition test workflow", order=0)
+        workflow.triggers.add(trigger)
+        workflow.actions.add(action)
+        workflow.save()
+
+        doc = Document.objects.create(
+            title="race condition test",
+            mime_type="application/pdf",
+            checksum="racecondition123",
+            original_filename="old.pdf",
+            filename="old/path/old.pdf",
+        )
+
+        # Simulate BUD-1 completing update_filename_and_move_files:
+        # the DB now holds the new filename while BUD-2's in-memory instance is stale.
+        new_filename = "new/path/new.pdf"
+        Document.global_objects.filter(pk=doc.pk).update(filename=new_filename)
+
+        # The stale instance still has filename="old/path/old.pdf" in memory.
+        # Mock refresh_from_db so the stale value persists through run_workflows,
+        # replicating the race window between refresh and save.
+        # Mock update_filename_and_move_files to prevent file-not-found errors
+        # since we are only testing DB state here.
+        with (
+            mock.patch(
+                "documents.signals.handlers.update_filename_and_move_files",
+            ),
+            mock.patch.object(Document, "refresh_from_db"),
+        ):
+            run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
+
+        # The DB filename must not have been reverted to the stale old value.
+        doc.refresh_from_db()
+        self.assertEqual(doc.filename, new_filename)
+
     def test_document_added_workflow(self):
         trigger = WorkflowTrigger.objects.create(
             type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
```
```diff
@@ -1038,6 +1038,7 @@ class DocumentViewSet(
         methods=["get", "post", "delete"],
         detail=True,
         permission_classes=[PaperlessNotePermissions],
+        pagination_class=None,
         filter_backends=[],
     )
     def notes(self, request, pk=None):
```
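Note: `pagination_class=None` opts this action out of any globally configured DRF pagination, so the notes endpoint keeps returning the complete list of a document's notes rather than a paginated page.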
```diff
@@ -83,3 +83,11 @@ class PaperlessBasicAuthentication(authentication.BasicAuthentication):
             raise exceptions.AuthenticationFailed("MFA required")

         return user_tuple
+
+    def authenticate_header(self, request):
+        auth_header = request.META.get("HTTP_AUTHORIZATION", "")
+        if auth_header.lower().startswith("basic "):
+            return super().authenticate_header(request)
+
+        # Still 401 for anonymous API access
+        return authentication.TokenAuthentication.keyword
```
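The effect, pinned by the two new assertions in the system-status test above: anonymous API requests are challenged with `WWW-Authenticate: Token`, which browsers do not answer with their native login dialog, while requests that actually attempted Basic auth still receive the standard `Basic realm="api"` challenge.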