mirror of
https://github.com/paperless-ngx/paperless-ngx.git
synced 2026-03-15 21:45:58 +00:00
Compare commits
15 Commits
v2.20.9
...
fix-mail-c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
90deeb8285 | ||
|
|
75e9fe823f | ||
|
|
40255cfdbb | ||
|
|
d919c341b1 | ||
|
|
ba0a80a8ad | ||
|
|
60319c6d37 | ||
|
|
d6a316b1df | ||
|
|
8f311c4b6b | ||
|
|
f25322600d | ||
|
|
615f27e6fb | ||
|
|
5b809122b5 | ||
|
|
8b8307571a | ||
|
|
0e97419e0e | ||
|
|
7ff51452f0 | ||
|
|
a6c974589f |
BIN
docs/assets/logo_full_black.png
Normal file
BIN
docs/assets/logo_full_black.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 57 KiB |
BIN
docs/assets/logo_full_white.png
Normal file
BIN
docs/assets/logo_full_white.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
@@ -1,5 +1,43 @@
|
||||
# Changelog
|
||||
|
||||
## paperless-ngx 2.20.10
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fix: support string coercion in filepath jinja templates [@shamoon](https://github.com/shamoon) ([#12244](https://github.com/paperless-ngx/paperless-ngx/pull/12244))
|
||||
- Fix: apply ordering after annotating tag document count [@shamoon](https://github.com/shamoon) ([#12238](https://github.com/paperless-ngx/paperless-ngx/pull/12238))
|
||||
- Fix: enforce path limit for db filename fields [@shamoon](https://github.com/shamoon) ([#12235](https://github.com/paperless-ngx/paperless-ngx/pull/12235))
|
||||
|
||||
### All App Changes
|
||||
|
||||
<details>
|
||||
<summary>3 changes</summary>
|
||||
|
||||
- Fix: support string coercion in filepath jinja templates [@shamoon](https://github.com/shamoon) ([#12244](https://github.com/paperless-ngx/paperless-ngx/pull/12244))
|
||||
- Fix: apply ordering after annotating tag document count [@shamoon](https://github.com/shamoon) ([#12238](https://github.com/paperless-ngx/paperless-ngx/pull/12238))
|
||||
- Fix: enforce path limit for db filename fields [@shamoon](https://github.com/shamoon) ([#12235](https://github.com/paperless-ngx/paperless-ngx/pull/12235))
|
||||
</details>
|
||||
|
||||
## paperless-ngx 2.20.9
|
||||
|
||||
### Security
|
||||
|
||||
- Resolve [GHSA-386h-chg4-cfw9](https://github.com/paperless-ngx/paperless-ngx/security/advisories/GHSA-386h-chg4-cfw9)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
|
||||
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))
|
||||
|
||||
### All App Changes
|
||||
|
||||
<details>
|
||||
<summary>2 changes</summary>
|
||||
|
||||
- Fixhancement: config option reset [@shamoon](https://github.com/shamoon) ([#12176](https://github.com/paperless-ngx/paperless-ngx/pull/12176))
|
||||
- Fix: correct page count by separating display vs collection sizes for tags [@shamoon](https://github.com/shamoon) ([#12170](https://github.com/paperless-ngx/paperless-ngx/pull/12170))
|
||||
</details>
|
||||
|
||||
## paperless-ngx 2.20.8
|
||||
|
||||
### Security
|
||||
|
||||
@@ -570,7 +570,7 @@ applied. You can use the following placeholders in the template with any trigger
|
||||
- `{{added_day}}`: added day
|
||||
- `{{added_time}}`: added time in HH:MM format
|
||||
- `{{original_filename}}`: original file name without extension
|
||||
- `{{filename}}`: current file name without extension
|
||||
- `{{filename}}`: current file name without extension (for "added" workflows this may not be final yet, you can use `{{original_filename}}`)
|
||||
- `{{doc_title}}`: current document title
|
||||
|
||||
The following placeholders are only available for "added" or "updated" triggers
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "paperless-ngx"
|
||||
version = "2.20.9"
|
||||
version = "2.20.10"
|
||||
description = "A community-supported supercharged document management system: scan, index and archive all your physical documents"
|
||||
readme = "README.md"
|
||||
requires-python = ">=3.10"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "paperless-ngx-ui",
|
||||
"version": "2.20.9",
|
||||
"version": "2.20.10",
|
||||
"scripts": {
|
||||
"preinstall": "npx only-allow pnpm",
|
||||
"ng": "ng",
|
||||
|
||||
@@ -631,6 +631,59 @@ describe('FilterableDropdownComponent & FilterableDropdownSelectionModel', () =>
|
||||
])
|
||||
})
|
||||
|
||||
it('deselecting a parent clears selected descendants', () => {
|
||||
const root: Tag = { id: 100, name: 'Root Tag' }
|
||||
const child: Tag = { id: 101, name: 'Child Tag', parent: root.id }
|
||||
const grandchild: Tag = {
|
||||
id: 102,
|
||||
name: 'Grandchild Tag',
|
||||
parent: child.id,
|
||||
}
|
||||
const other: Tag = { id: 103, name: 'Other Tag' }
|
||||
|
||||
selectionModel.items = [root, child, grandchild, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(grandchild.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Selected, false)
|
||||
|
||||
selectionModel.toggle(root.id, false)
|
||||
|
||||
expect(selectionModel.getSelectedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('un-excluding a parent clears excluded descendants', () => {
|
||||
const root: Tag = { id: 110, name: 'Root Tag' }
|
||||
const child: Tag = { id: 111, name: 'Child Tag', parent: root.id }
|
||||
const other: Tag = { id: 112, name: 'Other Tag' }
|
||||
|
||||
selectionModel.items = [root, child, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Excluded, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Excluded, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Excluded, false)
|
||||
|
||||
selectionModel.exclude(root.id, false)
|
||||
|
||||
expect(selectionModel.getExcludedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('excluding a selected parent clears selected descendants', () => {
|
||||
const root: Tag = { id: 120, name: 'Root Tag' }
|
||||
const child: Tag = { id: 121, name: 'Child Tag', parent: root.id }
|
||||
const other: Tag = { id: 122, name: 'Other Tag' }
|
||||
|
||||
selectionModel.manyToOne = true
|
||||
selectionModel.items = [root, child, other]
|
||||
selectionModel.set(root.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(child.id, ToggleableItemState.Selected, false)
|
||||
selectionModel.set(other.id, ToggleableItemState.Selected, false)
|
||||
|
||||
selectionModel.exclude(root.id, false)
|
||||
|
||||
expect(selectionModel.getExcludedItems()).toEqual([root])
|
||||
expect(selectionModel.getSelectedItems()).toEqual([other])
|
||||
})
|
||||
|
||||
it('resorts items immediately when document count sorting enabled', () => {
|
||||
const apple: Tag = { id: 55, name: 'Apple' }
|
||||
const zebra: Tag = { id: 56, name: 'Zebra' }
|
||||
|
||||
@@ -231,6 +231,7 @@ export class FilterableDropdownSelectionModel {
|
||||
state == ToggleableItemState.Excluded
|
||||
) {
|
||||
this.temporarySelectionStates.delete(id)
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
|
||||
if (!id) {
|
||||
@@ -257,6 +258,7 @@ export class FilterableDropdownSelectionModel {
|
||||
|
||||
if (this.manyToOne || this.singleSelect) {
|
||||
this.temporarySelectionStates.set(id, ToggleableItemState.Excluded)
|
||||
this.clearDescendantSelections(id)
|
||||
|
||||
if (this.singleSelect) {
|
||||
for (let key of this.temporarySelectionStates.keys()) {
|
||||
@@ -277,9 +279,15 @@ export class FilterableDropdownSelectionModel {
|
||||
newState = ToggleableItemState.NotSelected
|
||||
}
|
||||
this.temporarySelectionStates.set(id, newState)
|
||||
if (newState == ToggleableItemState.Excluded) {
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
}
|
||||
} else if (!id || state == ToggleableItemState.Excluded) {
|
||||
this.temporarySelectionStates.delete(id)
|
||||
if (id) {
|
||||
this.clearDescendantSelections(id)
|
||||
}
|
||||
}
|
||||
|
||||
if (fireEvent) {
|
||||
@@ -291,6 +299,33 @@ export class FilterableDropdownSelectionModel {
|
||||
return this.selectionStates.get(id) || ToggleableItemState.NotSelected
|
||||
}
|
||||
|
||||
private clearDescendantSelections(id: number) {
|
||||
for (const descendantID of this.getDescendantIDs(id)) {
|
||||
this.temporarySelectionStates.delete(descendantID)
|
||||
}
|
||||
}
|
||||
|
||||
private getDescendantIDs(id: number): number[] {
|
||||
const descendants: number[] = []
|
||||
const queue: number[] = [id]
|
||||
|
||||
while (queue.length) {
|
||||
const parentID = queue.shift()
|
||||
for (const item of this._items) {
|
||||
if (
|
||||
typeof item?.id === 'number' &&
|
||||
typeof (item as any)['parent'] === 'number' &&
|
||||
(item as any)['parent'] === parentID
|
||||
) {
|
||||
descendants.push(item.id)
|
||||
queue.push(item.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return descendants
|
||||
}
|
||||
|
||||
get logicalOperator(): LogicalOperator {
|
||||
return this.temporaryLogicalOperator
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
}
|
||||
|
||||
@if (document && displayFields?.includes(DisplayField.TAGS)) {
|
||||
<div class="tags d-flex flex-column text-end position-absolute me-1 fs-6">
|
||||
<div class="tags d-flex flex-column text-end position-absolute me-1 fs-6" [class.tags-no-wrap]="document.tags.length > 3">
|
||||
@for (tagID of tagIDs; track tagID) {
|
||||
<pngx-tag [tagID]="tagID" (click)="clickTag.emit(tagID);$event.stopPropagation()" [clickable]="true" linkTitle="Toggle tag filter" i18n-linkTitle></pngx-tag>
|
||||
}
|
||||
|
||||
@@ -72,4 +72,14 @@ a {
|
||||
max-width: 80%;
|
||||
row-gap: .2rem;
|
||||
line-height: 1;
|
||||
|
||||
&.tags-no-wrap {
|
||||
::ng-deep .badge {
|
||||
display: inline-block;
|
||||
max-width: 100%;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,6 +82,16 @@ describe('DocumentCardSmallComponent', () => {
|
||||
).toHaveLength(6)
|
||||
})
|
||||
|
||||
it('should clear hidden tag counter when tag count falls below the limit', () => {
|
||||
expect(component.moreTags).toEqual(3)
|
||||
|
||||
component.document.tags = [1, 2, 3, 4, 5, 6]
|
||||
fixture.detectChanges()
|
||||
|
||||
expect(component.moreTags).toBeNull()
|
||||
expect(fixture.nativeElement.textContent).not.toContain('+ 3')
|
||||
})
|
||||
|
||||
it('should try to close the preview on mouse leave', () => {
|
||||
component.popupPreview = {
|
||||
close: jest.fn(),
|
||||
|
||||
@@ -126,6 +126,7 @@ export class DocumentCardSmallComponent
|
||||
this.moreTags = this.document.tags.length - (limit - 1)
|
||||
return this.document.tags.slice(0, limit - 1)
|
||||
} else {
|
||||
this.moreTags = null
|
||||
return this.document.tags
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ export const environment = {
|
||||
apiVersion: '9', // match src/paperless/settings.py
|
||||
appTitle: 'Paperless-ngx',
|
||||
tag: 'prod',
|
||||
version: '2.20.9',
|
||||
version: '2.20.10',
|
||||
webSocketHost: window.location.host,
|
||||
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
|
||||
webSocketBaseUrl: base_url.pathname + 'ws/',
|
||||
|
||||
@@ -149,6 +149,10 @@ $form-check-radio-checked-bg-image-dark: url("data:image/svg+xml,<svg xmlns='htt
|
||||
background-color: var(--pngx-body-color-accent);
|
||||
}
|
||||
|
||||
.list-group-item-action:not(.active):active {
|
||||
--bs-list-group-action-active-color: var(--bs-body-color);
|
||||
--bs-list-group-action-active-bg: var(--pngx-bg-darker);
|
||||
}
|
||||
.search-container {
|
||||
input, input:focus, i-bs[name="search"] , ::placeholder {
|
||||
color: var(--pngx-primary-text-contrast) !important;
|
||||
|
||||
@@ -19,6 +19,7 @@ from documents.classifier import load_classifier
|
||||
from documents.data_models import ConsumableDocument
|
||||
from documents.data_models import DocumentMetadataOverrides
|
||||
from documents.file_handling import create_source_path_directory
|
||||
from documents.file_handling import generate_filename
|
||||
from documents.file_handling import generate_unique_filename
|
||||
from documents.loggers import LoggingMixin
|
||||
from documents.models import Correspondent
|
||||
@@ -493,7 +494,19 @@ class ConsumerPlugin(
|
||||
# After everything is in the database, copy the files into
|
||||
# place. If this fails, we'll also rollback the transaction.
|
||||
with FileLock(settings.MEDIA_LOCK):
|
||||
document.filename = generate_unique_filename(document)
|
||||
generated_filename = generate_unique_filename(document)
|
||||
if (
|
||||
len(str(generated_filename))
|
||||
> Document.MAX_STORED_FILENAME_LENGTH
|
||||
):
|
||||
self.log.warning(
|
||||
"Generated source filename exceeds db path limit, falling back to default naming",
|
||||
)
|
||||
generated_filename = generate_filename(
|
||||
document,
|
||||
use_format=False,
|
||||
)
|
||||
document.filename = generated_filename
|
||||
create_source_path_directory(document.source_path)
|
||||
|
||||
self._write(
|
||||
@@ -511,10 +524,23 @@ class ConsumerPlugin(
|
||||
)
|
||||
|
||||
if archive_path and Path(archive_path).is_file():
|
||||
document.archive_filename = generate_unique_filename(
|
||||
generated_archive_filename = generate_unique_filename(
|
||||
document,
|
||||
archive_filename=True,
|
||||
)
|
||||
if (
|
||||
len(str(generated_archive_filename))
|
||||
> Document.MAX_STORED_FILENAME_LENGTH
|
||||
):
|
||||
self.log.warning(
|
||||
"Generated archive filename exceeds db path limit, falling back to default naming",
|
||||
)
|
||||
generated_archive_filename = generate_filename(
|
||||
document,
|
||||
archive_filename=True,
|
||||
use_format=False,
|
||||
)
|
||||
document.archive_filename = generated_archive_filename
|
||||
create_source_path_directory(document.archive_path)
|
||||
self._write(
|
||||
document.storage_type,
|
||||
|
||||
@@ -128,17 +128,21 @@ def generate_filename(
|
||||
counter=0,
|
||||
append_gpg=True,
|
||||
archive_filename=False,
|
||||
use_format=True,
|
||||
) -> Path:
|
||||
base_path: Path | None = None
|
||||
|
||||
# Determine the source of the format string
|
||||
if doc.storage_path is not None:
|
||||
filename_format = doc.storage_path.path
|
||||
elif settings.FILENAME_FORMAT is not None:
|
||||
# Maybe convert old to new style
|
||||
filename_format = convert_format_str_to_template_format(
|
||||
settings.FILENAME_FORMAT,
|
||||
)
|
||||
if use_format:
|
||||
if doc.storage_path is not None:
|
||||
filename_format = doc.storage_path.path
|
||||
elif settings.FILENAME_FORMAT is not None:
|
||||
# Maybe convert old to new style
|
||||
filename_format = convert_format_str_to_template_format(
|
||||
settings.FILENAME_FORMAT,
|
||||
)
|
||||
else:
|
||||
filename_format = None
|
||||
else:
|
||||
filename_format = None
|
||||
|
||||
|
||||
@@ -160,6 +160,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
(STORAGE_TYPE_UNENCRYPTED, _("Unencrypted")),
|
||||
(STORAGE_TYPE_GPG, _("Encrypted with GNU Privacy Guard")),
|
||||
)
|
||||
MAX_STORED_FILENAME_LENGTH: Final[int] = 1024
|
||||
|
||||
correspondent = models.ForeignKey(
|
||||
Correspondent,
|
||||
@@ -267,7 +268,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
filename = models.FilePathField(
|
||||
_("filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=True,
|
||||
@@ -277,7 +278,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
archive_filename = models.FilePathField(
|
||||
_("archive filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=True,
|
||||
@@ -287,7 +288,7 @@ class Document(SoftDeleteModel, ModelWithOwner):
|
||||
|
||||
original_filename = models.CharField(
|
||||
_("original filename"),
|
||||
max_length=1024,
|
||||
max_length=MAX_STORED_FILENAME_LENGTH,
|
||||
editable=False,
|
||||
default=None,
|
||||
unique=False,
|
||||
|
||||
@@ -460,8 +460,22 @@ def update_filename_and_move_files(
|
||||
|
||||
old_filename = instance.filename
|
||||
old_source_path = instance.source_path
|
||||
move_original = False
|
||||
|
||||
old_archive_filename = instance.archive_filename
|
||||
old_archive_path = instance.archive_path
|
||||
move_archive = False
|
||||
|
||||
candidate_filename = generate_filename(instance)
|
||||
if len(str(candidate_filename)) > Document.MAX_STORED_FILENAME_LENGTH:
|
||||
msg = (
|
||||
f"Document {instance!s}: Generated filename exceeds db path "
|
||||
f"limit ({len(str(candidate_filename))} > "
|
||||
f"{Document.MAX_STORED_FILENAME_LENGTH}): {candidate_filename!s}"
|
||||
)
|
||||
logger.warning(msg)
|
||||
raise CannotMoveFilesException(msg)
|
||||
|
||||
candidate_source_path = (
|
||||
settings.ORIGINALS_DIR / candidate_filename
|
||||
).resolve()
|
||||
@@ -480,11 +494,16 @@ def update_filename_and_move_files(
|
||||
instance.filename = str(new_filename)
|
||||
move_original = old_filename != instance.filename
|
||||
|
||||
old_archive_filename = instance.archive_filename
|
||||
old_archive_path = instance.archive_path
|
||||
|
||||
if instance.has_archive_version:
|
||||
archive_candidate = generate_filename(instance, archive_filename=True)
|
||||
if len(str(archive_candidate)) > Document.MAX_STORED_FILENAME_LENGTH:
|
||||
msg = (
|
||||
f"Document {instance!s}: Generated archive filename exceeds "
|
||||
f"db path limit ({len(str(archive_candidate))} > "
|
||||
f"{Document.MAX_STORED_FILENAME_LENGTH}): {archive_candidate!s}"
|
||||
)
|
||||
logger.warning(msg)
|
||||
raise CannotMoveFilesException(msg)
|
||||
archive_candidate_path = (
|
||||
settings.ARCHIVE_DIR / archive_candidate
|
||||
).resolve()
|
||||
@@ -814,6 +833,8 @@ def run_workflows(
|
||||
if not use_overrides:
|
||||
# limit title to 128 characters
|
||||
document.title = document.title[:128]
|
||||
# Make sure the filename and archive filename are accurate
|
||||
document.refresh_from_db(fields=["filename", "archive_filename"])
|
||||
# save first before setting tags
|
||||
document.save()
|
||||
document.tags.set(doc_tag_ids)
|
||||
|
||||
@@ -79,6 +79,23 @@ class PlaceholderString(str):
|
||||
NO_VALUE_PLACEHOLDER = PlaceholderString("-none-")
|
||||
|
||||
|
||||
class MatchingModelContext:
|
||||
"""
|
||||
Safe template context for related objects.
|
||||
|
||||
Keeps legacy behavior where including the object in a template yields the related object's
|
||||
name as a string, while still exposing limited attributes.
|
||||
"""
|
||||
|
||||
def __init__(self, *, id: int, name: str, path: str | None = None):
|
||||
self.id = id
|
||||
self.name = name
|
||||
self.path = path
|
||||
|
||||
def __str__(self) -> str:
|
||||
return self.name
|
||||
|
||||
|
||||
_template_environment.undefined = _LogStrictUndefined
|
||||
|
||||
_template_environment.filters["get_cf_value"] = get_cf_value
|
||||
@@ -221,19 +238,26 @@ def get_safe_document_context(
|
||||
else None,
|
||||
"tags": [{"name": tag.name, "id": tag.id} for tag in tags],
|
||||
"correspondent": (
|
||||
{"name": document.correspondent.name, "id": document.correspondent.id}
|
||||
MatchingModelContext(
|
||||
name=document.correspondent.name,
|
||||
id=document.correspondent.id,
|
||||
)
|
||||
if document.correspondent
|
||||
else None
|
||||
),
|
||||
"document_type": (
|
||||
{"name": document.document_type.name, "id": document.document_type.id}
|
||||
MatchingModelContext(
|
||||
name=document.document_type.name,
|
||||
id=document.document_type.id,
|
||||
)
|
||||
if document.document_type
|
||||
else None
|
||||
),
|
||||
"storage_path": {
|
||||
"path": document.storage_path.path,
|
||||
"id": document.storage_path.id,
|
||||
}
|
||||
"storage_path": MatchingModelContext(
|
||||
name=document.storage_path.name,
|
||||
path=document.storage_path.path,
|
||||
id=document.storage_path.id,
|
||||
)
|
||||
if document.storage_path
|
||||
else None,
|
||||
}
|
||||
|
||||
@@ -633,6 +633,33 @@ class TestConsumer(
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@mock.patch("documents.consumer.generate_unique_filename")
|
||||
def testFilenameHandlingFallsBackWhenGeneratedPathExceedsDbLimit(self, m):
|
||||
m.side_effect = lambda doc, archive_filename=False: Path(
|
||||
("a" * 1100 + ".pdf") if not archive_filename else ("b" * 1100 + ".pdf"),
|
||||
)
|
||||
|
||||
with self.get_consumer(
|
||||
self.get_test_file(),
|
||||
DocumentMetadataOverrides(title="new docs"),
|
||||
) as consumer:
|
||||
consumer.run()
|
||||
|
||||
document = Document.objects.first()
|
||||
self.assertIsNotNone(document)
|
||||
assert document is not None
|
||||
|
||||
self.assertEqual(document.filename, f"{document.pk:07d}.pdf")
|
||||
self.assertLessEqual(len(document.filename), 1024)
|
||||
self.assertLessEqual(
|
||||
len(document.archive_filename),
|
||||
1024,
|
||||
)
|
||||
self.assertIsFile(document.source_path)
|
||||
self.assertIsFile(document.archive_path)
|
||||
|
||||
self._assert_first_last_send_progress()
|
||||
|
||||
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
|
||||
@mock.patch("documents.signals.handlers.generate_unique_filename")
|
||||
def testFilenameHandlingUnstableFormat(self, m):
|
||||
|
||||
@@ -1341,6 +1341,41 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
|
||||
Path("somepath/asn-201-400/asn-3xx/Does Matter.pdf"),
|
||||
)
|
||||
|
||||
def test_template_related_context_keeps_legacy_string_coercion(self):
|
||||
"""
|
||||
GIVEN:
|
||||
- A storage path template that uses related objects directly as strings
|
||||
WHEN:
|
||||
- Filepath for a document with this format is called
|
||||
THEN:
|
||||
- Related objects coerce to their names (legacy behavior)
|
||||
- Explicit attribute access remains available for new templates
|
||||
"""
|
||||
sp = StoragePath.objects.create(
|
||||
name="PARTNER",
|
||||
path=(
|
||||
"{{ document.storage_path|lower }} / "
|
||||
"{{ document.correspondent|lower|replace('mi:', 'mieter/') }} / "
|
||||
"{{ document_type|lower }} / "
|
||||
"{{ title|lower }}"
|
||||
),
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="scan_017562",
|
||||
created=datetime.date(2025, 7, 2),
|
||||
added=timezone.make_aware(datetime.datetime(2026, 3, 3, 11, 53, 16)),
|
||||
mime_type="application/pdf",
|
||||
checksum="test-checksum",
|
||||
storage_path=sp,
|
||||
correspondent=Correspondent.objects.create(name="mi:kochkach"),
|
||||
document_type=DocumentType.objects.create(name="Mietvertrag"),
|
||||
)
|
||||
|
||||
self.assertEqual(
|
||||
generate_filename(doc),
|
||||
Path("partner/mieter/kochkach/mietvertrag/scan_017562.pdf"),
|
||||
)
|
||||
|
||||
@override_settings(
|
||||
FILENAME_FORMAT="{{creation_date}}/{{ title_name_str }}",
|
||||
)
|
||||
@@ -1699,6 +1734,21 @@ class TestCustomFieldFilenameUpdates(
|
||||
self.assertTrue(Path(self.doc.source_path).is_file())
|
||||
self.assertLessEqual(m.call_count, 1)
|
||||
|
||||
@override_settings(FILENAME_FORMAT=None)
|
||||
def test_overlong_storage_path_keeps_existing_filename(self):
|
||||
initial_filename = generate_filename(self.doc)
|
||||
Document.objects.filter(pk=self.doc.pk).update(filename=str(initial_filename))
|
||||
self.doc.refresh_from_db()
|
||||
Path(self.doc.source_path).parent.mkdir(parents=True, exist_ok=True)
|
||||
Path(self.doc.source_path).touch()
|
||||
|
||||
self.doc.storage_path = StoragePath.objects.create(path="a" * 1100)
|
||||
self.doc.save()
|
||||
|
||||
self.doc.refresh_from_db()
|
||||
self.assertEqual(Path(self.doc.filename), initial_filename)
|
||||
self.assertTrue(Path(self.doc.source_path).is_file())
|
||||
|
||||
|
||||
class TestPathDateLocalization:
|
||||
"""
|
||||
|
||||
@@ -147,6 +147,16 @@ class TestTagHierarchy(APITestCase):
|
||||
assert serializer.data # triggers serialization
|
||||
assert "document_count_filter" in context
|
||||
|
||||
def test_tag_list_can_order_by_document_count_with_children(self) -> None:
|
||||
self.document.tags.add(self.child)
|
||||
|
||||
response = self.client.get(
|
||||
"/api/tags/",
|
||||
{"ordering": "document_count"},
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
def test_cannot_set_parent_to_self(self):
|
||||
tag = Tag.objects.create(name="Selfie")
|
||||
resp = self.client.patch(
|
||||
|
||||
@@ -25,6 +25,7 @@ from rest_framework.test import APIClient
|
||||
from rest_framework.test import APITestCase
|
||||
|
||||
from documents.file_handling import create_source_path_directory
|
||||
from documents.file_handling import generate_filename
|
||||
from documents.file_handling import generate_unique_filename
|
||||
from documents.signals.handlers import run_workflows
|
||||
from documents.workflows.webhooks import send_webhook
|
||||
@@ -898,6 +899,63 @@ class TestWorkflows(
|
||||
expected_str = f"Document matched {trigger} from {w}"
|
||||
self.assertIn(expected_str, cm.output[0])
|
||||
|
||||
def test_workflow_assign_custom_field_keeps_storage_filename_in_sync(self) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- Existing document with a storage path template that depends on a custom field
|
||||
- Existing workflow triggered on document update assigning that custom field
|
||||
WHEN:
|
||||
- Workflow runs for the document
|
||||
THEN:
|
||||
- The database filename remains aligned with the moved file on disk
|
||||
"""
|
||||
storage_path = StoragePath.objects.create(
|
||||
name="workflow-custom-field-path",
|
||||
path="{{ custom_fields|get_cf_value('Custom Field 1', 'none') }}/{{ title }}",
|
||||
)
|
||||
doc = Document.objects.create(
|
||||
title="workflow custom field sync",
|
||||
mime_type="application/pdf",
|
||||
checksum="workflow-custom-field-sync",
|
||||
storage_path=storage_path,
|
||||
original_filename="workflow-custom-field-sync.pdf",
|
||||
)
|
||||
CustomFieldInstance.objects.create(
|
||||
document=doc,
|
||||
field=self.cf1,
|
||||
value_text="initial",
|
||||
)
|
||||
|
||||
generated = generate_unique_filename(doc)
|
||||
destination = (settings.ORIGINALS_DIR / generated).resolve()
|
||||
create_source_path_directory(destination)
|
||||
shutil.copy(self.SAMPLE_DIR / "simple.pdf", destination)
|
||||
Document.objects.filter(pk=doc.pk).update(filename=generated.as_posix())
|
||||
doc.refresh_from_db()
|
||||
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED,
|
||||
)
|
||||
action = WorkflowAction.objects.create(
|
||||
type=WorkflowAction.WorkflowActionType.ASSIGNMENT,
|
||||
assign_custom_fields_values={self.cf1.pk: "cars"},
|
||||
)
|
||||
action.assign_custom_fields.add(self.cf1.pk)
|
||||
workflow = Workflow.objects.create(
|
||||
name="Workflow custom field filename sync",
|
||||
order=0,
|
||||
)
|
||||
workflow.triggers.add(trigger)
|
||||
workflow.actions.add(action)
|
||||
workflow.save()
|
||||
|
||||
run_workflows(WorkflowTrigger.WorkflowTriggerType.DOCUMENT_UPDATED, doc)
|
||||
|
||||
doc.refresh_from_db()
|
||||
expected_filename = generate_filename(doc)
|
||||
self.assertEqual(Path(doc.filename), expected_filename)
|
||||
self.assertTrue(doc.source_path.is_file())
|
||||
|
||||
def test_document_added_workflow(self):
|
||||
trigger = WorkflowTrigger.objects.create(
|
||||
type=WorkflowTrigger.WorkflowTriggerType.DOCUMENT_ADDED,
|
||||
|
||||
@@ -487,13 +487,13 @@ class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
|
||||
user = getattr(getattr(self, "request", None), "user", None)
|
||||
children_source = list(
|
||||
annotate_document_count_for_related_queryset(
|
||||
Tag.objects.filter(pk__in=descendant_pks | {t.pk for t in all_tags})
|
||||
.select_related("owner")
|
||||
.order_by(*ordering),
|
||||
Tag.objects.filter(
|
||||
pk__in=descendant_pks | {t.pk for t in all_tags},
|
||||
).select_related("owner"),
|
||||
through_model=self.document_count_through,
|
||||
related_object_field=self.document_count_source_field,
|
||||
user=user,
|
||||
),
|
||||
).order_by(*ordering),
|
||||
)
|
||||
else:
|
||||
children_source = all_tags
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from typing import Final
|
||||
|
||||
__version__: Final[tuple[int, int, int]] = (2, 20, 9)
|
||||
__version__: Final[tuple[int, int, int]] = (2, 20, 10)
|
||||
# Version string like X.Y.Z
|
||||
__full_version_str__: Final[str] = ".".join(map(str, __version__))
|
||||
# Version string like X.Y
|
||||
|
||||
@@ -472,6 +472,7 @@ class MailAccountHandler(LoggingMixin):
|
||||
name=name,
|
||||
defaults={
|
||||
"match": name,
|
||||
"matching_algorithm": Correspondent.MATCH_AUTO,
|
||||
},
|
||||
)[0]
|
||||
except DatabaseError as e:
|
||||
|
||||
@@ -448,7 +448,7 @@ class TestMail(
|
||||
c = handler._get_correspondent(message, rule)
|
||||
self.assertIsNotNone(c)
|
||||
self.assertEqual(c.name, "someone@somewhere.com")
|
||||
self.assertEqual(c.matching_algorithm, MatchingModel.MATCH_ANY)
|
||||
self.assertEqual(c.matching_algorithm, MatchingModel.MATCH_AUTO)
|
||||
self.assertEqual(c.match, "someone@somewhere.com")
|
||||
c = handler._get_correspondent(message2, rule)
|
||||
self.assertIsNotNone(c)
|
||||
|
||||
Reference in New Issue
Block a user