Compare commits

..

3 Commits

Author SHA1 Message Date
Trenton H
9a28e2893a Encodes the string just once for compare json 2026-04-10 14:00:47 -07:00
Trenton H
9e871dce25 Fix: Add directory marker entries to zip exports
Without explicit directory entries, some zip viewers (simpler tools,
web-based viewers) don't show the folder structure when browsing the
archive. Add an _ensure_zip_dirs() helper that writes directory markers
for all parent paths of each file entry, deduplicating via a set.
Uses ZipFile.mkdir() (available since Python 3.11, the project minimum).

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-10 13:36:12 -07:00
Trenton H
f875863fe0 Refactor: Write zip exports directly into ZipFile instead of temp dir
Replace the temp-dir + shutil.make_archive() workaround with direct
zipfile.ZipFile writes. Document files are added via zf.write() and
JSON manifests via zf.writestr()/StringIO buffering, eliminating the
double-I/O and 2x disk usage of the previous approach.

Key changes:
- Removed tempfile.TemporaryDirectory and shutil.make_archive() from handle()
- ZipFile opened on a .tmp path; renamed to final .zip atomically on success;
  .tmp cleaned up on failure
- StreamingManifestWriter: zip mode buffers manifest in io.StringIO and
  writes to zip atomically on close() (zipfile allows only one open write
  handle at a time)
- check_and_copy(): zip mode calls zf.write(source, arcname=...) directly
- check_and_write_json(): zip mode calls zf.writestr(arcname, ...) directly
- files_in_export_dir scan skipped in zip mode (always fresh write)
- --compare-checksums and --compare-json emit warnings when used with --zip
- --delete in zip mode removes pre-existing files from target dir, skipping
  the in-progress .tmp and any prior .zip
- Added tests: atomicity on failure, no SCRATCH_DIR usage

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-10 13:36:11 -07:00
14 changed files with 7994 additions and 6749 deletions

View File

@@ -165,7 +165,6 @@ jobs:
contents: read
env:
DEFAULT_PYTHON: "3.12"
PAPERLESS_SECRET_KEY: "ci-typing-not-a-real-secret"
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2

View File

@@ -88,7 +88,6 @@ jobs:
uv export --quiet --no-dev --all-extras --format requirements-txt --output-file requirements.txt
- name: Compile messages
env:
PAPERLESS_SECRET_KEY: "ci-release-not-a-real-secret"
PYTHON_VERSION: ${{ steps.setup-python.outputs.python-version }}
run: |
cd src/
@@ -97,7 +96,6 @@ jobs:
manage.py compilemessages
- name: Collect static files
env:
PAPERLESS_SECRET_KEY: "ci-release-not-a-real-secret"
PYTHON_VERSION: ${{ steps.setup-python.outputs.python-version }}
run: |
cd src/

View File

@@ -36,8 +36,6 @@ jobs:
--group dev \
--frozen
- name: Generate backend translation strings
env:
PAPERLESS_SECRET_KEY: "ci-translate-not-a-real-secret"
run: cd src/ && uv run manage.py makemessages -l en_US -i "samples*"
- name: Install pnpm
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v5.0.0

3
.gitignore vendored
View File

@@ -79,7 +79,6 @@ virtualenv
/docker-compose.env
/docker-compose.yml
.ruff_cache/
.mypy_cache/
# Used for development
scripts/import-for-development
@@ -112,6 +111,4 @@ celerybeat-schedule*
# ignore pnpm package store folder created when setting up the devcontainer
.pnpm-store/
# Git worktree local folder
.worktrees

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@@ -1,8 +1,9 @@
import hashlib
import io
import json
import os
import shutil
import tempfile
import zipfile
from itertools import islice
from pathlib import Path
from typing import TYPE_CHECKING
@@ -97,6 +98,8 @@ class StreamingManifestWriter:
*,
compare_json: bool = False,
files_in_export_dir: "set[Path] | None" = None,
zip_file: "zipfile.ZipFile | None" = None,
zip_arcname: str | None = None,
) -> None:
self._path = path.resolve()
self._tmp_path = self._path.with_suffix(self._path.suffix + ".tmp")
@@ -104,12 +107,20 @@ class StreamingManifestWriter:
self._files_in_export_dir: set[Path] = (
files_in_export_dir if files_in_export_dir is not None else set()
)
self._zip_file = zip_file
self._zip_arcname = zip_arcname
self._zip_mode = zip_file is not None
self._file = None
self._first = True
def open(self) -> None:
self._path.parent.mkdir(parents=True, exist_ok=True)
self._file = self._tmp_path.open("w", encoding="utf-8")
if self._zip_mode:
# zipfile only allows one open write handle at a time, so buffer
# the manifest in memory and write it atomically on close()
self._file = io.StringIO()
else:
self._path.parent.mkdir(parents=True, exist_ok=True)
self._file = self._tmp_path.open("w", encoding="utf-8")
self._file.write("[")
self._first = True
@@ -130,15 +141,18 @@ class StreamingManifestWriter:
if self._file is None:
return
self._file.write("\n]")
if self._zip_mode:
self._zip_file.writestr(self._zip_arcname, self._file.getvalue())
self._file.close()
self._file = None
self._finalize()
if not self._zip_mode:
self._finalize()
def discard(self) -> None:
if self._file is not None:
self._file.close()
self._file = None
if self._tmp_path.exists():
if not self._zip_mode and self._tmp_path.exists():
self._tmp_path.unlink()
def _finalize(self) -> None:
@@ -315,18 +329,13 @@ class Command(CryptMixin, PaperlessCommand):
self.files_in_export_dir: set[Path] = set()
self.exported_files: set[str] = set()
self.zip_file: zipfile.ZipFile | None = None
self._zip_dirs: set[str] = set()
# If zipping, save the original target for later and
# get a temporary directory for the target instead
temp_dir = None
self.original_target = self.target
if self.zip_export:
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
temp_dir = tempfile.TemporaryDirectory(
dir=settings.SCRATCH_DIR,
prefix="paperless-export",
)
self.target = Path(temp_dir.name).resolve()
zip_name = options["zip_name"]
self.zip_path = (self.target / zip_name).with_suffix(".zip")
self.zip_tmp_path = self.zip_path.parent / (self.zip_path.name + ".tmp")
if not self.target.exists():
raise CommandError("That path doesn't exist")
@@ -337,30 +346,53 @@ class Command(CryptMixin, PaperlessCommand):
if not os.access(self.target, os.W_OK):
raise CommandError("That path doesn't appear to be writable")
if self.zip_export:
if self.compare_checksums:
self.stdout.write(
self.style.WARNING(
"--compare-checksums is ignored when --zip is used",
),
)
if self.compare_json:
self.stdout.write(
self.style.WARNING(
"--compare-json is ignored when --zip is used",
),
)
try:
# Prevent any ongoing changes in the documents
with FileLock(settings.MEDIA_LOCK):
self.dump()
# We've written everything to the temporary directory in this case,
# now make an archive in the original target, with all files stored
if self.zip_export and temp_dir is not None:
shutil.make_archive(
self.original_target / options["zip_name"],
format="zip",
root_dir=temp_dir.name,
if self.zip_export:
self.zip_file = zipfile.ZipFile(
self.zip_tmp_path,
"w",
compression=zipfile.ZIP_DEFLATED,
allowZip64=True,
)
self.dump()
if self.zip_file is not None:
self.zip_file.close()
self.zip_file = None
self.zip_tmp_path.rename(self.zip_path)
finally:
# Always cleanup the temporary directory, if one was created
if self.zip_export and temp_dir is not None:
temp_dir.cleanup()
# Ensure zip_file is closed and the incomplete .tmp is removed on failure
if self.zip_file is not None:
self.zip_file.close()
self.zip_file = None
if self.zip_export and self.zip_tmp_path.exists():
self.zip_tmp_path.unlink()
def dump(self) -> None:
# 1. Take a snapshot of what files exist in the current export folder
for x in self.target.glob("**/*"):
if x.is_file():
self.files_in_export_dir.add(x.resolve())
# (skipped in zip mode — always write fresh, no skip/compare logic applies)
if not self.zip_export:
for x in self.target.glob("**/*"):
if x.is_file():
self.files_in_export_dir.add(x.resolve())
# 2. Create manifest, containing all correspondents, types, tags, storage paths
# note, documents and ui_settings
@@ -421,6 +453,8 @@ class Command(CryptMixin, PaperlessCommand):
manifest_path,
compare_json=self.compare_json,
files_in_export_dir=self.files_in_export_dir,
zip_file=self.zip_file,
zip_arcname="manifest.json",
) as writer:
with transaction.atomic():
for key, qs in manifest_key_to_object_query.items():
@@ -539,8 +573,12 @@ class Command(CryptMixin, PaperlessCommand):
self.target,
)
else:
# 5. Remove anything in the original location (before moving the zip)
for item in self.original_target.glob("*"):
# 5. Remove pre-existing files/dirs from target, keeping the
# in-progress zip (.tmp) and any prior zip at the final path
skip = {self.zip_path.resolve(), self.zip_tmp_path.resolve()}
for item in self.target.glob("*"):
if item.resolve() in skip:
continue
if item.is_dir():
shutil.rmtree(item)
else:
@@ -710,9 +748,23 @@ class Command(CryptMixin, PaperlessCommand):
if self.use_folder_prefix:
manifest_name = Path("json") / manifest_name
manifest_name = (self.target / manifest_name).resolve()
manifest_name.parent.mkdir(parents=True, exist_ok=True)
if not self.zip_export:
manifest_name.parent.mkdir(parents=True, exist_ok=True)
self.check_and_write_json(content, manifest_name)
def _ensure_zip_dirs(self, arcname: str) -> None:
"""Write directory marker entries for all parent directories of arcname.
Some zip viewers only show folder structure when explicit directory
entries exist, so we add them to avoid confusing users.
"""
parts = Path(arcname).parts[:-1]
for i in range(len(parts)):
dir_arc = "/".join(parts[: i + 1]) + "/"
if dir_arc not in self._zip_dirs:
self._zip_dirs.add(dir_arc)
self.zip_file.mkdir(dir_arc)
def check_and_write_json(
self,
content: list[dict] | dict,
@@ -725,32 +777,38 @@ class Command(CryptMixin, PaperlessCommand):
This preserves the file timestamps when no changes are made.
"""
target = target.resolve()
perform_write = True
if target in self.files_in_export_dir:
self.files_in_export_dir.remove(target)
if self.compare_json:
target_checksum = hashlib.blake2b(target.read_bytes()).hexdigest()
src_str = json.dumps(
content,
cls=DjangoJSONEncoder,
indent=2,
ensure_ascii=False,
)
src_checksum = hashlib.blake2b(src_str.encode("utf-8")).hexdigest()
if src_checksum == target_checksum:
perform_write = False
if perform_write:
target.write_text(
if self.zip_export:
arcname = str(target.resolve().relative_to(self.target))
self._ensure_zip_dirs(arcname)
self.zip_file.writestr(
arcname,
json.dumps(
content,
cls=DjangoJSONEncoder,
indent=2,
ensure_ascii=False,
),
encoding="utf-8",
)
return
target = target.resolve()
json_str = json.dumps(
content,
cls=DjangoJSONEncoder,
indent=2,
ensure_ascii=False,
)
perform_write = True
if target in self.files_in_export_dir:
self.files_in_export_dir.remove(target)
if self.compare_json:
target_checksum = hashlib.blake2b(target.read_bytes()).hexdigest()
src_checksum = hashlib.blake2b(json_str.encode("utf-8")).hexdigest()
if src_checksum == target_checksum:
perform_write = False
if perform_write:
target.write_text(json_str, encoding="utf-8")
def check_and_copy(
self,
@@ -763,6 +821,12 @@ class Command(CryptMixin, PaperlessCommand):
the source attributes
"""
if self.zip_export:
arcname = str(target.resolve().relative_to(self.target))
self._ensure_zip_dirs(arcname)
self.zip_file.write(source, arcname=arcname)
return
target = target.resolve()
if target in self.files_in_export_dir:
self.files_in_export_dir.remove(target)

View File

@@ -100,7 +100,7 @@ logger = logging.getLogger("paperless.serializers")
# https://www.django-rest-framework.org/api-guide/serializers/#example
class DynamicFieldsModelSerializer(serializers.ModelSerializer[Any]):
class DynamicFieldsModelSerializer(serializers.ModelSerializer):
"""
A ModelSerializer that takes an additional `fields` argument that
controls which fields should be displayed.
@@ -121,7 +121,7 @@ class DynamicFieldsModelSerializer(serializers.ModelSerializer[Any]):
self.fields.pop(field_name)
class MatchingModelSerializer(serializers.ModelSerializer[Any]):
class MatchingModelSerializer(serializers.ModelSerializer):
document_count = serializers.IntegerField(read_only=True)
def get_slug(self, obj) -> str:
@@ -261,7 +261,7 @@ class SetPermissionsSerializer(serializers.DictField):
class OwnedObjectSerializer(
SerializerWithPerms,
serializers.ModelSerializer[Any],
serializers.ModelSerializer,
SetPermissionsMixin,
):
def __init__(self, *args, **kwargs) -> None:
@@ -469,7 +469,7 @@ class OwnedObjectSerializer(
return super().update(instance, validated_data)
class OwnedObjectListSerializer(serializers.ListSerializer[Any]):
class OwnedObjectListSerializer(serializers.ListSerializer):
def to_representation(self, documents):
self.child.context["shared_object_pks"] = self.child.get_shared_object_pks(
documents,
@@ -682,27 +682,27 @@ class TagSerializer(MatchingModelSerializer, OwnedObjectSerializer):
return super().validate(attrs)
class CorrespondentField(serializers.PrimaryKeyRelatedField[Correspondent]):
class CorrespondentField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return Correspondent.objects.all()
class TagsField(serializers.PrimaryKeyRelatedField[Tag]):
class TagsField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return Tag.objects.all()
class DocumentTypeField(serializers.PrimaryKeyRelatedField[DocumentType]):
class DocumentTypeField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return DocumentType.objects.all()
class StoragePathField(serializers.PrimaryKeyRelatedField[StoragePath]):
class StoragePathField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return StoragePath.objects.all()
class CustomFieldSerializer(serializers.ModelSerializer[CustomField]):
class CustomFieldSerializer(serializers.ModelSerializer):
data_type = serializers.ChoiceField(
choices=CustomField.FieldDataType,
read_only=False,
@@ -816,7 +816,7 @@ def validate_documentlink_targets(user, doc_ids):
)
class CustomFieldInstanceSerializer(serializers.ModelSerializer[CustomFieldInstance]):
class CustomFieldInstanceSerializer(serializers.ModelSerializer):
field = serializers.PrimaryKeyRelatedField(queryset=CustomField.objects.all())
value = ReadWriteSerializerMethodField(allow_null=True)
@@ -922,14 +922,14 @@ class CustomFieldInstanceSerializer(serializers.ModelSerializer[CustomFieldInsta
]
class BasicUserSerializer(serializers.ModelSerializer[User]):
class BasicUserSerializer(serializers.ModelSerializer):
# Different than paperless.serializers.UserSerializer
class Meta:
model = User
fields = ["id", "username", "first_name", "last_name"]
class NotesSerializer(serializers.ModelSerializer[Note]):
class NotesSerializer(serializers.ModelSerializer):
user = BasicUserSerializer(read_only=True)
class Meta:
@@ -1256,7 +1256,7 @@ class DocumentSerializer(
list_serializer_class = OwnedObjectListSerializer
class SearchResultListSerializer(serializers.ListSerializer[Document]):
class SearchResultListSerializer(serializers.ListSerializer):
def to_representation(self, hits):
document_ids = [hit["id"] for hit in hits]
# Fetch all Document objects in the list in one SQL query.
@@ -1313,7 +1313,7 @@ class SearchResultSerializer(DocumentSerializer):
list_serializer_class = SearchResultListSerializer
class SavedViewFilterRuleSerializer(serializers.ModelSerializer[SavedViewFilterRule]):
class SavedViewFilterRuleSerializer(serializers.ModelSerializer):
class Meta:
model = SavedViewFilterRule
fields = ["rule_type", "value"]
@@ -2401,7 +2401,7 @@ class StoragePathSerializer(MatchingModelSerializer, OwnedObjectSerializer):
return super().update(instance, validated_data)
class UiSettingsViewSerializer(serializers.ModelSerializer[UiSettings]):
class UiSettingsViewSerializer(serializers.ModelSerializer):
settings = serializers.DictField(required=False, allow_null=True)
class Meta:
@@ -2760,7 +2760,7 @@ class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
return attrs
class WorkflowTriggerSerializer(serializers.ModelSerializer[WorkflowTrigger]):
class WorkflowTriggerSerializer(serializers.ModelSerializer):
id = serializers.IntegerField(required=False, allow_null=True)
sources = fields.MultipleChoiceField(
choices=WorkflowTrigger.DocumentSourceChoices.choices,
@@ -2870,7 +2870,7 @@ class WorkflowTriggerSerializer(serializers.ModelSerializer[WorkflowTrigger]):
return super().update(instance, validated_data)
class WorkflowActionEmailSerializer(serializers.ModelSerializer[WorkflowActionEmail]):
class WorkflowActionEmailSerializer(serializers.ModelSerializer):
id = serializers.IntegerField(allow_null=True, required=False)
class Meta:
@@ -2884,9 +2884,7 @@ class WorkflowActionEmailSerializer(serializers.ModelSerializer[WorkflowActionEm
]
class WorkflowActionWebhookSerializer(
serializers.ModelSerializer[WorkflowActionWebhook],
):
class WorkflowActionWebhookSerializer(serializers.ModelSerializer):
id = serializers.IntegerField(allow_null=True, required=False)
def validate_url(self, url):
@@ -2907,7 +2905,7 @@ class WorkflowActionWebhookSerializer(
]
class WorkflowActionSerializer(serializers.ModelSerializer[WorkflowAction]):
class WorkflowActionSerializer(serializers.ModelSerializer):
id = serializers.IntegerField(required=False, allow_null=True)
assign_correspondent = CorrespondentField(allow_null=True, required=False)
assign_tags = TagsField(many=True, allow_null=True, required=False)
@@ -3029,7 +3027,7 @@ class WorkflowActionSerializer(serializers.ModelSerializer[WorkflowAction]):
return attrs
class WorkflowSerializer(serializers.ModelSerializer[Workflow]):
class WorkflowSerializer(serializers.ModelSerializer):
order = serializers.IntegerField(required=False)
triggers = WorkflowTriggerSerializer(many=True)

View File

@@ -615,7 +615,7 @@ class TestExportImport(
self.assertIsFile(expected_file)
with ZipFile(expected_file) as zip:
# Extras are from the directories, which also appear in the listing
# 11 files + 3 directory marker entries for the subdirectory structure
self.assertEqual(len(zip.namelist()), 14)
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
@@ -666,6 +666,57 @@ class TestExportImport(
self.assertIn("manifest.json", zip.namelist())
self.assertIn("metadata.json", zip.namelist())
def test_export_zip_atomic_on_failure(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
WHEN:
- Export raises an exception mid-way
THEN:
- No .zip file is written at the final path
- The .tmp file is cleaned up
"""
args = ["document_exporter", self.target, "--zip"]
with mock.patch.object(
document_exporter.Command,
"dump",
side_effect=RuntimeError("simulated failure"),
):
with self.assertRaises(RuntimeError):
call_command(*args)
expected_zip = self.target / f"export-{timezone.localdate().isoformat()}.zip"
expected_tmp = (
self.target / f"export-{timezone.localdate().isoformat()}.zip.tmp"
)
self.assertIsNotFile(expected_zip)
self.assertIsNotFile(expected_tmp)
def test_export_zip_no_scratch_dir(self) -> None:
"""
GIVEN:
- Request to export documents to zipfile
WHEN:
- Documents are exported
THEN:
- No files are written under SCRATCH_DIR during the export
(the old workaround used a temp dir there)
"""
shutil.rmtree(Path(self.dirs.media_dir) / "documents")
shutil.copytree(
Path(__file__).parent / "samples" / "documents",
Path(self.dirs.media_dir) / "documents",
)
scratch_before = set(settings.SCRATCH_DIR.glob("paperless-export*"))
args = ["document_exporter", self.target, "--zip"]
call_command(*args)
scratch_after = set(settings.SCRATCH_DIR.glob("paperless-export*"))
self.assertEqual(scratch_before, scratch_after)
def test_export_target_not_exists(self) -> None:
"""
GIVEN:

View File

@@ -291,7 +291,7 @@ class IndexView(TemplateView):
return context
class PassUserMixin(GenericAPIView[Any]):
class PassUserMixin(GenericAPIView):
"""
Pass a user object to serializer
"""
@@ -457,10 +457,7 @@ class PermissionsAwareDocumentCountMixin(BulkPermissionMixin, PassUserMixin):
@extend_schema_view(**generate_object_with_permissions_schema(CorrespondentSerializer))
class CorrespondentViewSet(
PermissionsAwareDocumentCountMixin,
ModelViewSet[Correspondent],
):
class CorrespondentViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
model = Correspondent
queryset = Correspondent.objects.select_related("owner").order_by(Lower("name"))
@@ -497,7 +494,7 @@ class CorrespondentViewSet(
@extend_schema_view(**generate_object_with_permissions_schema(TagSerializer))
class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[Tag]):
class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
model = Tag
serializer_class = TagSerializer
document_count_through = Document.tags.through
@@ -576,10 +573,7 @@ class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[Tag]):
@extend_schema_view(**generate_object_with_permissions_schema(DocumentTypeSerializer))
class DocumentTypeViewSet(
PermissionsAwareDocumentCountMixin,
ModelViewSet[DocumentType],
):
class DocumentTypeViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
model = DocumentType
queryset = DocumentType.objects.select_related("owner").order_by(Lower("name"))
@@ -814,7 +808,7 @@ class DocumentViewSet(
UpdateModelMixin,
DestroyModelMixin,
ListModelMixin,
GenericViewSet[Document],
GenericViewSet,
):
model = Document
queryset = Document.objects.all()
@@ -1958,7 +1952,7 @@ class ChatStreamingSerializer(serializers.Serializer):
],
name="dispatch",
)
class ChatStreamingView(GenericAPIView[Any]):
class ChatStreamingView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = ChatStreamingSerializer
@@ -2284,7 +2278,7 @@ class LogViewSet(ViewSet):
@extend_schema_view(**generate_object_with_permissions_schema(SavedViewSerializer))
class SavedViewViewSet(BulkPermissionMixin, PassUserMixin, ModelViewSet[SavedView]):
class SavedViewViewSet(BulkPermissionMixin, PassUserMixin, ModelViewSet):
model = SavedView
queryset = SavedView.objects.select_related("owner").prefetch_related(
@@ -2762,7 +2756,7 @@ class RemovePasswordDocumentsView(DocumentOperationPermissionMixin):
},
),
)
class PostDocumentView(GenericAPIView[Any]):
class PostDocumentView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = PostDocumentSerializer
parser_classes = (parsers.MultiPartParser,)
@@ -2883,7 +2877,7 @@ class PostDocumentView(GenericAPIView[Any]):
},
),
)
class SelectionDataView(GenericAPIView[Any]):
class SelectionDataView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = DocumentListSerializer
parser_classes = (parsers.MultiPartParser, parsers.JSONParser)
@@ -2987,7 +2981,7 @@ class SelectionDataView(GenericAPIView[Any]):
},
),
)
class SearchAutoCompleteView(GenericAPIView[Any]):
class SearchAutoCompleteView(GenericAPIView):
permission_classes = (IsAuthenticated,)
def get(self, request, format=None):
@@ -3268,7 +3262,7 @@ class GlobalSearchView(PassUserMixin):
},
),
)
class StatisticsView(GenericAPIView[Any]):
class StatisticsView(GenericAPIView):
permission_classes = (IsAuthenticated,)
def get(self, request, format=None):
@@ -3370,7 +3364,7 @@ class StatisticsView(GenericAPIView[Any]):
)
class BulkDownloadView(DocumentSelectionMixin, GenericAPIView[Any]):
class BulkDownloadView(DocumentSelectionMixin, GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = BulkDownloadSerializer
parser_classes = (parsers.JSONParser,)
@@ -3423,7 +3417,7 @@ class BulkDownloadView(DocumentSelectionMixin, GenericAPIView[Any]):
@extend_schema_view(**generate_object_with_permissions_schema(StoragePathSerializer))
class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[StoragePath]):
class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
model = StoragePath
queryset = StoragePath.objects.select_related("owner").order_by(
@@ -3487,7 +3481,7 @@ class StoragePathViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[Storag
return Response(result)
class UiSettingsView(GenericAPIView[Any]):
class UiSettingsView(GenericAPIView):
queryset = UiSettings.objects.all()
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = UiSettingsViewSerializer
@@ -3585,7 +3579,7 @@ class UiSettingsView(GenericAPIView[Any]):
},
),
)
class RemoteVersionView(GenericAPIView[Any]):
class RemoteVersionView(GenericAPIView):
cache_key = "remote_version_view_latest_release"
def get(self, request, format=None):
@@ -3662,7 +3656,7 @@ class RemoteVersionView(GenericAPIView[Any]):
),
],
)
class TasksViewSet(ReadOnlyModelViewSet[PaperlessTask]):
class TasksViewSet(ReadOnlyModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = TasksViewSerializer
filter_backends = (
@@ -3736,7 +3730,7 @@ class TasksViewSet(ReadOnlyModelViewSet[PaperlessTask]):
)
class ShareLinkViewSet(PassUserMixin, ModelViewSet[ShareLink]):
class ShareLinkViewSet(ModelViewSet, PassUserMixin):
model = ShareLink
queryset = ShareLink.objects.all()
@@ -3753,7 +3747,7 @@ class ShareLinkViewSet(PassUserMixin, ModelViewSet[ShareLink]):
ordering_fields = ("created", "expiration", "document")
class ShareLinkBundleViewSet(PassUserMixin, ModelViewSet[ShareLinkBundle]):
class ShareLinkBundleViewSet(ModelViewSet, PassUserMixin):
model = ShareLinkBundle
queryset = ShareLinkBundle.objects.all()
@@ -4110,7 +4104,7 @@ class BulkEditObjectsView(PassUserMixin):
return Response({"result": "OK"})
class WorkflowTriggerViewSet(ModelViewSet[WorkflowTrigger]):
class WorkflowTriggerViewSet(ModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = WorkflowTriggerSerializer
@@ -4128,7 +4122,7 @@ class WorkflowTriggerViewSet(ModelViewSet[WorkflowTrigger]):
return super().partial_update(request, *args, **kwargs)
class WorkflowActionViewSet(ModelViewSet[WorkflowAction]):
class WorkflowActionViewSet(ModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = WorkflowActionSerializer
@@ -4153,7 +4147,7 @@ class WorkflowActionViewSet(ModelViewSet[WorkflowAction]):
return super().partial_update(request, *args, **kwargs)
class WorkflowViewSet(ModelViewSet[Workflow]):
class WorkflowViewSet(ModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = WorkflowSerializer
@@ -4171,7 +4165,7 @@ class WorkflowViewSet(ModelViewSet[Workflow]):
)
class CustomFieldViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet[CustomField]):
class CustomFieldViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = CustomFieldSerializer

View File

@@ -74,7 +74,7 @@ class PaperlessAuthTokenSerializer(AuthTokenSerializer):
return attrs
class UserSerializer(PasswordValidationMixin, serializers.ModelSerializer[User]):
class UserSerializer(PasswordValidationMixin, serializers.ModelSerializer):
password = ObfuscatedPasswordField(required=False)
user_permissions = serializers.SlugRelatedField(
many=True,
@@ -142,7 +142,7 @@ class UserSerializer(PasswordValidationMixin, serializers.ModelSerializer[User])
return user
class GroupSerializer(serializers.ModelSerializer[Group]):
class GroupSerializer(serializers.ModelSerializer):
permissions = serializers.SlugRelatedField(
many=True,
queryset=Permission.objects.exclude(content_type__app_label="admin"),
@@ -158,7 +158,7 @@ class GroupSerializer(serializers.ModelSerializer[Group]):
)
class SocialAccountSerializer(serializers.ModelSerializer[SocialAccount]):
class SocialAccountSerializer(serializers.ModelSerializer):
name = serializers.SerializerMethodField()
class Meta:
@@ -176,7 +176,7 @@ class SocialAccountSerializer(serializers.ModelSerializer[SocialAccount]):
return "Unknown App"
class ProfileSerializer(PasswordValidationMixin, serializers.ModelSerializer[User]):
class ProfileSerializer(PasswordValidationMixin, serializers.ModelSerializer):
email = serializers.EmailField(allow_blank=True, required=False)
password = ObfuscatedPasswordField(required=False, allow_null=False)
auth_token = serializers.SlugRelatedField(read_only=True, slug_field="key")
@@ -209,9 +209,7 @@ class ProfileSerializer(PasswordValidationMixin, serializers.ModelSerializer[Use
)
class ApplicationConfigurationSerializer(
serializers.ModelSerializer[ApplicationConfiguration],
):
class ApplicationConfigurationSerializer(serializers.ModelSerializer):
user_args = serializers.JSONField(binary=True, allow_null=True)
barcode_tag_mapping = serializers.JSONField(binary=True, allow_null=True)
llm_api_key = ObfuscatedPasswordField(

View File

@@ -1,6 +1,5 @@
from collections import OrderedDict
from pathlib import Path
from typing import Any
from allauth.mfa import signals
from allauth.mfa.adapter import get_adapter as get_mfa_adapter
@@ -115,7 +114,7 @@ class FaviconView(View):
return HttpResponseNotFound("favicon.ico not found")
class UserViewSet(ModelViewSet[User]):
class UserViewSet(ModelViewSet):
_BOOL_NOT_PROVIDED = object()
model = User
@@ -217,7 +216,7 @@ class UserViewSet(ModelViewSet[User]):
return HttpResponseNotFound("TOTP not found")
class GroupViewSet(ModelViewSet[Group]):
class GroupViewSet(ModelViewSet):
model = Group
queryset = Group.objects.order_by(Lower("name"))
@@ -230,7 +229,7 @@ class GroupViewSet(ModelViewSet[Group]):
ordering_fields = ("name",)
class ProfileView(GenericAPIView[Any]):
class ProfileView(GenericAPIView):
"""
User profile view, only available when logged in
"""
@@ -289,7 +288,7 @@ class ProfileView(GenericAPIView[Any]):
},
),
)
class TOTPView(GenericAPIView[Any]):
class TOTPView(GenericAPIView):
"""
TOTP views
"""
@@ -369,7 +368,7 @@ class TOTPView(GenericAPIView[Any]):
},
),
)
class GenerateAuthTokenView(GenericAPIView[Any]):
class GenerateAuthTokenView(GenericAPIView):
"""
Generates (or re-generates) an auth token, requires a logged in user
unlike the default DRF endpoint
@@ -398,7 +397,7 @@ class GenerateAuthTokenView(GenericAPIView[Any]):
},
),
)
class ApplicationConfigurationViewSet(ModelViewSet[ApplicationConfiguration]):
class ApplicationConfigurationViewSet(ModelViewSet):
model = ApplicationConfiguration
queryset = ApplicationConfiguration.objects
@@ -451,7 +450,7 @@ class ApplicationConfigurationViewSet(ModelViewSet[ApplicationConfiguration]):
},
),
)
class DisconnectSocialAccountView(GenericAPIView[Any]):
class DisconnectSocialAccountView(GenericAPIView):
"""
Disconnects a social account provider from the user account
"""
@@ -477,7 +476,7 @@ class DisconnectSocialAccountView(GenericAPIView[Any]):
},
),
)
class SocialAccountProvidersView(GenericAPIView[Any]):
class SocialAccountProvidersView(GenericAPIView):
"""
List of social account providers
"""

View File

@@ -57,7 +57,7 @@ class MailAccountSerializer(OwnedObjectSerializer):
return instance
class AccountField(serializers.PrimaryKeyRelatedField[MailAccount]):
class AccountField(serializers.PrimaryKeyRelatedField):
def get_queryset(self):
return MailAccount.objects.all().order_by("-id")

View File

@@ -1,7 +1,6 @@
import datetime
import logging
from datetime import timedelta
from typing import Any
from django.http import HttpResponseBadRequest
from django.http import HttpResponseForbidden
@@ -66,7 +65,7 @@ from paperless_mail.tasks import process_mail_accounts
},
),
)
class MailAccountViewSet(PassUserMixin, ModelViewSet[MailAccount]):
class MailAccountViewSet(ModelViewSet, PassUserMixin):
model = MailAccount
queryset = MailAccount.objects.all().order_by("pk")
@@ -160,7 +159,7 @@ class MailAccountViewSet(PassUserMixin, ModelViewSet[MailAccount]):
return Response({"result": "OK"})
class ProcessedMailViewSet(PassUserMixin, ReadOnlyModelViewSet[ProcessedMail]):
class ProcessedMailViewSet(ReadOnlyModelViewSet, PassUserMixin):
permission_classes = (IsAuthenticated, PaperlessObjectPermissions)
serializer_class = ProcessedMailSerializer
pagination_class = StandardPagination
@@ -188,7 +187,7 @@ class ProcessedMailViewSet(PassUserMixin, ReadOnlyModelViewSet[ProcessedMail]):
return Response({"result": "OK", "deleted_mail_ids": mail_ids})
class MailRuleViewSet(PassUserMixin, ModelViewSet[MailRule]):
class MailRuleViewSet(ModelViewSet, PassUserMixin):
model = MailRule
queryset = MailRule.objects.all().order_by("order")
@@ -204,7 +203,7 @@ class MailRuleViewSet(PassUserMixin, ModelViewSet[MailRule]):
responses={200: None},
),
)
class OauthCallbackView(GenericAPIView[Any]):
class OauthCallbackView(GenericAPIView):
permission_classes = (IsAuthenticated,)
def get(self, request, format=None):