Compare commits: paperless-ngx (mirror of https://github.com/paperless-ngx/paperless-ngx.git)
feature-dr...fix-bulk-e (23 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 1775846483 | |
| | 13671b7d85 | |
| | 0bb7d755ab | |
| | e4d43175af | |
| | 04945ff3f7 | |
| | 7b430e27c6 | |
| | b329581111 | |
| | 84e8caf25f | |
| | 97602f79fb | |
| | 568be982cf | |
| | d753b698db | |
| | eabd11546a | |
| | 43072b7a74 | |
| | 1c65a1bb0e | |
| | 0ed3103227 | |
| | ea55ec8bc5 | |
| | c977445718 | |
| | b313759903 | |
| | 5f0887046c | |
| | 047d4eca84 | |
| | d86cfdb088 | |
| | c2e1085418 | |
| | ee0d1a3094 | |
@@ -437,3 +437,6 @@ Initial API version.
   moved from the bulk edit endpoint to their own individual endpoints. Using these methods via
   the bulk edit endpoint is still supported for compatibility with versions < 10 until support
   for API v9 is dropped.
+- The `all` parameter of list endpoints is now deprecated and will be removed in a future version.
+- The bulk edit objects endpoint now supports `all` and `filters` parameters to avoid having to send
+  large lists of object IDs for operations affecting many objects.
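For illustration, a minimal sketch of a request using the new `all`/`filters` parameters, in the shape asserted by the service spec further down. The base URL, the token auth header, and the literal `object_type`/`operation` string values are assumptions, not taken from this diff:

```ts
// Hedged sketch: bulk-deleting every tag whose name matches a filter,
// without sending an explicit ID list.
async function bulkDeleteFilteredTags(
  apiBaseUrl: string, // e.g. 'http://localhost:8000/api/' (assumed)
  token: string
): Promise<void> {
  const response = await fetch(`${apiBaseUrl}bulk_edit_objects/`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      Authorization: `Token ${token}`, // assumed auth scheme
    },
    body: JSON.stringify({
      object_type: 'tags', // assumed value; the spec parameterizes this per endpoint
      operation: 'delete', // assumed serialized form of BulkEditObjectOperation.Delete
      all: true, // replaces a potentially huge `objects` ID list
      filters: { name__icontains: 'hello' },
    }),
  })
  if (!response.ok) {
    throw new Error(`Bulk edit failed: ${response.status}`)
  }
}
```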
@@ -468,7 +468,7 @@
   "time": 0.951,
   "request": {
     "method": "GET",
-    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__in=9",
+    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__in=9",
     "httpVersion": "HTTP/1.1",
     "cookies": [],
     "headers": [
File diff suppressed because one or more lines are too long
@@ -534,7 +534,7 @@
   "time": 0.653,
   "request": {
     "method": "GET",
-    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=9",
+    "url": "http://localhost:8000/api/documents/?page=1&page_size=10&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=9",
    "httpVersion": "HTTP/1.1",
    "cookies": [],
    "headers": [
@@ -883,7 +883,7 @@
   "time": 0.93,
   "request": {
     "method": "GET",
-    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=4",
+    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=4",
    "httpVersion": "HTTP/1.1",
    "cookies": [],
    "headers": [
@@ -961,7 +961,7 @@
   "time": -1,
   "request": {
     "method": "GET",
-    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=4",
+    "url": "http://localhost:8000/api/documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=4",
    "httpVersion": "HTTP/1.1",
    "cookies": [],
    "headers": [
@@ -16,7 +16,7 @@ test('basic filtering', async ({ page }) => {
   await expect(page).toHaveURL(/tags__id__all=9/)
   await expect(page.locator('pngx-document-list')).toHaveText(/8 documents/)
   await page.getByRole('button', { name: 'Document type' }).click()
-  await page.getByRole('menuitem', { name: 'Invoice Test 3' }).click()
+  await page.getByRole('menuitem', { name: /^Invoice Test/ }).click()
   await expect(page).toHaveURL(/document_type__id__in=1/)
   await expect(page.locator('pngx-document-list')).toHaveText(/3 documents/)
   await page.getByRole('button', { name: 'Reset filters' }).first().click()
File diff suppressed because one or more lines are too long (6 files)
@@ -20,9 +20,9 @@ import { Subject, filter, takeUntil } from 'rxjs'
 import { NEGATIVE_NULL_FILTER_VALUE } from 'src/app/data/filter-rule-type'
 import { MatchingModel } from 'src/app/data/matching-model'
 import { ObjectWithPermissions } from 'src/app/data/object-with-permissions'
+import { SelectionDataItem } from 'src/app/data/results'
 import { FilterPipe } from 'src/app/pipes/filter.pipe'
 import { HotKeyService } from 'src/app/services/hot-key.service'
-import { SelectionDataItem } from 'src/app/services/rest/document.service'
 import { pngxPopperOptions } from 'src/app/utils/popper-options'
 import { LoadingComponentWithPermissions } from '../../loading-component/loading.component'
 import { ClearableBadgeComponent } from '../clearable-badge/clearable-badge.component'
@@ -300,7 +300,7 @@ describe('BulkEditorComponent', () => {
       parameters: { add_tags: [101], remove_tags: [] },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -332,7 +332,7 @@ describe('BulkEditorComponent', () => {
       .expectOne(`${environment.apiBaseUrl}documents/bulk_edit/`)
       .flush(true)
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -423,7 +423,7 @@ describe('BulkEditorComponent', () => {
       parameters: { correspondent: 101 },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -455,7 +455,7 @@ describe('BulkEditorComponent', () => {
       .expectOne(`${environment.apiBaseUrl}documents/bulk_edit/`)
       .flush(true)
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -521,7 +521,7 @@ describe('BulkEditorComponent', () => {
       parameters: { document_type: 101 },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -553,7 +553,7 @@ describe('BulkEditorComponent', () => {
       .expectOne(`${environment.apiBaseUrl}documents/bulk_edit/`)
       .flush(true)
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -619,7 +619,7 @@ describe('BulkEditorComponent', () => {
       parameters: { storage_path: 101 },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -651,7 +651,7 @@ describe('BulkEditorComponent', () => {
       .expectOne(`${environment.apiBaseUrl}documents/bulk_edit/`)
       .flush(true)
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -717,7 +717,7 @@ describe('BulkEditorComponent', () => {
       parameters: { add_custom_fields: [101], remove_custom_fields: [102] },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -749,7 +749,7 @@ describe('BulkEditorComponent', () => {
       .expectOne(`${environment.apiBaseUrl}documents/bulk_edit/`)
       .flush(true)
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -858,7 +858,7 @@ describe('BulkEditorComponent', () => {
       documents: [3, 4],
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -951,7 +951,7 @@ describe('BulkEditorComponent', () => {
       documents: [3, 4],
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -986,7 +986,7 @@ describe('BulkEditorComponent', () => {
       source_mode: 'latest_version',
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -1027,7 +1027,7 @@ describe('BulkEditorComponent', () => {
       metadata_document_id: 3,
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -1046,7 +1046,7 @@ describe('BulkEditorComponent', () => {
       delete_originals: true,
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -1067,7 +1067,7 @@ describe('BulkEditorComponent', () => {
       archive_fallback: true,
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -1153,7 +1153,7 @@ describe('BulkEditorComponent', () => {
       },
     })
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -1460,7 +1460,7 @@ describe('BulkEditorComponent', () => {
     expect(toastServiceShowInfoSpy).toHaveBeenCalled()
     expect(listReloadSpy).toHaveBeenCalled()
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     ) // list reload
     httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id`
@@ -16,6 +16,7 @@ import { first, map, Observable, Subject, switchMap, takeUntil } from 'rxjs'
 import { ConfirmDialogComponent } from 'src/app/components/common/confirm-dialog/confirm-dialog.component'
 import { CustomField } from 'src/app/data/custom-field'
 import { MatchingModel } from 'src/app/data/matching-model'
+import { SelectionDataItem } from 'src/app/data/results'
 import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
 import { IfPermissionsDirective } from 'src/app/directives/if-permissions.directive'
 import { DocumentListViewService } from 'src/app/services/document-list-view.service'
@@ -32,7 +33,6 @@ import {
   DocumentBulkEditMethod,
   DocumentService,
   MergeDocumentsRequest,
-  SelectionDataItem,
 } from 'src/app/services/rest/document.service'
 import { SavedViewService } from 'src/app/services/rest/saved-view.service'
 import { ShareLinkBundleService } from 'src/app/services/rest/share-link-bundle.service'
@@ -76,6 +76,7 @@ import {
   FILTER_TITLE_CONTENT,
   NEGATIVE_NULL_FILTER_VALUE,
 } from 'src/app/data/filter-rule-type'
+import { SelectionData, SelectionDataItem } from 'src/app/data/results'
 import {
   PermissionAction,
   PermissionType,
@@ -84,11 +85,7 @@ import {
 import { CorrespondentService } from 'src/app/services/rest/correspondent.service'
 import { CustomFieldsService } from 'src/app/services/rest/custom-fields.service'
 import { DocumentTypeService } from 'src/app/services/rest/document-type.service'
-import {
-  DocumentService,
-  SelectionData,
-  SelectionDataItem,
-} from 'src/app/services/rest/document.service'
+import { DocumentService } from 'src/app/services/rest/document.service'
 import { SearchService } from 'src/app/services/rest/search.service'
 import { StoragePathService } from 'src/app/services/rest/storage-path.service'
 import { TagService } from 'src/app/services/rest/tag.service'
@@ -9,8 +9,8 @@
 <div ngbDropdown class="btn-group flex-fill d-sm-none">
   <button class="btn btn-sm btn-outline-primary" id="dropdownSelectMobile" ngbDropdownToggle>
     <i-bs name="text-indent-left"></i-bs><div class="d-none d-sm-inline ms-1"><ng-container i18n>Select</ng-container></div>
-    @if (activeManagementList.selectedObjects.size > 0) {
-      <pngx-clearable-badge [selected]="activeManagementList.selectedObjects.size > 0" [number]="activeManagementList.selectedObjects.size" (cleared)="activeManagementList.selectNone()"></pngx-clearable-badge><span class="visually-hidden">selected</span>
+    @if (activeManagementList.hasSelection) {
+      <pngx-clearable-badge [selected]="activeManagementList.hasSelection" [number]="activeManagementList.selectedCount" (cleared)="activeManagementList.selectNone()"></pngx-clearable-badge><span class="visually-hidden">selected</span>
     }
   </button>
   <div ngbDropdownMenu aria-labelledby="dropdownSelectMobile" class="shadow">
@@ -25,7 +25,7 @@
   <span class="input-group-text border-0" i18n>Select:</span>
 </div>
 <div class="btn-group btn-group-sm flex-nowrap">
-  @if (activeManagementList.selectedObjects.size > 0) {
+  @if (activeManagementList.hasSelection) {
   <button class="btn btn-sm btn-outline-secondary" (click)="activeManagementList.selectNone()">
     <i-bs name="slash-circle" class="me-1"></i-bs><ng-container i18n>None</ng-container>
   </button>
@@ -40,11 +40,11 @@
 </div>

 <button type="button" class="btn btn-sm btn-outline-primary" (click)="activeManagementList.setPermissions()"
-  [disabled]="!activeManagementList.userCanBulkEdit(PermissionAction.Change) || activeManagementList.selectedObjects.size === 0">
+  [disabled]="!activeManagementList.userCanBulkEdit(PermissionAction.Change) || !activeManagementList.hasSelection">
   <i-bs name="person-fill-lock" class="me-1"></i-bs><ng-container i18n>Permissions</ng-container>
 </button>
 <button type="button" class="btn btn-sm btn-outline-danger" (click)="activeManagementList.delete()"
-  [disabled]="!activeManagementList.userCanBulkEdit(PermissionAction.Delete) || activeManagementList.selectedObjects.size === 0">
+  [disabled]="!activeManagementList.userCanBulkEdit(PermissionAction.Delete) || !activeManagementList.hasSelection">
   <i-bs name="trash" class="me-1"></i-bs><ng-container i18n>Delete</ng-container>
 </button>
 <button type="button" class="btn btn-sm btn-outline-primary ms-md-5" (click)="activeManagementList.openCreateDialog()"
@@ -65,8 +65,8 @@
 @if (displayCollectionSize > 0) {
   <div>
     <ng-container i18n>{displayCollectionSize, plural, =1 {One {{typeName}}} other {{{displayCollectionSize || 0}} total {{typeNamePlural}}}}</ng-container>
-    @if (selectedObjects.size > 0) {
-      ({{selectedObjects.size}} selected)
+    @if (hasSelection) {
+      ({{selectedCount}} selected)
     }
   </div>
 }
@@ -117,7 +117,6 @@ describe('ManagementListComponent', () => {
         : tags
       return of({
         count: results.length,
-        all: results.map((o) => o.id),
         results,
       })
     }
@@ -231,11 +230,11 @@ describe('ManagementListComponent', () => {
     expect(reloadSpy).toHaveBeenCalled()
   })

-  it('should use API count for pagination and all ids for displayed total', fakeAsync(() => {
+  it('should use API count for pagination and nested ids for displayed total', fakeAsync(() => {
     jest.spyOn(tagService, 'listFiltered').mockReturnValueOnce(
       of({
         count: 1,
-        all: [1, 2, 3],
+        display_count: 3,
         results: tags.slice(0, 1),
       })
     )
@@ -315,13 +314,17 @@ describe('ManagementListComponent', () => {
     expect(component.togggleAll).toBe(false)
   })

-  it('selectAll should use all IDs when collection size exists', () => {
-    ;(component as any).allIDs = [1, 2, 3, 4]
-    component.collectionSize = 4
+  it('selectAll should activate all-selection mode', () => {
+    ;(tagService.listFiltered as jest.Mock).mockClear()
+    component.collectionSize = tags.length

     component.selectAll()

-    expect(component.selectedObjects).toEqual(new Set([1, 2, 3, 4]))
+    expect(tagService.listFiltered).not.toHaveBeenCalled()
+    expect(component.selectedObjects).toEqual(new Set(tags.map((t) => t.id)))
+    expect((component as any).allSelectionActive).toBe(true)
+    expect(component.hasSelection).toBe(true)
+    expect(component.selectedCount).toBe(tags.length)
     expect(component.togggleAll).toBe(true)
   })
@@ -395,6 +398,33 @@ describe('ManagementListComponent', () => {
     expect(successToastSpy).toHaveBeenCalled()
   })

+  it('should support bulk edit permissions for all filtered items', () => {
+    const bulkEditPermsSpy = jest
+      .spyOn(tagService, 'bulk_edit_objects')
+      .mockReturnValue(of('OK'))
+    component.selectAll()
+
+    let modal: NgbModalRef
+    modalService.activeInstances.subscribe((m) => (modal = m[m.length - 1]))
+    fixture.detectChanges()
+    component.setPermissions()
+    expect(modal).not.toBeUndefined()
+
+    modal.componentInstance.confirmClicked.emit({
+      permissions: {},
+      merge: true,
+    })
+
+    expect(bulkEditPermsSpy).toHaveBeenCalledWith(
+      [],
+      BulkEditObjectOperation.SetPermissions,
+      {},
+      true,
+      true,
+      { is_root: true }
+    )
+  })
+
   it('should support bulk delete objects', () => {
     const bulkEditSpy = jest.spyOn(tagService, 'bulk_edit_objects')
     component.toggleSelected(tags[0])
@@ -415,7 +445,11 @@ describe('ManagementListComponent', () => {
     modal.componentInstance.confirmClicked.emit(null)
     expect(bulkEditSpy).toHaveBeenCalledWith(
       Array.from(selected),
-      BulkEditObjectOperation.Delete
+      BulkEditObjectOperation.Delete,
+      null,
+      null,
+      false,
+      null
     )
     expect(errorToastSpy).toHaveBeenCalled()
@@ -426,6 +460,29 @@ describe('ManagementListComponent', () => {
     expect(successToastSpy).toHaveBeenCalled()
   })

+  it('should support bulk delete for all filtered items', () => {
+    const bulkEditSpy = jest
+      .spyOn(tagService, 'bulk_edit_objects')
+      .mockReturnValue(of('OK'))
+
+    component.selectAll()
+    let modal: NgbModalRef
+    modalService.activeInstances.subscribe((m) => (modal = m[m.length - 1]))
+    fixture.detectChanges()
+    component.delete()
+    expect(modal).not.toBeUndefined()
+
+    modal.componentInstance.confirmClicked.emit(null)
+    expect(bulkEditSpy).toHaveBeenCalledWith(
+      [],
+      BulkEditObjectOperation.Delete,
+      null,
+      null,
+      true,
+      { is_root: true }
+    )
+  })
+
   it('should disallow bulk permissions or delete objects if no global perms', () => {
     jest.spyOn(permissionsService, 'currentUserCan').mockReturnValue(false)
     expect(component.userCanBulkEdit(PermissionAction.Delete)).toBeFalsy()
@@ -90,7 +90,8 @@ export abstract class ManagementListComponent<T extends MatchingModel>

   public data: T[] = []
   private unfilteredData: T[] = []
-  private allIDs: number[] = []
+  private currentExtraParams: { [key: string]: any } = null
+  private allSelectionActive = false

   public page = 1
@@ -107,6 +108,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   public selectedObjects: Set<number> = new Set()
   public togggleAll: boolean = false

+  public get hasSelection(): boolean {
+    return this.selectedObjects.size > 0 || this.allSelectionActive
+  }
+
+  public get selectedCount(): number {
+    return this.allSelectionActive
+      ? this.displayCollectionSize
+      : this.selectedObjects.size
+  }
+
   ngOnInit(): void {
     this.reloadData()
@@ -150,11 +161,11 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   }

   protected getCollectionSize(results: Results<T>): number {
-    return results.all?.length ?? results.count
+    return results.count
   }

   protected getDisplayCollectionSize(results: Results<T>): number {
-    return this.getCollectionSize(results)
+    return results.display_count ?? this.getCollectionSize(results)
   }

   getDocumentCount(object: MatchingModel): number {
@@ -171,6 +182,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   reloadData(extraParams: { [key: string]: any } = null) {
     this.loading = true
+    this.currentExtraParams = extraParams
     this.clearSelection()
     this.service
       .listFiltered(
@@ -189,7 +201,6 @@ export abstract class ManagementListComponent<T extends MatchingModel>
           this.data = this.filterData(c.results)
           this.collectionSize = this.getCollectionSize(c)
           this.displayCollectionSize = this.getDisplayCollectionSize(c)
-          this.allIDs = c.all
         }),
         delay(100)
       )
@@ -346,7 +357,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     return objects.map((o) => o.id)
   }

+  private getBulkEditFilters(): { [key: string]: any } {
+    const filters = { ...this.currentExtraParams }
+    if (this._nameFilter?.length) {
+      filters['name__icontains'] = this._nameFilter
+    }
+    return filters
+  }
+
   clearSelection() {
+    this.allSelectionActive = false
     this.togggleAll = false
     this.selectedObjects.clear()
   }
@@ -356,6 +376,7 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   }

   selectPage() {
+    this.allSelectionActive = false
     this.selectedObjects = new Set(this.getSelectableIDs(this.data))
     this.togggleAll = this.areAllPageItemsSelected()
   }
@@ -365,11 +386,16 @@ export abstract class ManagementListComponent<T extends MatchingModel>
       this.clearSelection()
       return
     }
-    this.selectedObjects = new Set(this.allIDs)
+
+    this.allSelectionActive = true
+    this.selectedObjects = new Set(this.getSelectableIDs(this.data))
     this.togggleAll = this.areAllPageItemsSelected()
   }

   toggleSelected(object) {
+    if (this.allSelectionActive) {
+      this.allSelectionActive = false
+    }
     this.selectedObjects.has(object.id)
       ? this.selectedObjects.delete(object.id)
       : this.selectedObjects.add(object.id)
@@ -377,6 +403,9 @@ export abstract class ManagementListComponent<T extends MatchingModel>
   }

   protected areAllPageItemsSelected(): boolean {
+    if (this.allSelectionActive) {
+      return this.data.length > 0
+    }
     const ids = this.getSelectableIDs(this.data)
     return ids.length > 0 && ids.every((id) => this.selectedObjects.has(id))
   }
@@ -390,10 +419,12 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     modal.componentInstance.buttonsEnabled = false
     this.service
       .bulk_edit_objects(
-        Array.from(this.selectedObjects),
+        this.allSelectionActive ? [] : Array.from(this.selectedObjects),
         BulkEditObjectOperation.SetPermissions,
         permissions,
-        merge
+        merge,
+        this.allSelectionActive,
+        this.allSelectionActive ? this.getBulkEditFilters() : null
       )
       .subscribe({
         next: () => {
@@ -428,8 +459,12 @@ export abstract class ManagementListComponent<T extends MatchingModel>
     modal.componentInstance.buttonsEnabled = false
     this.service
       .bulk_edit_objects(
-        Array.from(this.selectedObjects),
-        BulkEditObjectOperation.Delete
+        this.allSelectionActive ? [] : Array.from(this.selectedObjects),
+        BulkEditObjectOperation.Delete,
+        null,
+        null,
+        this.allSelectionActive,
+        this.allSelectionActive ? this.getBulkEditFilters() : null
      )
      .subscribe({
        next: () => {
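To summarize the selection semantics these hunks introduce, here is a minimal standalone sketch (not the actual component) of how all-selection mode interacts with explicit selection and with the body sent to the bulk endpoint; names mirror the diff above, but the class itself is illustrative:

```ts
// Hedged sketch of the all-selection state machine shown in the diff above.
class SelectionState {
  private allSelectionActive = false
  selectedObjects = new Set<number>()

  get hasSelection(): boolean {
    // True when IDs are explicitly selected or all-selection mode is on.
    return this.selectedObjects.size > 0 || this.allSelectionActive
  }

  selectAll(pageIds: number[]): void {
    // Only the visible page's IDs are tracked locally; the `all` flag tells
    // the backend to apply the operation to every object matching the filters.
    this.allSelectionActive = true
    this.selectedObjects = new Set(pageIds)
  }

  toggleSelected(id: number): void {
    // Touching an individual row drops back to explicit selection.
    this.allSelectionActive = false
    this.selectedObjects.has(id)
      ? this.selectedObjects.delete(id)
      : this.selectedObjects.add(id)
  }

  buildRequestBody(filters: Record<string, unknown>): Record<string, unknown> {
    return this.allSelectionActive
      ? { all: true, filters }
      : { objects: Array.from(this.selectedObjects) }
  }
}
```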
@@ -41,7 +41,6 @@ describe('TagListComponent', () => {
     listFilteredSpy = jest.spyOn(tagService, 'listFiltered').mockReturnValue(
       of({
         count: 3,
-        all: [1, 2, 3],
         results: [
           {
             id: 1,
@@ -9,7 +9,6 @@ import {
 import { NgxBootstrapIconsModule } from 'ngx-bootstrap-icons'
 import { TagEditDialogComponent } from 'src/app/components/common/edit-dialog/tag-edit-dialog/tag-edit-dialog.component'
 import { FILTER_HAS_TAGS_ALL } from 'src/app/data/filter-rule-type'
-import { Results } from 'src/app/data/results'
 import { Tag } from 'src/app/data/tag'
 import { IfPermissionsDirective } from 'src/app/directives/if-permissions.directive'
 import { SortableDirective } from 'src/app/directives/sortable.directive'
@@ -77,16 +76,6 @@ export class TagListComponent extends ManagementListComponent<Tag> {
     return data.filter((tag) => !tag.parent || !availableIds.has(tag.parent))
   }

-  protected override getCollectionSize(results: Results<Tag>): number {
-    // Tag list pages are requested with is_root=true (when unfiltered), so
-    // pagination must follow root count even though `all` includes descendants
-    return results.count
-  }
-
-  protected override getDisplayCollectionSize(results: Results<Tag>): number {
-    return super.getCollectionSize(results)
-  }
-
   protected override getSelectableIDs(tags: Tag[]): number[] {
     const ids: number[] = []
     for (const tag of tags.filter(Boolean)) {
@@ -1,7 +1,26 @@
 import { Document } from './document'

 export interface Results<T> {
   count: number
-  all: number[]
+  display_count?: number
   results: T[]
 }
+
+export interface SelectionDataItem {
+  id: number
+  document_count: number
+}
+
+export interface SelectionData {
+  selected_storage_paths: SelectionDataItem[]
+  selected_correspondents: SelectionDataItem[]
+  selected_tags: SelectionDataItem[]
+  selected_document_types: SelectionDataItem[]
+  selected_custom_fields: SelectionDataItem[]
+}
+
+export interface DocumentResults extends Results<Document> {
+  selection_data?: SelectionData
+}
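A short sketch of how a consumer might read the reshaped `Results` payload, using only the interfaces defined above (the import path matches the diff; the function itself is illustrative):

```ts
import { DocumentResults } from 'src/app/data/results'

// Hedged sketch: `count` drives pagination, `display_count` (when present)
// is what the user sees, and selection_data now arrives inline with the page
// instead of via a second selection_data/ request.
function applyResults(results: DocumentResults): void {
  const pageTotal = results.count
  const displayTotal = results.display_count ?? results.count
  const tagCounts = results.selection_data?.selected_tags ?? []
  console.log(`${displayTotal} shown of ${pageTotal}, ${tagCounts.length} tag counts`)
}
```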
@@ -126,13 +126,10 @@ describe('DocumentListViewService', () => {
     expect(documentListViewService.currentPage).toEqual(1)
     documentListViewService.reload()
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush(full_results)
-    httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.isReloading).toBeFalsy()
     expect(documentListViewService.activeSavedViewId).toBeNull()
@@ -144,12 +141,12 @@ describe('DocumentListViewService', () => {
   it('should handle error on page request out of range', () => {
     documentListViewService.currentPage = 50
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=50&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=50&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush([], { status: 404, statusText: 'Unexpected error' })
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.currentPage).toEqual(1)
@@ -166,7 +163,7 @@ describe('DocumentListViewService', () => {
     ]
     documentListViewService.setFilterRules(filterRulesAny)
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__in=${tags__id__in}`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__in=${tags__id__in}`
     )
     expect(req.request.method).toEqual('GET')
     req.flush(
@@ -174,13 +171,13 @@ describe('DocumentListViewService', () => {
       { status: 404, statusText: 'Unexpected error' }
     )
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     // reset the list
     documentListViewService.setFilterRules([])
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
   })
@@ -188,7 +185,7 @@ describe('DocumentListViewService', () => {
     documentListViewService.currentPage = 1
     documentListViewService.sortField = 'custom_field_999'
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-custom_field_999&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-custom_field_999&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush(
@@ -197,7 +194,7 @@ describe('DocumentListViewService', () => {
     )
     // resets itself
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
   })
@@ -212,7 +209,7 @@ describe('DocumentListViewService', () => {
     ]
     documentListViewService.setFilterRules(filterRulesAny)
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__in=${tags__id__in}`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__in=${tags__id__in}`
     )
     expect(req.request.method).toEqual('GET')
     req.flush('Generic error', { status: 404, statusText: 'Unexpected error' })
@@ -220,7 +217,7 @@ describe('DocumentListViewService', () => {
     // reset the list
     documentListViewService.setFilterRules([])
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
   })
@@ -229,7 +226,7 @@ describe('DocumentListViewService', () => {
     expect(documentListViewService.sortReverse).toBeTruthy()
     documentListViewService.setSort('added', false)
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=added&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=added&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.sortField).toEqual('added')
@@ -237,12 +234,12 @@ describe('DocumentListViewService', () => {

     documentListViewService.sortField = 'created'
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=created&truncate_content=true&include_selection_data=true`
     )
     expect(documentListViewService.sortField).toEqual('created')
     documentListViewService.sortReverse = true
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.sortReverse).toBeTruthy()
@@ -262,7 +259,7 @@ describe('DocumentListViewService', () => {
     const req = httpTestingController.expectOne(
       `${environment.apiBaseUrl}documents/?page=${page}&page_size=${
         documentListViewService.pageSize
-      }&ordering=${reverse ? '-' : ''}${sort}&truncate_content=true`
+      }&ordering=${reverse ? '-' : ''}${sort}&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.currentPage).toEqual(page)
@@ -279,7 +276,7 @@ describe('DocumentListViewService', () => {
     }
     documentListViewService.loadFromQueryParams(convertToParamMap(params))
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-added&truncate_content=true&tags__id__all=${tags__id__all}`
+      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-added&truncate_content=true&include_selection_data=true&tags__id__all=${tags__id__all}`
     )
     expect(req.request.method).toEqual('GET')
     expect(documentListViewService.filterRules).toEqual([
@@ -289,15 +286,12 @@ describe('DocumentListViewService', () => {
       },
     ])
     req.flush(full_results)
-    httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
   })

   it('should use filter rules to update query params', () => {
     documentListViewService.setFilterRules(filterRules)
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-created&truncate_content=true&tags__id__all=${tags__id__all}`
+      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=${tags__id__all}`
     )
     expect(req.request.method).toEqual('GET')
   })
@@ -306,34 +300,26 @@ describe('DocumentListViewService', () => {
     documentListViewService.currentPage = 2
     let req = httpTestingController.expectOne((request) =>
       request.urlWithParams.startsWith(
-        `${environment.apiBaseUrl}documents/?page=2&page_size=50&ordering=-created&truncate_content=true`
+        `${environment.apiBaseUrl}documents/?page=2&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
       )
     )
     expect(req.request.method).toEqual('GET')
     req.flush(full_results)
-    req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
-    req.flush([])

     documentListViewService.setFilterRules(filterRules, true)

     const filteredReqs = httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=${tags__id__all}`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=${tags__id__all}`
     )
     expect(filteredReqs).toHaveLength(1)
     filteredReqs[0].flush(full_results)
-    req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
-    req.flush([])
     expect(documentListViewService.currentPage).toEqual(1)
   })

   it('should support quick filter', () => {
     documentListViewService.quickFilter(filterRules)
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-created&truncate_content=true&tags__id__all=${tags__id__all}`
+      `${environment.apiBaseUrl}documents/?page=${documentListViewService.currentPage}&page_size=${documentListViewService.pageSize}&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=${tags__id__all}`
     )
     expect(req.request.method).toEqual('GET')
   })
@@ -356,21 +342,21 @@ describe('DocumentListViewService', () => {
       convertToParamMap(params)
     )
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=${page}&page_size=${documentListViewService.pageSize}&ordering=-added&truncate_content=true&tags__id__all=${tags__id__all}`
+      `${environment.apiBaseUrl}documents/?page=${page}&page_size=${documentListViewService.pageSize}&ordering=-added&truncate_content=true&include_selection_data=true&tags__id__all=${tags__id__all}`
     )
     expect(req.request.method).toEqual('GET')
     // reset the list
     documentListViewService.currentPage = 1
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true&tags__id__all=9`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true&include_selection_data=true&tags__id__all=9`
     )
     documentListViewService.setFilterRules([])
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-added&truncate_content=true&include_selection_data=true`
     )
     documentListViewService.sortField = 'created'
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
    )
    documentListViewService.activateSavedView(null)
  })
@@ -378,21 +364,18 @@ describe('DocumentListViewService', () => {
   it('should support navigating next / previous', () => {
     documentListViewService.setFilterRules([])
     let req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(documentListViewService.currentPage).toEqual(1)
     documentListViewService.pageSize = 3
     req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush({
       count: 3,
       results: documents.slice(0, 3),
     })
-    httpTestingController
-      .expectOne(`${environment.apiBaseUrl}documents/selection_data/`)
-      .flush([])
     expect(documentListViewService.hasNext(documents[0].id)).toBeTruthy()
     expect(documentListViewService.hasPrevious(documents[0].id)).toBeFalsy()
     documentListViewService.getNext(documents[0].id).subscribe((docId) => {
@@ -439,7 +422,7 @@ describe('DocumentListViewService', () => {
     expect(documentListViewService.currentPage).toEqual(1)
     documentListViewService.pageSize = 3
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     jest
       .spyOn(documentListViewService, 'getLastPage')
@@ -454,7 +437,7 @@ describe('DocumentListViewService', () => {
     expect(reloadSpy).toHaveBeenCalled()
     expect(documentListViewService.currentPage).toEqual(2)
     const reqs = httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=2&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=2&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(reqs.length).toBeGreaterThan(0)
   })
@@ -489,11 +472,11 @@ describe('DocumentListViewService', () => {
       .mockReturnValue(documents)
     documentListViewService.currentPage = 2
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=2&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=2&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     documentListViewService.pageSize = 3
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=2&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=2&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     const reloadSpy = jest.spyOn(documentListViewService, 'reload')
     documentListViewService.getPrevious(1).subscribe({
@@ -503,7 +486,7 @@ describe('DocumentListViewService', () => {
     expect(reloadSpy).toHaveBeenCalled()
     expect(documentListViewService.currentPage).toEqual(1)
     const reqs = httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(reqs.length).toBeGreaterThan(0)
   })
@@ -516,13 +499,10 @@ describe('DocumentListViewService', () => {
   it('should support select a document', () => {
     documentListViewService.reload()
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush(full_results)
-    httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
     documentListViewService.toggleSelected(documents[0])
     expect(documentListViewService.isSelected(documents[0])).toBeTruthy()
     documentListViewService.toggleSelected(documents[0])
@@ -544,16 +524,13 @@ describe('DocumentListViewService', () => {
   it('should support select page', () => {
     documentListViewService.pageSize = 3
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=3&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush({
       count: 3,
       results: documents.slice(0, 3),
     })
-    httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
     documentListViewService.selectPage()
     expect(documentListViewService.selected.size).toEqual(3)
     expect(documentListViewService.isSelected(documents[5])).toBeFalsy()
@@ -562,13 +539,10 @@ describe('DocumentListViewService', () => {
   it('should support select range', () => {
     documentListViewService.reload()
     const req = httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(req.request.method).toEqual('GET')
     req.flush(full_results)
-    httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/selection_data/`
-    )
     documentListViewService.toggleSelected(documents[0])
     expect(documentListViewService.isSelected(documents[0])).toBeTruthy()
     documentListViewService.selectRangeTo(documents[2])
@@ -588,7 +562,7 @@ describe('DocumentListViewService', () => {

     documentListViewService.setFilterRules(filterRules)
     httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=9`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=9`
     )
     const reqs = httpTestingController.match(
       `${environment.apiBaseUrl}documents/?page=1&page_size=100000&fields=id&tags__id__all=9`
@@ -604,7 +578,7 @@ describe('DocumentListViewService', () => {
     const cancelSpy = jest.spyOn(documentListViewService, 'cancelPending')
     documentListViewService.reload()
     httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&tags__id__all=9`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true&tags__id__all=9`
     )
     expect(cancelSpy).toHaveBeenCalled()
   })
@@ -623,7 +597,7 @@ describe('DocumentListViewService', () => {
     documentListViewService.setFilterRules([])
     expect(documentListViewService.sortField).toEqual('created')
     httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
   })
@@ -650,11 +624,11 @@ describe('DocumentListViewService', () => {
     expect(localStorageSpy).toHaveBeenCalled()
     // reload triggered
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     documentListViewService.displayFields = null
     httpTestingController.match(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     expect(documentListViewService.displayFields).toEqual(
       DEFAULT_DISPLAY_FIELDS.filter((f) => f.id !== DisplayField.ADDED).map(
@@ -694,7 +668,7 @@ describe('DocumentListViewService', () => {
   it('should generate quick filter URL preserving default state', () => {
     documentListViewService.reload()
     httpTestingController.expectOne(
-      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true`
+      `${environment.apiBaseUrl}documents/?page=1&page_size=50&ordering=-created&truncate_content=true&include_selection_data=true`
     )
     const urlTree = documentListViewService.getQuickFilterUrl(filterRules)
     expect(urlTree).toBeDefined()
@@ -1,6 +1,6 @@
 import { Injectable, inject } from '@angular/core'
 import { ParamMap, Router, UrlTree } from '@angular/router'
-import { Observable, Subject, first, takeUntil } from 'rxjs'
+import { Observable, Subject, takeUntil } from 'rxjs'
 import {
   DEFAULT_DISPLAY_FIELDS,
   DisplayField,
@@ -8,6 +8,7 @@ import {
   Document,
 } from '../data/document'
 import { FilterRule } from '../data/filter-rule'
+import { DocumentResults, SelectionData } from '../data/results'
 import { SavedView } from '../data/saved-view'
 import { DOCUMENT_LIST_SERVICE } from '../data/storage-keys'
 import { SETTINGS_KEYS } from '../data/ui-settings'
@@ -17,7 +18,7 @@ import {
   isFullTextFilterRule,
 } from '../utils/filter-rules'
 import { paramsFromViewState, paramsToViewState } from '../utils/query-params'
-import { DocumentService, SelectionData } from './rest/document.service'
+import { DocumentService } from './rest/document.service'
 import { SettingsService } from './settings.service'

 const LIST_DEFAULT_DISPLAY_FIELDS: DisplayField[] = DEFAULT_DISPLAY_FIELDS.map(
@@ -260,27 +261,17 @@ export class DocumentListViewService {
         activeListViewState.sortField,
         activeListViewState.sortReverse,
         activeListViewState.filterRules,
-        { truncate_content: true }
+        { truncate_content: true, include_selection_data: true }
       )
       .pipe(takeUntil(this.unsubscribeNotifier))
       .subscribe({
         next: (result) => {
+          const resultWithSelectionData = result as DocumentResults
           this.initialized = true
           this.isReloading = false
          activeListViewState.collectionSize = result.count
          activeListViewState.documents = result.results
-
-          this.documentService
-            .getSelectionData(result.all)
-            .pipe(first())
-            .subscribe({
-              next: (selectionData) => {
-                this.selectionData = selectionData
-              },
-              error: () => {
-                this.selectionData = null
-              },
-            })
+          this.selectionData = resultWithSelectionData.selection_data ?? null

           if (updateQueryParams && !this._activeSavedViewId) {
             let base = ['/documents']
@@ -96,6 +96,30 @@ export const commonAbstractNameFilterPaperlessServiceTests = (
     })
     req.flush([])
   })

+  test('should call appropriate api endpoint for bulk delete on all filtered objects', () => {
+    subscription = service
+      .bulk_edit_objects(
+        [],
+        BulkEditObjectOperation.Delete,
+        null,
+        null,
+        true,
+        { name__icontains: 'hello' }
+      )
+      .subscribe()
+    const req = httpTestingController.expectOne(
+      `${environment.apiBaseUrl}bulk_edit_objects/`
+    )
+    expect(req.request.method).toEqual('POST')
+    expect(req.request.body).toEqual({
+      object_type: endpoint,
+      operation: BulkEditObjectOperation.Delete,
+      all: true,
+      filters: { name__icontains: 'hello' },
+    })
+    req.flush([])
+  })
 })

 beforeEach(() => {
@@ -37,13 +37,22 @@ export abstract class AbstractNameFilterService<
    objects: Array<number>,
    operation: BulkEditObjectOperation,
    permissions: { owner: number; set_permissions: PermissionsObject } = null,
-    merge: boolean = null
+    merge: boolean = null,
+    all: boolean = false,
+    filters: { [key: string]: any } = null
  ): Observable<string> {
-    const params = {
-      objects,
+    const params: any = {
      object_type: this.resourceName,
      operation,
    }
    if (all) {
      params['all'] = true
      if (filters) {
        params['filters'] = filters
      }
    } else {
      params['objects'] = objects
    }
    if (operation === BulkEditObjectOperation.SetPermissions) {
      params['owner'] = permissions?.owner
      params['permissions'] = permissions?.set_permissions
@@ -12,7 +12,7 @@ import {
import { DocumentMetadata } from 'src/app/data/document-metadata'
import { DocumentSuggestions } from 'src/app/data/document-suggestions'
import { FilterRule } from 'src/app/data/filter-rule'
-import { Results } from 'src/app/data/results'
+import { Results, SelectionData } from 'src/app/data/results'
import { SETTINGS_KEYS } from 'src/app/data/ui-settings'
import { queryParamsFromFilterRules } from '../../utils/query-params'
import {
@@ -24,19 +24,6 @@ import { SettingsService } from '../settings.service'
import { AbstractPaperlessService } from './abstract-paperless-service'
import { CustomFieldsService } from './custom-fields.service'

-export interface SelectionDataItem {
-  id: number
-  document_count: number
-}
-
-export interface SelectionData {
-  selected_storage_paths: SelectionDataItem[]
-  selected_correspondents: SelectionDataItem[]
-  selected_tags: SelectionDataItem[]
-  selected_document_types: SelectionDataItem[]
-  selected_custom_fields: SelectionDataItem[]
-}
-
export enum BulkEditSourceMode {
  LATEST_VERSION = 'latest_version',
  EXPLICIT_SELECTION = 'explicit_selection',
@@ -51,11 +51,28 @@ from documents.templating.workflows import parse_w_workflow_placeholders
from documents.utils import copy_basic_file_stats
from documents.utils import copy_file_with_basic_stats
from documents.utils import run_subprocess
from paperless.parsers.text import TextDocumentParser
from paperless_mail.parsers import MailDocumentParser

LOGGING_NAME: Final[str] = "paperless.consumer"


def _parser_cleanup(parser: DocumentParser) -> None:
    """
    Call cleanup on a parser, handling the new-style context-manager parsers.

    New-style parsers (e.g. TextDocumentParser) use __exit__ for teardown
    instead of a cleanup() method. This shim will be removed once all
    existing parsers have switched to the new style and the consumer is
    updated to use it.

    TODO(stumpylog): Remove me in the future
    """
    if isinstance(parser, TextDocumentParser):
        parser.__exit__(None, None, None)
    else:
        parser.cleanup()
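For orientation, the shape this shim is bridging toward: new-style parsers are driven as context managers, so teardown happens in __exit__ rather than an explicit cleanup() call. A minimal sketch under that assumption (the process_text_document wrapper is hypothetical and constructor arguments are omitted)::

    from paperless.parsers.text import TextDocumentParser

    def process_text_document(path, mime_type):  # hypothetical wrapper
        # Teardown runs via __exit__ even if parse() raises ParseError
        with TextDocumentParser() as parser:
            parser.parse(path, mime_type)
            return parser.get_text(), parser.get_date()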


class WorkflowTriggerPlugin(
    NoCleanupPluginMixin,
    NoSetupPluginMixin,
@@ -459,6 +476,9 @@ class ConsumerPlugin(
                    self.filename,
                    self.input_doc.mailrule_id,
                )
+            elif isinstance(document_parser, TextDocumentParser):
+                # TODO(stumpylog): Remove me in the future
+                document_parser.parse(self.working_copy, mime_type)
            else:
                document_parser.parse(self.working_copy, mime_type, self.filename)

@@ -469,11 +489,15 @@ class ConsumerPlugin(
                ProgressStatusOptions.WORKING,
                ConsumerStatusShortMessage.GENERATING_THUMBNAIL,
            )
-            thumbnail = document_parser.get_thumbnail(
-                self.working_copy,
-                mime_type,
-                self.filename,
-            )
+            if isinstance(document_parser, TextDocumentParser):
+                # TODO(stumpylog): Remove me in the future
+                thumbnail = document_parser.get_thumbnail(self.working_copy, mime_type)
+            else:
+                thumbnail = document_parser.get_thumbnail(
+                    self.working_copy,
+                    mime_type,
+                    self.filename,
+                )

            text = document_parser.get_text()
            date = document_parser.get_date()
@@ -490,7 +514,7 @@ class ConsumerPlugin(
            page_count = document_parser.get_page_count(self.working_copy, mime_type)

        except ParseError as e:
-            document_parser.cleanup()
+            _parser_cleanup(document_parser)
            if tempdir:
                tempdir.cleanup()
            self._fail(
@@ -500,7 +524,7 @@ class ConsumerPlugin(
                exception=e,
            )
        except Exception as e:
-            document_parser.cleanup()
+            _parser_cleanup(document_parser)
            if tempdir:
                tempdir.cleanup()
            self._fail(
@@ -702,7 +726,7 @@ class ConsumerPlugin(
                exception=e,
            )
        finally:
-            document_parser.cleanup()
+            _parser_cleanup(document_parser)
            tempdir.cleanup()

        self.run_post_consume_script(document)

@@ -375,6 +375,26 @@ class DelayedQuery:
            ]
        return self._manual_hits_cache

+    def get_result_ids(self) -> list[int]:
+        """
+        Return all matching document IDs for the current query and ordering.
+        """
+        if self._manual_sort_requested():
+            return [hit["id"] for hit in self._manual_hits()]
+
+        q, mask, suggested_correction = self._get_query()
+        self.suggested_correction = suggested_correction
+        sortedby, reverse = self._get_query_sortedby()
+        results = self.searcher.search(
+            q,
+            mask=mask,
+            filter=MappedDocIdSet(self.filter_queryset, self.searcher.ixreader),
+            limit=None,
+            sortedby=sortedby,
+            reverse=reverse,
+        )
+        return [hit["id"] for hit in results]
+
    def __getitem__(self, item):
        if item.start in self.saved_results:
            return self.saved_results[item.start]

@@ -30,6 +30,7 @@ def _process_document(doc_id: int) -> None:
        )
        shutil.move(thumb, document.thumbnail_path)
    finally:
+        # TODO(stumpylog): Cleanup once all parsers are handled
        parser.cleanup()


@@ -1540,6 +1540,41 @@ class DocumentListSerializer(serializers.Serializer):
        return documents


class DocumentSelectionSerializer(DocumentListSerializer):
    documents = serializers.ListField(
        required=False,
        label="Documents",
        write_only=True,
        child=serializers.IntegerField(),
    )

    all = serializers.BooleanField(
        default=False,
        required=False,
        write_only=True,
    )

    filters = serializers.DictField(
        required=False,
        allow_empty=True,
        write_only=True,
    )

    def validate(self, attrs):
        if attrs.get("all", False):
            attrs.setdefault("documents", [])
            return attrs

        if "documents" not in attrs:
            raise serializers.ValidationError(
                "documents is required unless all is true.",
            )

        documents = attrs["documents"]
        self._validate_document_id_list(documents)
        return attrs
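For illustration, the two payload shapes this serializer accepts (field names from this changeset; filter keys follow the document filterset, so the tag filter below is only an example)::

    # Explicit selection: "documents" is required when "all" is absent/false.
    explicit = {"documents": [1, 2, 3]}

    # Whole filtered set: ids are resolved server-side; optional "filters"
    # narrows the selection (e.g. tags__id__in, as used elsewhere in this diff).
    whole_set = {"all": True, "filters": {"tags__id__in": 9}}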


class SourceModeValidationMixin:
    def validate_source_mode(self, source_mode: str) -> str:
        if source_mode not in bulk_edit.SourceModeChoices.__dict__.values():
@@ -1547,7 +1582,7 @@ class SourceModeValidationMixin:
        return source_mode


-class RotateDocumentsSerializer(DocumentListSerializer, SourceModeValidationMixin):
+class RotateDocumentsSerializer(DocumentSelectionSerializer, SourceModeValidationMixin):
    degrees = serializers.IntegerField(required=True)
    source_mode = serializers.CharField(
        required=False,
@@ -1630,17 +1665,17 @@ class RemovePasswordDocumentsSerializer(
    )


-class DeleteDocumentsSerializer(DocumentListSerializer):
+class DeleteDocumentsSerializer(DocumentSelectionSerializer):
    pass


-class ReprocessDocumentsSerializer(DocumentListSerializer):
+class ReprocessDocumentsSerializer(DocumentSelectionSerializer):
    pass


class BulkEditSerializer(
    SerializerWithPerms,
-    DocumentListSerializer,
+    DocumentSelectionSerializer,
    SetPermissionsMixin,
    SourceModeValidationMixin,
):
@@ -1955,6 +1990,19 @@ class BulkEditSerializer(
            raise serializers.ValidationError("password must be a string")

    def validate(self, attrs):
        attrs = super().validate(attrs)

        if attrs.get("all", False) and attrs["method"] in [
            bulk_edit.merge,
            bulk_edit.split,
            bulk_edit.delete_pages,
            bulk_edit.edit_pdf,
            bulk_edit.remove_password,
        ]:
            raise serializers.ValidationError(
                "This method does not support all=true.",
            )

        method = attrs["method"]
        parameters = attrs["parameters"]

@@ -2212,7 +2260,7 @@ class DocumentVersionLabelSerializer(serializers.Serializer):
        return normalized or None


-class BulkDownloadSerializer(DocumentListSerializer):
+class BulkDownloadSerializer(DocumentSelectionSerializer):
    content = serializers.ChoiceField(
        choices=["archive", "originals", "both"],
        default="archive",
@@ -2571,13 +2619,25 @@ class ShareLinkBundleSerializer(OwnedObjectSerializer):

class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):
    objects = serializers.ListField(
-        required=True,
-        allow_empty=False,
+        required=False,
+        allow_empty=True,
        label="Objects",
        write_only=True,
        child=serializers.IntegerField(),
    )

    all = serializers.BooleanField(
        default=False,
        required=False,
        write_only=True,
    )

    filters = serializers.DictField(
        required=False,
        allow_empty=True,
        write_only=True,
    )

    object_type = serializers.ChoiceField(
        choices=[
            "tags",
@@ -2650,10 +2710,20 @@ class BulkEditObjectsSerializer(SerializerWithPerms, SetPermissionsMixin):

    def validate(self, attrs):
        object_type = attrs["object_type"]
-        objects = attrs["objects"]
+        objects = attrs.get("objects")
+        apply_to_all = attrs.get("all", False)
+        operation = attrs.get("operation")

-        self._validate_objects(objects, object_type)
+        if apply_to_all:
+            attrs.setdefault("objects", [])
+        else:
+            if objects is None:
+                raise serializers.ValidationError(
+                    "objects is required unless all is true.",
+                )
+            if len(objects) == 0:
+                raise serializers.ValidationError("objects must not be empty")
+            self._validate_objects(objects, object_type)

        if operation == "set_permissions":
            permissions = attrs.get("permissions")

@@ -399,6 +399,7 @@ def update_document_content_maybe_archive_file(document_id) -> None:
            f"Error while parsing document {document} (ID: {document_id})",
        )
    finally:
+        # TODO(stumpylog): Cleanup once all parsers are handled
        parser.cleanup()


@@ -1119,21 +1119,19 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
            [u1_doc1.id],
        )

-    def test_pagination_all(self) -> None:
+    def test_pagination_results(self) -> None:
        """
        GIVEN:
            - A set of 50 documents
        WHEN:
            - API request for document filtering
        THEN:
-            - Results are paginated (25 items) and response["all"] returns all ids (50 items)
+            - Results are paginated (25 items) and count reflects all results (50 items)
        """
        t = Tag.objects.create(name="tag")
        docs = []
        for i in range(50):
            d = Document.objects.create(checksum=i, content=f"test{i}")
            d.tags.add(t)
            docs.append(d)

        response = self.client.get(
            f"/api/documents/?tags__id__in={t.id}",
@@ -1141,9 +1139,84 @@ class TestDocumentApi(DirectoriesMixin, DocumentConsumeDelayMixin, APITestCase):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        results = response.data["results"]
        self.assertEqual(len(results), 25)
-        self.assertEqual(len(response.data["all"]), 50)
+        self.assertEqual(response.data["count"], 50)
+        self.assertNotIn("all", response.data)

    def test_pagination_all_for_api_version_9(self) -> None:
        """
        GIVEN:
            - A set of documents matching a filter
        WHEN:
            - API request uses legacy version 9
        THEN:
            - Response includes "all" for backward compatibility
        """
        t = Tag.objects.create(name="tag")
        docs = []
        for i in range(4):
            d = Document.objects.create(checksum=i, content=f"test{i}")
            d.tags.add(t)
            docs.append(d)

        response = self.client.get(
            f"/api/documents/?tags__id__in={t.id}",
            headers={"Accept": "application/json; version=9"},
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("all", response.data)
        self.assertCountEqual(response.data["all"], [d.id for d in docs])

    def test_list_with_include_selection_data(self) -> None:
        correspondent = Correspondent.objects.create(name="c1")
        doc_type = DocumentType.objects.create(name="dt1")
        storage_path = StoragePath.objects.create(name="sp1")
        tag = Tag.objects.create(name="tag")

        matching_doc = Document.objects.create(
            checksum="A",
            correspondent=correspondent,
            document_type=doc_type,
            storage_path=storage_path,
        )
        matching_doc.tags.add(tag)

        non_matching_doc = Document.objects.create(checksum="B")
        non_matching_doc.tags.add(Tag.objects.create(name="other"))

        response = self.client.get(
            f"/api/documents/?tags__id__in={tag.id}&include_selection_data=true",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("selection_data", response.data)

        selected_correspondent = next(
            item
            for item in response.data["selection_data"]["selected_correspondents"]
            if item["id"] == correspondent.id
        )
        selected_tag = next(
            item
            for item in response.data["selection_data"]["selected_tags"]
            if item["id"] == tag.id
        )
        selected_type = next(
            item
            for item in response.data["selection_data"]["selected_document_types"]
            if item["id"] == doc_type.id
        )
        selected_storage_path = next(
            item
            for item in response.data["selection_data"]["selected_storage_paths"]
            if item["id"] == storage_path.id
        )

        self.assertEqual(selected_correspondent["document_count"], 1)
        self.assertEqual(selected_tag["document_count"], 1)
        self.assertEqual(selected_type["document_count"], 1)
        self.assertEqual(selected_storage_path["document_count"], 1)

    def test_statistics(self) -> None:
        doc1 = Document.objects.create(
            title="none1",

@@ -145,6 +145,22 @@ class TestApiObjects(DirectoriesMixin, APITestCase):
            response.data["last_correspondence"],
        )

    def test_paginated_objects_include_all_only_for_legacy_version(self) -> None:
        response_v10 = self.client.get("/api/correspondents/")
        self.assertEqual(response_v10.status_code, status.HTTP_200_OK)
        self.assertNotIn("all", response_v10.data)

        response_v9 = self.client.get(
            "/api/correspondents/",
            headers={"Accept": "application/json; version=9"},
        )
        self.assertEqual(response_v9.status_code, status.HTTP_200_OK)
        self.assertIn("all", response_v9.data)
        self.assertCountEqual(
            response_v9.data["all"],
            [self.c1.id, self.c2.id, self.c3.id],
        )


class TestApiStoragePaths(DirectoriesMixin, APITestCase):
    ENDPOINT = "/api/storage_paths/"
@@ -774,6 +790,62 @@ class TestBulkEditObjects(APITestCase):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(StoragePath.objects.count(), 0)

    def test_bulk_objects_delete_all_filtered(self) -> None:
        """
        GIVEN:
            - Existing objects that can be filtered by name
        WHEN:
            - bulk_edit_objects API endpoint is called with all=true and filters
        THEN:
            - Matching objects are deleted without passing explicit IDs
        """
        Correspondent.objects.create(name="c2")

        response = self.client.post(
            "/api/bulk_edit_objects/",
            json.dumps(
                {
                    "all": True,
                    "filters": {"name__icontains": "c"},
                    "object_type": "correspondents",
                    "operation": "delete",
                },
            ),
            content_type="application/json",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(Correspondent.objects.count(), 0)

    def test_bulk_objects_delete_all_filtered_tags_includes_descendants(self) -> None:
        """
        GIVEN:
            - Root tag with descendants
        WHEN:
            - bulk_edit_objects API endpoint is called with all=true
        THEN:
            - Root tags and descendants are deleted
        """
        parent = Tag.objects.create(name="parent")
        child = Tag.objects.create(name="child", tn_parent=parent)

        response = self.client.post(
            "/api/bulk_edit_objects/",
            json.dumps(
                {
                    "all": True,
                    "filters": {"is_root": True},
                    "object_type": "tags",
                    "operation": "delete",
                },
            ),
            content_type="application/json",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertFalse(Tag.objects.filter(id=parent.id).exists())
        self.assertFalse(Tag.objects.filter(id=child.id).exists())

    def test_bulk_edit_object_permissions_insufficient_global_perms(self) -> None:
        """
        GIVEN:
@@ -861,3 +933,40 @@ class TestBulkEditObjects(APITestCase):

        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        self.assertEqual(response.content, b"Insufficient permissions")

    def test_bulk_edit_all_filtered_permissions_insufficient_object_perms(
        self,
    ) -> None:
        """
        GIVEN:
            - Filter-matching objects include one that the user cannot edit
        WHEN:
            - bulk_edit_objects API endpoint is called with all=true
        THEN:
            - Operation applies only to editable objects
        """
        self.t2.owner = User.objects.get(username="temp_admin")
        self.t2.save()

        self.user1.user_permissions.add(
            *Permission.objects.filter(codename="delete_tag"),
        )
        self.user1.save()
        self.client.force_authenticate(user=self.user1)

        response = self.client.post(
            "/api/bulk_edit_objects/",
            json.dumps(
                {
                    "all": True,
                    "filters": {"name__icontains": "t"},
                    "object_type": "tags",
                    "operation": "delete",
                },
            ),
            content_type="application/json",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertTrue(Tag.objects.filter(id=self.t2.id).exists())
        self.assertFalse(Tag.objects.filter(id=self.t1.id).exists())

@@ -68,26 +68,88 @@ class TestDocumentSearchApi(DirectoriesMixin, APITestCase):
        results = response.data["results"]
        self.assertEqual(response.data["count"], 3)
        self.assertEqual(len(results), 3)
-        self.assertCountEqual(response.data["all"], [d1.id, d2.id, d3.id])

        response = self.client.get("/api/documents/?query=september")
        results = response.data["results"]
        self.assertEqual(response.data["count"], 1)
        self.assertEqual(len(results), 1)
-        self.assertCountEqual(response.data["all"], [d3.id])
        self.assertEqual(results[0]["original_file_name"], "someepdf.pdf")

        response = self.client.get("/api/documents/?query=statement")
        results = response.data["results"]
        self.assertEqual(response.data["count"], 2)
        self.assertEqual(len(results), 2)
-        self.assertCountEqual(response.data["all"], [d2.id, d3.id])

        response = self.client.get("/api/documents/?query=sfegdfg")
        results = response.data["results"]
        self.assertEqual(response.data["count"], 0)
        self.assertEqual(len(results), 0)
-        self.assertCountEqual(response.data["all"], [])

    def test_search_returns_all_for_api_version_9(self) -> None:
        d1 = Document.objects.create(
            title="invoice",
            content="bank payment",
            checksum="A",
            pk=1,
        )
        d2 = Document.objects.create(
            title="bank statement",
            content="bank transfer",
            checksum="B",
            pk=2,
        )
        with AsyncWriter(index.open_index()) as writer:
            index.update_document(writer, d1)
            index.update_document(writer, d2)

        response = self.client.get(
            "/api/documents/?query=bank",
            headers={"Accept": "application/json; version=9"},
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("all", response.data)
        self.assertCountEqual(response.data["all"], [d1.id, d2.id])

    def test_search_with_include_selection_data(self) -> None:
        correspondent = Correspondent.objects.create(name="c1")
        doc_type = DocumentType.objects.create(name="dt1")
        storage_path = StoragePath.objects.create(name="sp1")
        tag = Tag.objects.create(name="tag")

        matching_doc = Document.objects.create(
            title="bank statement",
            content="bank content",
            checksum="A",
            correspondent=correspondent,
            document_type=doc_type,
            storage_path=storage_path,
        )
        matching_doc.tags.add(tag)

        with AsyncWriter(index.open_index()) as writer:
            index.update_document(writer, matching_doc)

        response = self.client.get(
            "/api/documents/?query=bank&include_selection_data=true",
        )

        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertIn("selection_data", response.data)

        selected_correspondent = next(
            item
            for item in response.data["selection_data"]["selected_correspondents"]
            if item["id"] == correspondent.id
        )
        selected_tag = next(
            item
            for item in response.data["selection_data"]["selected_tags"]
            if item["id"] == tag.id
        )

        self.assertEqual(selected_correspondent["document_count"], 1)
        self.assertEqual(selected_tag["document_count"], 1)

    def test_search_custom_field_ordering(self) -> None:
        custom_field = CustomField.objects.create(

@@ -9,8 +9,8 @@ from documents.parsers import get_default_file_extension
from documents.parsers import get_parser_class_for_mime_type
from documents.parsers import get_supported_file_extensions
from documents.parsers import is_file_ext_supported
+from paperless.parsers.text import TextDocumentParser
from paperless_tesseract.parsers import RasterisedDocumentParser
-from paperless_text.parsers import TextDocumentParser
from paperless_tika.parsers import TikaDocumentParser


@@ -555,7 +555,9 @@ class TagViewSet(PermissionsAwareDocumentCountMixin, ModelViewSet):
        page = self.paginate_queryset(queryset)
        serializer = self.get_serializer(page, many=True)
        response = self.get_paginated_response(serializer.data)
        if descendant_pks:
            response.data["display_count"] = len(children_source)
+        api_version = int(request.version or settings.REST_FRAMEWORK["DEFAULT_VERSION"])
+        if descendant_pks and api_version < 10:
            # Include children in the "all" field, if needed
            response.data["all"] = [tag.pk for tag in children_source]
        return response
@@ -835,6 +837,61 @@ class DocumentViewSet(
            "custom_field_",
        )

    def _get_selection_data_for_queryset(self, queryset):
        correspondents = Correspondent.objects.annotate(
            document_count=Count(
                "documents",
                filter=Q(documents__in=queryset),
                distinct=True,
            ),
        )
        tags = Tag.objects.annotate(
            document_count=Count(
                "documents",
                filter=Q(documents__in=queryset),
                distinct=True,
            ),
        )
        document_types = DocumentType.objects.annotate(
            document_count=Count(
                "documents",
                filter=Q(documents__in=queryset),
                distinct=True,
            ),
        )
        storage_paths = StoragePath.objects.annotate(
            document_count=Count(
                "documents",
                filter=Q(documents__in=queryset),
                distinct=True,
            ),
        )
        custom_fields = CustomField.objects.annotate(
            document_count=Count(
                "fields__document",
                filter=Q(fields__document__in=queryset),
                distinct=True,
            ),
        )

        return {
            "selected_correspondents": [
                {"id": t.id, "document_count": t.document_count} for t in correspondents
            ],
            "selected_tags": [
                {"id": t.id, "document_count": t.document_count} for t in tags
            ],
            "selected_document_types": [
                {"id": t.id, "document_count": t.document_count} for t in document_types
            ],
            "selected_storage_paths": [
                {"id": t.id, "document_count": t.document_count} for t in storage_paths
            ],
            "selected_custom_fields": [
                {"id": t.id, "document_count": t.document_count} for t in custom_fields
            ],
        }
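The helper returns the same shape the frontend's SelectionData interface models; an illustrative result, with example ids and counts::

    {
        "selected_correspondents": [{"id": 1, "document_count": 1}],
        "selected_tags": [{"id": 9, "document_count": 8}],
        "selected_document_types": [{"id": 3, "document_count": 1}],
        "selected_storage_paths": [{"id": 2, "document_count": 0}],
        "selected_custom_fields": [{"id": 5, "document_count": 0}],
    }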

    def get_queryset(self):
        latest_version_content = Subquery(
            Document.objects.filter(root_document=OuterRef("pk"))
@@ -982,6 +1039,25 @@ class DocumentViewSet(

        return response

    def list(self, request, *args, **kwargs):
        if not get_boolean(
            str(request.query_params.get("include_selection_data", "false")),
        ):
            return super().list(request, *args, **kwargs)

        queryset = self.filter_queryset(self.get_queryset())
        selection_data = self._get_selection_data_for_queryset(queryset)

        page = self.paginate_queryset(queryset)
        if page is not None:
            serializer = self.get_serializer(page, many=True)
            response = self.get_paginated_response(serializer.data)
            response.data["selection_data"] = selection_data
            return response

        serializer = self.get_serializer(queryset, many=True)
        return Response({"results": serializer.data, "selection_data": selection_data})
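A hedged client-side sketch of opting in to the combined response (endpoint and parameter names from this changeset; host and token are placeholders)::

    import requests

    resp = requests.get(
        "http://localhost:8000/api/documents/",
        params={"page": 1, "page_size": 50, "include_selection_data": "true"},
        headers={"Authorization": "Token <token>"},
    )
    data = resp.json()
    documents = data["results"]        # one page of documents
    counts = data["selection_data"]    # aggregate counts for the whole filter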

    def destroy(self, request, *args, **kwargs):
        from documents import index

@@ -2002,6 +2078,21 @@ class UnifiedSearchViewSet(DocumentViewSet):
                else None
            )

            if get_boolean(
                str(
                    request.query_params.get(
                        "include_selection_data",
                        "false",
                    ),
                ),
            ):
                result_ids = queryset.get_result_ids()
                response.data["selection_data"] = (
                    self._get_selection_data_for_queryset(
                        Document.objects.filter(pk__in=result_ids),
                    )
                )

            return response
        except NotFound:
            raise
@@ -2124,7 +2215,36 @@ class SavedViewViewSet(BulkPermissionMixin, PassUserMixin, ModelViewSet):
    ordering_fields = ("name",)


-class DocumentOperationPermissionMixin(PassUserMixin):
class DocumentSelectionMixin:
    def _resolve_document_ids(
        self,
        *,
        user: User,
        validated_data: dict[str, Any],
        permission_codename: str = "view_document",
    ) -> list[int]:
        if not validated_data.get("all", False):
            # if all is not true, just pass through the provided document ids
            return validated_data["documents"]

        # otherwise, reconstruct the document list based on the provided filters
        filters = validated_data.get("filters") or {}
        permitted_documents = get_objects_for_user_owner_aware(
            user,
            permission_codename,
            Document,
        )
        return list(
            DocumentFilterSet(
                data=filters,
                queryset=permitted_documents,
            )
            .qs.distinct()
            .values_list("pk", flat=True),
        )
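As a sketch of the resolution behaviour, assuming validated data produced by DocumentSelectionSerializer: with all=true the ids come from the permission-aware, filtered queryset rather than from the client::

    # Hypothetical call site; "view" is any view mixing in DocumentSelectionMixin.
    ids = view._resolve_document_ids(
        user=request.user,
        validated_data={"all": True, "filters": {"tags__id__in": 9}},
    )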


class DocumentOperationPermissionMixin(PassUserMixin, DocumentSelectionMixin):
    permission_classes = (IsAuthenticated,)
    parser_classes = (parsers.JSONParser,)
    METHOD_NAMES_REQUIRING_USER = {
@@ -2218,8 +2338,15 @@ class DocumentOperationPermissionMixin(PassUserMixin):
        validated_data: dict[str, Any],
        operation_label: str,
    ):
-        documents = validated_data["documents"]
-        parameters = {k: v for k, v in validated_data.items() if k != "documents"}
+        documents = self._resolve_document_ids(
+            user=self.request.user,
+            validated_data=validated_data,
+        )
+        parameters = {
+            k: v
+            for k, v in validated_data.items()
+            if k not in {"documents", "all", "filters"}
+        }
        user = self.request.user

        if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
@@ -2307,7 +2434,10 @@ class BulkEditView(DocumentOperationPermissionMixin):
        user = self.request.user
        method = serializer.validated_data.get("method")
        parameters = serializer.validated_data.get("parameters")
-        documents = serializer.validated_data.get("documents")
+        documents = self._resolve_document_ids(
+            user=user,
+            validated_data=serializer.validated_data,
+        )
        if method.__name__ in self.METHOD_NAMES_REQUIRING_USER:
            parameters["user"] = user
        if not self._has_document_permissions(
@@ -3151,7 +3281,7 @@ class StatisticsView(GenericAPIView):
        )


-class BulkDownloadView(GenericAPIView):
+class BulkDownloadView(DocumentSelectionMixin, GenericAPIView):
    permission_classes = (IsAuthenticated,)
    serializer_class = BulkDownloadSerializer
    parser_classes = (parsers.JSONParser,)
@@ -3160,7 +3290,10 @@ class BulkDownloadView(GenericAPIView):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)

-        ids = serializer.validated_data.get("documents")
+        ids = self._resolve_document_ids(
+            user=request.user,
+            validated_data=serializer.validated_data,
+        )
        documents = Document.objects.filter(pk__in=ids)
        compression = serializer.validated_data.get("compression")
        content = serializer.validated_data.get("content")
@@ -3790,20 +3923,55 @@ class BulkEditObjectsView(PassUserMixin):
        user = self.request.user
        object_type = serializer.validated_data.get("object_type")
        object_ids = serializer.validated_data.get("objects")
        apply_to_all = serializer.validated_data.get("all")
        object_class = serializer.get_object_class(object_type)
        operation = serializer.validated_data.get("operation")
        model_name = object_class._meta.model_name
        perm_codename = (
            f"change_{model_name}"
            if operation == "set_permissions"
            else f"delete_{model_name}"
        )

-        objs = object_class.objects.select_related("owner").filter(pk__in=object_ids)
+        if apply_to_all:
+            # Support all=true so clients need not send large lists of ids for
+            # bulk operations; optional filters narrow the affected set.
+            filters = serializer.validated_data.get("filters") or {}
+            filterset_class = {
+                "tags": TagFilterSet,
+                "correspondents": CorrespondentFilterSet,
+                "document_types": DocumentTypeFilterSet,
+                "storage_paths": StoragePathFilterSet,
+            }[object_type]
+            user_permitted_objects = get_objects_for_user_owner_aware(
+                user,
+                perm_codename,
+                object_class,
+            )
+            objs = filterset_class(
+                data=filters,
+                queryset=user_permitted_objects,
+            ).qs
+            if object_type == "tags":
+                editable_ids = set(user_permitted_objects.values_list("pk", flat=True))
+                all_ids = set(objs.values_list("pk", flat=True))
+                for tag in objs:
+                    all_ids.update(
+                        descendant.pk
+                        for descendant in tag.get_descendants()
+                        if descendant.pk in editable_ids
+                    )
+                objs = object_class.objects.filter(pk__in=all_ids)
+            objs = objs.select_related("owner")
+            object_ids = list(objs.values_list("pk", flat=True))
+        else:
+            objs = object_class.objects.select_related("owner").filter(
+                pk__in=object_ids,
+            )

        if not user.is_superuser:
-            model_name = object_class._meta.model_name
-            perm = (
-                f"documents.change_{model_name}"
-                if operation == "set_permissions"
-                else f"documents.delete_{model_name}"
-            )
+            perm = f"documents.{perm_codename}"
            has_perms = user.has_perm(perm) and all(
-                (obj.owner == user or obj.owner is None) for obj in objs
+                has_perms_owner_aware(user, perm_codename, obj) for obj in objs
            )

            if not has_perms:

@@ -1,6 +1,7 @@
import os

from celery import Celery
from celery.signals import worker_process_init

# Set the default Django settings module for the 'celery' program.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "paperless.settings")
@@ -15,3 +16,19 @@ app.config_from_object("django.conf:settings", namespace="CELERY")

# Load task modules from all registered Django apps.
app.autodiscover_tasks()


@worker_process_init.connect
def on_worker_process_init(**kwargs) -> None:  # pragma: no cover
    """
    Register built-in parsers eagerly in each Celery worker process.

    This registers only the built-in parsers (no entrypoint discovery) so
    that workers can begin consuming documents immediately. Entrypoint
    discovery for third-party parsers is deferred to the first call of
    get_parser_registry() inside a task, keeping worker_process_init
    well within its 4-second timeout budget.
    """
    from paperless.parsers.registry import init_builtin_parsers

    init_builtin_parsers()

@@ -1,62 +1,51 @@
import json
from typing import Any

-from asgiref.sync import async_to_sync
-from channels.exceptions import AcceptConnection
-from channels.exceptions import DenyConnection
-from channels.generic.websocket import WebsocketConsumer
+from channels.generic.websocket import AsyncWebsocketConsumer


-class StatusConsumer(WebsocketConsumer):
-    def _authenticated(self):
-        return "user" in self.scope and self.scope["user"].is_authenticated
+class StatusConsumer(AsyncWebsocketConsumer):
+    def _authenticated(self) -> bool:
+        user: Any = self.scope.get("user")
+        return user is not None and user.is_authenticated

-    def _can_view(self, data):
-        user = self.scope.get("user") if self.scope.get("user") else None
+    async def _can_view(self, data: dict[str, Any]) -> bool:
+        user: Any = self.scope.get("user")
        if user is None:
            return False
        owner_id = data.get("owner_id")
        users_can_view = data.get("users_can_view", [])
        groups_can_view = data.get("groups_can_view", [])
-        return (
-            user.is_superuser
-            or user.id == owner_id
-            or user.id in users_can_view
-            or any(
-                user.groups.filter(pk=group_id).exists() for group_id in groups_can_view
-            )
-        )
+        if user.is_superuser or user.id == owner_id or user.id in users_can_view:
+            return True
+
+        return await user.groups.filter(pk__in=groups_can_view).aexists()

-    def connect(self):
+    async def connect(self) -> None:
        if not self._authenticated():
-            raise DenyConnection
-        else:
-            async_to_sync(self.channel_layer.group_add)(
-                "status_updates",
-                self.channel_name,
-            )
-            raise AcceptConnection
+            await self.close()
+            return
+        await self.channel_layer.group_add("status_updates", self.channel_name)
+        await self.accept()

-    def disconnect(self, close_code) -> None:
-        async_to_sync(self.channel_layer.group_discard)(
-            "status_updates",
-            self.channel_name,
-        )
+    async def disconnect(self, code: int) -> None:
+        await self.channel_layer.group_discard("status_updates", self.channel_name)

-    def status_update(self, event) -> None:
+    async def status_update(self, event: dict[str, Any]) -> None:
        if not self._authenticated():
-            self.close()
-        else:
-            if self._can_view(event["data"]):
-                self.send(json.dumps(event))
+            await self.close()
+        elif await self._can_view(event["data"]):
+            await self.send(json.dumps(event))

-    def documents_deleted(self, event) -> None:
+    async def documents_deleted(self, event: dict[str, Any]) -> None:
        if not self._authenticated():
-            self.close()
+            await self.close()
        else:
-            self.send(json.dumps(event))
+            await self.send(json.dumps(event))

-    def document_updated(self, event: Any) -> None:
+    async def document_updated(self, event: dict[str, Any]) -> None:
        if not self._authenticated():
-            self.close()
-        else:
-            if self._can_view(event["data"]):
-                self.send(json.dumps(event))
+            await self.close()
+        elif await self._can_view(event["data"]):
+            await self.send(json.dumps(event))
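For reference, events reach this consumer through the channel layer's group send, using the standard Channels API; the event payload shape below is inferred from the handlers above::

    from asgiref.sync import async_to_sync
    from channels.layers import get_channel_layer

    # "type" selects the handler method on StatusConsumer.
    async_to_sync(get_channel_layer().group_send)(
        "status_updates",
        {"type": "status_update", "data": {"owner_id": 1, "users_can_view": []}},
    )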
src/paperless/parsers/__init__.py (new file, 379 lines)
@@ -0,0 +1,379 @@
"""
Public interface for the Paperless-ngx parser plugin system.

This module defines ParserProtocol — the structural contract that every
document parser must satisfy, whether it is a built-in parser shipped with
Paperless-ngx or a third-party parser installed via a Python entrypoint.

Phase 1/2 scope: only the Protocol is defined here. The transitional
DocumentParser ABC (Phase 3) and concrete built-in parsers (Phase 3+) will
be added in later phases, so there are intentionally no imports of parser
implementations here.

Usage example (third-party parser)::

    from paperless.parsers import ParserProtocol

    class MyParser:
        name = "my-parser"
        version = "1.0.0"
        author = "Acme Corp"
        url = "https://example.com/my-parser"

        @classmethod
        def supported_mime_types(cls) -> dict[str, str]:
            return {"application/x-my-format": ".myf"}

        @classmethod
        def score(cls, mime_type, filename, path=None):
            return 10

        # … implement remaining protocol methods …

    assert isinstance(MyParser(), ParserProtocol)
"""

from __future__ import annotations

from typing import TYPE_CHECKING
from typing import Protocol
from typing import Self
from typing import TypedDict
from typing import runtime_checkable

if TYPE_CHECKING:
    import datetime
    from pathlib import Path
    from types import TracebackType

__all__ = [
    "MetadataEntry",
    "ParserProtocol",
]


class MetadataEntry(TypedDict):
    """A single metadata field extracted from a document.

    All four keys are required. Values are always serialised to strings —
    type-specific conversion (dates, integers, lists) is the responsibility
    of the parser before returning.
    """

    namespace: str
    """URI of the metadata namespace (e.g. 'http://ns.adobe.com/pdf/1.3/')."""

    prefix: str
    """Conventional namespace prefix (e.g. 'pdf', 'xmp', 'dc')."""

    key: str
    """Field name within the namespace (e.g. 'Author', 'CreateDate')."""

    value: str
    """String representation of the field value."""


@runtime_checkable
class ParserProtocol(Protocol):
    """Structural contract for all Paperless-ngx document parsers.

    Both built-in parsers and third-party plugins (discovered via the
    "paperless_ngx.parsers" entrypoint group) must satisfy this Protocol.
    Because it is decorated with runtime_checkable, isinstance(obj,
    ParserProtocol) works at runtime based on method presence, which is
    useful for validation in ParserRegistry.discover.

    Parsers must expose four string attributes at the class level so the
    registry can log attribution information without instantiating the parser:

    name : str
        Human-readable parser name (e.g. "Tesseract OCR").
    version : str
        Semantic version string (e.g. "1.2.3").
    author : str
        Author or organisation name.
    url : str
        URL for documentation, source code, or issue tracker.
    """

    # ------------------------------------------------------------------
    # Class-level identity (checked by the registry, not Protocol methods)
    # ------------------------------------------------------------------

    name: str
    version: str
    author: str
    url: str

    # ------------------------------------------------------------------
    # Class methods
    # ------------------------------------------------------------------

    @classmethod
    def supported_mime_types(cls) -> dict[str, str]:
        """Return a mapping of supported MIME types to preferred file extensions.

        The keys are MIME type strings (e.g. "application/pdf"), and the
        values are the preferred file extension including the leading dot
        (e.g. ".pdf"). The registry uses this mapping both to decide whether
        a parser is a candidate for a given file and to determine the default
        extension when creating archive copies.

        Returns
        -------
        dict[str, str]
            {mime_type: extension} mapping — may be empty if the parser
            has been temporarily disabled.
        """
        ...

    @classmethod
    def score(
        cls,
        mime_type: str,
        filename: str,
        path: Path | None = None,
    ) -> int | None:
        """Return a priority score for handling this file, or None to decline.

        The registry calls this after confirming that the MIME type is in
        supported_mime_types. Parsers may inspect filename and optionally
        the file at path to refine their confidence level.

        A higher score wins. Return None to explicitly decline handling a file
        even though the MIME type is listed as supported (e.g. when a feature
        flag is disabled, or a required service is not configured).

        Parameters
        ----------
        mime_type:
            The detected MIME type of the file to be parsed.
        filename:
            The original filename, including extension.
        path:
            Optional filesystem path to the file. Parsers that need to
            inspect file content (e.g. magic-byte sniffing) may use this.
            May be None when scoring happens before the file is available locally.

        Returns
        -------
        int | None
            Priority score (higher wins), or None to decline.
        """
        ...

    # ------------------------------------------------------------------
    # Properties
    # ------------------------------------------------------------------

    @property
    def can_produce_archive(self) -> bool:
        """Whether this parser can produce a searchable PDF archive copy.

        If True, the consumption pipeline may request an archive version when
        processing the document, subject to the ARCHIVE_FILE_GENERATION
        setting. If False, only thumbnail and text extraction are performed.
        """
        ...

    @property
    def requires_pdf_rendition(self) -> bool:
        """Whether the parser must produce a PDF for the frontend to display.

        True for formats the browser cannot display natively (e.g. DOCX, ODT).
        When True, the pipeline always stores the PDF output regardless of the
        ARCHIVE_FILE_GENERATION setting, since the original format cannot be
        shown to the user.
        """
        ...

    # ------------------------------------------------------------------
    # Core parsing interface
    # ------------------------------------------------------------------

    def parse(
        self,
        document_path: Path,
        mime_type: str,
        *,
        produce_archive: bool = True,
    ) -> None:
        """Parse document_path and populate internal state.

        After a successful call, callers retrieve results via get_text,
        get_date, and get_archive_path.

        Parameters
        ----------
        document_path:
            Absolute path to the document file to parse.
        mime_type:
            Detected MIME type of the document.
        produce_archive:
            When True (the default) and can_produce_archive is also True,
            the parser should produce a searchable PDF at the path returned
            by get_archive_path. Pass False when only text extraction and
            thumbnail generation are required and disk I/O should be minimised.

        Raises
        ------
        documents.parsers.ParseError
            If parsing fails for any reason.
        """
        ...

    # ------------------------------------------------------------------
    # Result accessors
    # ------------------------------------------------------------------

    def get_text(self) -> str | None:
        """Return the plain-text content extracted during parse.

        Returns
        -------
        str | None
            Extracted text, or None if no text could be found.
        """
        ...

    def get_date(self) -> datetime.datetime | None:
        """Return the document date detected during parse.

        Returns
        -------
        datetime.datetime | None
            Detected document date, or None if no date was found.
        """
        ...

    def get_archive_path(self) -> Path | None:
        """Return the path to the generated archive PDF, or None.

        Returns
        -------
        Path | None
            Path to the searchable PDF archive, or None if no archive was
            produced (e.g. because produce_archive=False or the parser does
            not support archive generation).
        """
        ...

    # ------------------------------------------------------------------
    # Thumbnail and metadata
    # ------------------------------------------------------------------

    def get_thumbnail(self, document_path: Path, mime_type: str) -> Path:
        """Generate and return the path to a thumbnail image for the document.

        May be called independently of parse. The returned path must point to
        an existing WebP image file inside the parser's temporary working
        directory.

        Parameters
        ----------
        document_path:
            Absolute path to the source document.
        mime_type:
            Detected MIME type of the document.

        Returns
        -------
        Path
            Path to the generated thumbnail image (WebP format preferred).
        """
        ...

    def get_page_count(
        self,
        document_path: Path,
        mime_type: str,
    ) -> int | None:
        """Return the number of pages in the document, if determinable.

        Parameters
        ----------
        document_path:
            Absolute path to the source document.
        mime_type:
            Detected MIME type of the document.

        Returns
        -------
        int | None
            Page count, or None if the parser cannot determine it.
        """
        ...

    def extract_metadata(
        self,
        document_path: Path,
        mime_type: str,
    ) -> list[MetadataEntry]:
        """Extract format-specific metadata from the document.

        Called by the API view layer on demand — not during the consumption
        pipeline. Results are returned to the frontend for per-file display.

        For documents with an archive version, this method is called twice:
        once for the original file (with its native MIME type) and once for
        the archive file (with ``"application/pdf"``). Parsers that produce
        archives should handle both cases.

        Implementations must not raise. A failure to read metadata is not
        fatal — log a warning and return whatever partial results were
        collected, or ``[]`` if none.

        Parameters
        ----------
        document_path:
            Absolute path to the file to extract metadata from.
        mime_type:
            MIME type of the file at ``document_path``. May be
            ``"application/pdf"`` when called for the archive version.

        Returns
        -------
        list[MetadataEntry]
            Zero or more metadata entries. Returns ``[]`` if no metadata
            could be extracted or the format does not support it.
        """
        ...

    # ------------------------------------------------------------------
    # Context manager
    # ------------------------------------------------------------------

    def __enter__(self) -> Self:
        """Enter the parser context, returning the parser instance.

        Implementations should perform any resource allocation here if not
        done in __init__ (e.g. creating API clients or temp directories).

        Returns
        -------
        Self
            The parser instance itself.
        """
        ...

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> None:
        """Exit the parser context and release all resources.

        Implementations must clean up all temporary files and other resources
        regardless of whether an exception occurred.

        Parameters
        ----------
        exc_type:
            The exception class, or None if no exception was raised.
        exc_val:
            The exception instance, or None.
        exc_tb:
            The traceback, or None.
        """
        ...
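Expanding the partial example in the module docstring above, a stub that satisfies every member of ParserProtocol might look as follows (a sketch for illustration, not a useful parser)::

    from pathlib import Path

    class NullParser:
        name = "null-parser"
        version = "0.0.1"
        author = "Example"
        url = "https://example.com/null-parser"

        # Class-level data attributes satisfy the protocol's properties.
        can_produce_archive = False
        requires_pdf_rendition = False

        @classmethod
        def supported_mime_types(cls) -> dict[str, str]:
            return {"text/plain": ".txt"}

        @classmethod
        def score(cls, mime_type, filename, path=None):
            return 0  # lowest confidence; real parsers return higher

        def parse(self, document_path, mime_type, *, produce_archive=True):
            self._text = Path(document_path).read_text()

        def get_text(self):
            return getattr(self, "_text", None)

        def get_date(self):
            return None

        def get_archive_path(self):
            return None

        def get_thumbnail(self, document_path, mime_type):
            raise NotImplementedError  # a real parser returns a WebP Path

        def get_page_count(self, document_path, mime_type):
            return None

        def extract_metadata(self, document_path, mime_type):
            return []

        def __enter__(self):
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            return None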
src/paperless/parsers/registry.py (new file, 364 lines)
@@ -0,0 +1,364 @@
"""
Singleton registry that tracks all document parsers available to
Paperless-ngx — both built-ins shipped with the application and third-party
plugins installed via Python entrypoints.

Public surface
--------------
get_parser_registry
    Lazy-initialise and return the shared ParserRegistry. This is the primary
    entry point for production code.

init_builtin_parsers
    Register built-in parsers only, without entrypoint discovery. Safe to
    call from Celery worker_process_init where importing all entrypoints
    would be wasteful or cause side effects.

reset_parser_registry
    Reset module-level state. For tests only.

Entrypoint group
----------------
Third-party parsers must advertise themselves under the
"paperless_ngx.parsers" entrypoint group in their pyproject.toml::

    [project.entry-points."paperless_ngx.parsers"]
    my_parser = "my_package.parsers:MyParser"

The loaded class must expose the following attributes at the class level
(not just on instances) for the registry to accept it:
name, version, author, url, supported_mime_types (callable), score (callable).
"""

from __future__ import annotations

import logging
from importlib.metadata import entry_points
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from pathlib import Path

    from paperless.parsers import ParserProtocol

logger = logging.getLogger("paperless.parsers.registry")

# ---------------------------------------------------------------------------
# Module-level singleton state
# ---------------------------------------------------------------------------

_registry: ParserRegistry | None = None
_discovery_complete: bool = False

# Attribute names that every registered external parser class must expose.
_REQUIRED_ATTRS: tuple[str, ...] = (
    "name",
    "version",
    "author",
    "url",
    "supported_mime_types",
    "score",
)


# ---------------------------------------------------------------------------
# Module-level accessor functions
# ---------------------------------------------------------------------------


def get_parser_registry() -> ParserRegistry:
    """Return the shared ParserRegistry instance.

    On the first call this function:

    1. Creates a new ParserRegistry.
    2. Calls register_defaults to install built-in parsers.
    3. Calls discover to load third-party plugins via importlib.metadata entrypoints.
    4. Calls log_summary to emit a startup summary.

    Subsequent calls return the same instance immediately.

    Returns
    -------
    ParserRegistry
        The shared registry singleton.
    """
    global _registry, _discovery_complete

    if _registry is None:
        _registry = ParserRegistry()
        _registry.register_defaults()

    if not _discovery_complete:
        _registry.discover()
        _registry.log_summary()
        _discovery_complete = True

    return _registry


def init_builtin_parsers() -> None:
    """Register built-in parsers without performing entrypoint discovery.

    Intended for use in Celery worker_process_init handlers where importing
    all installed entrypoints would be wasteful, slow, or could produce
    undesirable side effects. Entrypoint discovery (third-party plugins) is
    deliberately not performed.

    Safe to call multiple times — subsequent calls are no-ops.

    Returns
    -------
    None
    """
    global _registry

    if _registry is None:
        _registry = ParserRegistry()
        _registry.register_defaults()


def reset_parser_registry() -> None:
    """Reset the module-level registry state to its initial values.

    Resets _registry and _discovery_complete so the next call to
    get_parser_registry will re-initialise everything from scratch.

    FOR TESTS ONLY. Do not call this in production code — resetting the
    registry mid-request causes all subsequent parser lookups to go through
    discovery again, which is expensive and may have unexpected side effects
    in multi-threaded environments.

    Returns
    -------
    None
    """
    global _registry, _discovery_complete

    _registry = None
    _discovery_complete = False
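In tests this pairs naturally with a fixture that restores pristine registry state around each test; a minimal sketch assuming pytest::

    import pytest

    from paperless.parsers.registry import reset_parser_registry

    @pytest.fixture(autouse=True)
    def clean_parser_registry():
        reset_parser_registry()  # fresh state before the test
        yield
        reset_parser_registry()  # and again after, for the next test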


# ---------------------------------------------------------------------------
# Registry class
# ---------------------------------------------------------------------------


class ParserRegistry:
    """Registry that maps MIME types to the best available parser class.

    Parsers are partitioned into two lists:

    _builtins
        Parser classes registered via register_builtin (populated by
        register_defaults in Phase 3+).

    _external
        Parser classes loaded from installed Python entrypoints via discover.

    When resolving a parser for a file, external parsers are evaluated
    alongside built-in parsers using a uniform scoring mechanism. Both lists
    are iterated together; the class with the highest score wins. If an
    external parser wins, its attribution details are logged so users can
    identify which third-party package handled their document.
    """

    def __init__(self) -> None:
        self._external: list[type[ParserProtocol]] = []
        self._builtins: list[type[ParserProtocol]] = []

    # ------------------------------------------------------------------
    # Registration
    # ------------------------------------------------------------------

    def register_builtin(self, parser_class: type[ParserProtocol]) -> None:
        """Register a built-in parser class.

        Built-in parsers are shipped with Paperless-ngx and are appended to
        the _builtins list. They are never overridden by external parsers;
        instead, scoring determines which parser wins for any given file.

        Parameters
        ----------
        parser_class:
            The parser class to register. Must satisfy ParserProtocol.
        """
        self._builtins.append(parser_class)

    def register_defaults(self) -> None:
        """Register the built-in parsers that ship with Paperless-ngx.

        Each parser that has been migrated to the new ParserProtocol interface
        is registered here. Parsers are added in ascending weight order so
        that log output is predictable; scoring determines which parser wins
        at runtime regardless of registration order.
        """
        from paperless.parsers.text import TextDocumentParser

        self.register_builtin(TextDocumentParser)

    # ------------------------------------------------------------------
    # Discovery
    # ------------------------------------------------------------------

    def discover(self) -> None:
        """Load third-party parsers from the "paperless_ngx.parsers" entrypoint group.

        For each advertised entrypoint the method:

        1. Calls ep.load() to import the class.
        2. Validates that the class exposes all required attributes.
        3. On success, appends the class to _external and logs an info message.
        4. On failure (import error or missing attributes), logs an appropriate
           warning/error and continues to the next entrypoint.

        Errors during discovery of a single parser do not prevent other parsers
        from being loaded.

        Returns
        -------
        None
        """
        eps = entry_points(group="paperless_ngx.parsers")

        for ep in eps:
            try:
                parser_class = ep.load()
            except Exception:
                logger.exception(
                    "Failed to load parser entrypoint '%s' — skipping.",
                    ep.name,
                )
                continue

            missing = [
                attr for attr in _REQUIRED_ATTRS if not hasattr(parser_class, attr)
]
|
||||
if missing:
|
||||
logger.warning(
|
||||
"Parser loaded from entrypoint '%s' is missing required "
|
||||
"attributes %r — skipping.",
|
||||
ep.name,
|
||||
missing,
|
||||
)
|
||||
continue
|
||||
|
||||
self._external.append(parser_class)
|
||||
logger.info(
|
||||
"Loaded third-party parser '%s' v%s by %s (entrypoint: '%s').",
|
||||
parser_class.name,
|
||||
parser_class.version,
|
||||
parser_class.author,
|
||||
ep.name,
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Summary logging
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def log_summary(self) -> None:
|
||||
"""Log a startup summary of all registered parsers.
|
||||
|
||||
Built-in parsers are listed first, followed by any external parsers
|
||||
discovered from entrypoints. If no external parsers were found a
|
||||
short informational message is logged instead of an empty list.
|
||||
|
||||
Returns
|
||||
-------
|
||||
None
|
||||
"""
|
||||
logger.info(
|
||||
"Built-in parsers (%d):",
|
||||
len(self._builtins),
|
||||
)
|
||||
for cls in self._builtins:
|
||||
logger.info(
|
||||
" [built-in] %s v%s — %s",
|
||||
getattr(cls, "name", repr(cls)),
|
||||
getattr(cls, "version", "unknown"),
|
||||
getattr(cls, "url", "built-in"),
|
||||
)
|
||||
|
||||
if not self._external:
|
||||
logger.info("No third-party parsers discovered.")
|
||||
return
|
||||
|
||||
logger.info(
|
||||
"Third-party parsers (%d):",
|
||||
len(self._external),
|
||||
)
|
||||
for cls in self._external:
|
||||
logger.info(
|
||||
" [external] %s v%s by %s — report issues at %s",
|
||||
getattr(cls, "name", repr(cls)),
|
||||
getattr(cls, "version", "unknown"),
|
||||
getattr(cls, "author", "unknown"),
|
||||
getattr(cls, "url", "unknown"),
|
||||
)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Parser resolution
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def get_parser_for_file(
|
||||
self,
|
||||
mime_type: str,
|
||||
filename: str,
|
||||
path: Path | None = None,
|
||||
) -> type[ParserProtocol] | None:
|
||||
"""Return the best parser class for the given file, or None.
|
||||
|
||||
All registered parsers (external first, then built-ins) are evaluated
|
||||
against the file. A parser is eligible if mime_type appears in the dict
|
||||
returned by its supported_mime_types classmethod, and its score
|
||||
classmethod returns a non-None integer.
|
||||
|
||||
The parser with the highest score wins. When two parsers return the
|
||||
same score, the one that appears earlier in the evaluation order wins
|
||||
(external parsers are evaluated before built-ins, giving third-party
|
||||
packages a chance to override defaults at equal priority).
|
||||
|
||||
When an external parser is selected, its identity is logged at INFO
|
||||
level so operators can trace which package handled a document.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
mime_type:
|
||||
The detected MIME type of the file.
|
||||
filename:
|
||||
The original filename, including extension.
|
||||
path:
|
||||
Optional filesystem path to the file. Forwarded to each
|
||||
parser's score method.
|
||||
|
||||
Returns
|
||||
-------
|
||||
type[ParserProtocol] | None
|
||||
The winning parser class, or None if no parser can handle the file.
|
||||
"""
|
||||
best_score: int | None = None
|
||||
best_parser: type[ParserProtocol] | None = None
|
||||
|
||||
# External parsers are placed first so that, at equal scores, an
|
||||
# external parser wins over a built-in (first-seen policy).
|
||||
for parser_class in (*self._external, *self._builtins):
|
||||
if mime_type not in parser_class.supported_mime_types():
|
||||
continue
|
||||
|
||||
score = parser_class.score(mime_type, filename, path)
|
||||
if score is None:
|
||||
continue
|
||||
|
||||
if best_score is None or score > best_score:
|
||||
best_score = score
|
||||
best_parser = parser_class
|
||||
|
||||
if best_parser is not None and best_parser in self._external:
|
||||
logger.info(
|
||||
"Document handled by third-party parser '%s' v%s — %s",
|
||||
getattr(best_parser, "name", repr(best_parser)),
|
||||
getattr(best_parser, "version", "unknown"),
|
||||
getattr(best_parser, "url", "unknown"),
|
||||
)
|
||||
|
||||
return best_parser
|
||||
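A minimal sketch of how a third-party package could hook into the registry above. The entrypoint group name ("paperless_ngx.parsers"), the required class attributes (_REQUIRED_ATTRS), and the resolution call are taken from the code; the package name "paperless-markdown" and module "paperless_markdown.parser" are hypothetical, and a real plugin would also need the full ParserProtocol methods (parse, get_text, get_thumbnail, and so on) to actually process documents:

# pyproject.toml of the hypothetical plugin package:
#
#   [project.entry-points."paperless_ngx.parsers"]
#   markdown = "paperless_markdown.parser:MarkdownParser"

# paperless_markdown/parser.py (abbreviated; discover() only validates the
# attributes listed in _REQUIRED_ATTRS before registering the class):
class MarkdownParser:
    name = "Markdown Parser"
    version = "1.0.0"
    author = "Example Author"
    url = "https://example.com/paperless-markdown"

    @classmethod
    def supported_mime_types(cls) -> dict[str, str]:
        return {"text/markdown": ".md"}

    @classmethod
    def score(cls, mime_type: str, filename: str, path=None) -> int | None:
        # Any non-None score makes the parser a candidate; None declines.
        return 10 if mime_type == "text/markdown" else None

# Once the package is installed, discover() loads the class and resolution
# returns it for markdown files:
#
#   from paperless.parsers.registry import get_parser_registry
#   parser_cls = get_parser_registry().get_parser_for_file("text/markdown", "notes.md")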
src/paperless/parsers/text.py (new file, 320 lines)
@@ -0,0 +1,320 @@
"""
|
||||
Built-in plain-text document parser.
|
||||
|
||||
Handles text/plain, text/csv, and application/csv MIME types by reading the
|
||||
file content directly. Thumbnails are generated by rendering a page-sized
|
||||
WebP image from the first 100,000 characters using Pillow.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import shutil
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import Self
|
||||
|
||||
from django.conf import settings
|
||||
from PIL import Image
|
||||
from PIL import ImageDraw
|
||||
from PIL import ImageFont
|
||||
|
||||
from paperless.version import __full_version_str__
|
||||
|
||||
if TYPE_CHECKING:
|
||||
import datetime
|
||||
from types import TracebackType
|
||||
|
||||
from paperless.parsers import MetadataEntry
|
||||
|
||||
logger = logging.getLogger("paperless.parsing.text")
|
||||
|
||||
_SUPPORTED_MIME_TYPES: dict[str, str] = {
|
||||
"text/plain": ".txt",
|
||||
"text/csv": ".csv",
|
||||
"application/csv": ".csv",
|
||||
}
|
||||
|
||||
|
||||
class TextDocumentParser:
|
||||
"""Parse plain-text documents (txt, csv) for Paperless-ngx.
|
||||
|
||||
This parser reads the file content directly as UTF-8 text and renders a
|
||||
simple thumbnail using Pillow. It does not perform OCR and does not
|
||||
produce a searchable PDF archive copy.
|
||||
|
||||
Class attributes
|
||||
----------------
|
||||
name : str
|
||||
Human-readable parser name.
|
||||
version : str
|
||||
Semantic version string, kept in sync with Paperless-ngx releases.
|
||||
author : str
|
||||
Maintainer name.
|
||||
url : str
|
||||
Issue tracker / source URL.
|
||||
"""
|
||||
|
||||
name: str = "Paperless-ngx Text Parser"
|
||||
version: str = __full_version_str__
|
||||
author: str = "Paperless-ngx Contributors"
|
||||
url: str = "https://github.com/paperless-ngx/paperless-ngx"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Class methods
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls) -> dict[str, str]:
|
||||
"""Return the MIME types this parser handles.
|
||||
|
||||
Returns
|
||||
-------
|
||||
dict[str, str]
|
||||
Mapping of MIME type to preferred file extension.
|
||||
"""
|
||||
return _SUPPORTED_MIME_TYPES
|
||||
|
||||
@classmethod
|
||||
def score(
|
||||
cls,
|
||||
mime_type: str,
|
||||
filename: str,
|
||||
path: Path | None = None,
|
||||
) -> int | None:
|
||||
"""Return the priority score for handling this file.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
mime_type:
|
||||
Detected MIME type of the file.
|
||||
filename:
|
||||
Original filename including extension.
|
||||
path:
|
||||
Optional filesystem path. Not inspected by this parser.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int | None
|
||||
10 if the MIME type is supported, otherwise None.
|
||||
"""
|
||||
if mime_type in _SUPPORTED_MIME_TYPES:
|
||||
return 10
|
||||
return None
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Properties
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@property
|
||||
def can_produce_archive(self) -> bool:
|
||||
"""Whether this parser can produce a searchable PDF archive copy.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
Always False — the text parser does not produce a PDF archive.
|
||||
"""
|
||||
return False
|
||||
|
||||
@property
|
||||
def requires_pdf_rendition(self) -> bool:
|
||||
"""Whether the parser must produce a PDF for the frontend to display.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
Always False — plain text files are displayable as-is.
|
||||
"""
|
||||
return False
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Lifecycle
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def __init__(self, logging_group: object = None) -> None:
|
||||
settings.SCRATCH_DIR.mkdir(parents=True, exist_ok=True)
|
||||
self._tempdir = Path(
|
||||
tempfile.mkdtemp(prefix="paperless-", dir=settings.SCRATCH_DIR),
|
||||
)
|
||||
self._text: str | None = None
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
return self
|
||||
|
||||
def __exit__(
|
||||
self,
|
||||
exc_type: type[BaseException] | None,
|
||||
exc_val: BaseException | None,
|
||||
exc_tb: TracebackType | None,
|
||||
) -> None:
|
||||
logger.debug("Cleaning up temporary directory %s", self._tempdir)
|
||||
shutil.rmtree(self._tempdir, ignore_errors=True)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Core parsing interface
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def parse(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
*,
|
||||
produce_archive: bool = True,
|
||||
) -> None:
|
||||
"""Read the document and store its text content.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
document_path:
|
||||
Absolute path to the text file.
|
||||
mime_type:
|
||||
Detected MIME type of the document.
|
||||
produce_archive:
|
||||
Ignored — this parser never produces a PDF archive.
|
||||
|
||||
Raises
|
||||
------
|
||||
documents.parsers.ParseError
|
||||
If the file cannot be read.
|
||||
"""
|
||||
self._text = self._read_text(document_path)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Result accessors
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def get_text(self) -> str | None:
|
||||
"""Return the plain-text content extracted during parse.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str | None
|
||||
Extracted text, or None if parse has not been called yet.
|
||||
"""
|
||||
return self._text
|
||||
|
||||
def get_date(self) -> datetime.datetime | None:
|
||||
"""Return the document date detected during parse.
|
||||
|
||||
Returns
|
||||
-------
|
||||
datetime.datetime | None
|
||||
Always None — the text parser does not detect dates.
|
||||
"""
|
||||
return None
|
||||
|
||||
def get_archive_path(self) -> Path | None:
|
||||
"""Return the path to a generated archive PDF, or None.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path | None
|
||||
Always None — the text parser does not produce a PDF archive.
|
||||
"""
|
||||
return None
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Thumbnail and metadata
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def get_thumbnail(self, document_path: Path, mime_type: str) -> Path:
|
||||
"""Render the first portion of the document as a WebP thumbnail.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
document_path:
|
||||
Absolute path to the source document.
|
||||
mime_type:
|
||||
Detected MIME type of the document.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
Path to the generated WebP thumbnail inside the temporary directory.
|
||||
"""
|
||||
max_chars = 100_000
|
||||
file_size_limit = 50 * 1024 * 1024
|
||||
|
||||
if document_path.stat().st_size > file_size_limit:
|
||||
text = "[File too large to preview]"
|
||||
else:
|
||||
with Path(document_path).open("r", encoding="utf-8", errors="replace") as f:
|
||||
text = f.read(max_chars)
|
||||
|
||||
img = Image.new("RGB", (500, 700), color="white")
|
||||
draw = ImageDraw.Draw(img)
|
||||
font = ImageFont.truetype(
|
||||
font=settings.THUMBNAIL_FONT_NAME,
|
||||
size=20,
|
||||
layout_engine=ImageFont.Layout.BASIC,
|
||||
)
|
||||
draw.multiline_text((5, 5), text, font=font, fill="black", spacing=4)
|
||||
|
||||
out_path = self._tempdir / "thumb.webp"
|
||||
img.save(out_path, format="WEBP")
|
||||
|
||||
return out_path
|
||||
|
||||
def get_page_count(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
) -> int | None:
|
||||
"""Return the number of pages in the document.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
document_path:
|
||||
Absolute path to the source document.
|
||||
mime_type:
|
||||
Detected MIME type of the document.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int | None
|
||||
Always None — page count is not meaningful for plain text.
|
||||
"""
|
||||
return None
|
||||
|
||||
def extract_metadata(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
) -> list[MetadataEntry]:
|
||||
"""Extract format-specific metadata from the document.
|
||||
|
||||
Returns
|
||||
-------
|
||||
list[MetadataEntry]
|
||||
Always ``[]`` — plain text files carry no structured metadata.
|
||||
"""
|
||||
return []
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Private helpers
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _read_text(self, filepath: Path) -> str:
|
||||
"""Read file content, replacing invalid UTF-8 bytes rather than failing.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
filepath:
|
||||
Path to the file to read.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
File content as a string.
|
||||
"""
|
||||
try:
|
||||
return filepath.read_text(encoding="utf-8")
|
||||
except UnicodeDecodeError as exc:
|
||||
logger.warning(
|
||||
"Unicode error reading %s, replacing bad bytes: %s",
|
||||
filepath,
|
||||
exc,
|
||||
)
|
||||
return filepath.read_bytes().decode("utf-8", errors="replace")
|
||||
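For illustration, a standalone usage sketch of the parser above. Django settings with SCRATCH_DIR and THUMBNAIL_FONT_NAME must already be configured; the input path is hypothetical:

from pathlib import Path

from paperless.parsers.text import TextDocumentParser

document = Path("/tmp/example.txt")  # hypothetical sample file
with TextDocumentParser() as parser:
    parser.parse(document, "text/plain")
    text = parser.get_text()  # full file content, invalid UTF-8 bytes replaced
    thumb = parser.get_thumbnail(document, "text/plain")  # WebP in the tempdir
# On __exit__ the temporary directory (and the thumbnail inside it) is removed.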
src/paperless/tests/conftest.py (new file, 48 lines)
@@ -0,0 +1,48 @@
"""
|
||||
Fixtures defined here are available to every test module under
|
||||
src/paperless/tests/ (including sub-packages such as parsers/).
|
||||
|
||||
Session-scoped fixtures for the shared samples directory live here so
|
||||
sub-package conftest files can reference them without duplicating path logic.
|
||||
Parser-specific fixtures (concrete parser instances, format-specific sample
|
||||
files) live in paperless/tests/parsers/conftest.py.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from paperless.parsers.registry import reset_parser_registry
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Generator
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def samples_dir() -> Path:
|
||||
"""Absolute path to the shared parser sample files directory.
|
||||
|
||||
Sub-package conftest files derive format-specific paths from this root,
|
||||
e.g. ``samples_dir / "text" / "test.txt"``.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
Directory containing all sample documents used by parser tests.
|
||||
"""
|
||||
return (Path(__file__).parent / "samples").resolve()
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def clean_registry() -> Generator[None, None, None]:
|
||||
"""Reset the parser registry before and after every test.
|
||||
|
||||
This prevents registry state from leaking between tests that call
|
||||
get_parser_registry() or init_builtin_parsers().
|
||||
"""
|
||||
reset_parser_registry()
|
||||
yield
|
||||
reset_parser_registry()
|
||||
src/paperless/tests/parsers/__init__.py (new file, empty)

src/paperless/tests/parsers/conftest.py (new file, 76 lines)
@@ -0,0 +1,76 @@
"""
|
||||
Parser fixtures that are used across multiple test modules in this package
|
||||
are defined here. Format-specific sample-file fixtures are grouped by parser
|
||||
so it is easy to see which files belong to which test module.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import pytest
|
||||
|
||||
from paperless.parsers.text import TextDocumentParser
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Generator
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Text parser sample files
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def text_samples_dir(samples_dir: Path) -> Path:
|
||||
"""Absolute path to the text parser sample files directory.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
``<samples_dir>/text/``
|
||||
"""
|
||||
return samples_dir / "text"
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def sample_txt_file(text_samples_dir: Path) -> Path:
|
||||
"""Path to a valid UTF-8 plain-text sample file.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
Absolute path to ``text/test.txt``.
|
||||
"""
|
||||
return text_samples_dir / "test.txt"
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def malformed_txt_file(text_samples_dir: Path) -> Path:
|
||||
"""Path to a text file containing invalid UTF-8 bytes.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Path
|
||||
Absolute path to ``text/decode_error.txt``.
|
||||
"""
|
||||
return text_samples_dir / "decode_error.txt"
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Text parser instance
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def text_parser() -> Generator[TextDocumentParser, None, None]:
|
||||
"""Yield a TextDocumentParser and clean up its temporary directory afterwards.
|
||||
|
||||
Yields
|
||||
------
|
||||
TextDocumentParser
|
||||
A ready-to-use parser instance.
|
||||
"""
|
||||
with TextDocumentParser() as parser:
|
||||
yield parser
|
||||
src/paperless/tests/parsers/test_text_parser.py (new file, 256 lines)
@@ -0,0 +1,256 @@
"""
|
||||
Tests for paperless.parsers.text.TextDocumentParser.
|
||||
|
||||
All tests use the context-manager protocol for parser lifecycle. Sample
|
||||
files are provided by session-scoped fixtures defined in conftest.py.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from paperless.parsers import ParserProtocol
|
||||
from paperless.parsers.text import TextDocumentParser
|
||||
|
||||
|
||||
class TestTextParserProtocol:
|
||||
"""Verify that TextDocumentParser satisfies the ParserProtocol contract."""
|
||||
|
||||
def test_isinstance_satisfies_protocol(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
) -> None:
|
||||
assert isinstance(text_parser, ParserProtocol)
|
||||
|
||||
def test_class_attributes_present(self) -> None:
|
||||
assert isinstance(TextDocumentParser.name, str) and TextDocumentParser.name
|
||||
assert (
|
||||
isinstance(TextDocumentParser.version, str) and TextDocumentParser.version
|
||||
)
|
||||
assert isinstance(TextDocumentParser.author, str) and TextDocumentParser.author
|
||||
assert isinstance(TextDocumentParser.url, str) and TextDocumentParser.url
|
||||
|
||||
def test_supported_mime_types_returns_dict(self) -> None:
|
||||
mime_types = TextDocumentParser.supported_mime_types()
|
||||
assert isinstance(mime_types, dict)
|
||||
assert "text/plain" in mime_types
|
||||
assert "text/csv" in mime_types
|
||||
assert "application/csv" in mime_types
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("mime_type", "expected"),
|
||||
[
|
||||
("text/plain", 10),
|
||||
("text/csv", 10),
|
||||
("application/csv", 10),
|
||||
("application/pdf", None),
|
||||
("image/png", None),
|
||||
],
|
||||
)
|
||||
def test_score(self, mime_type: str, expected: int | None) -> None:
|
||||
assert TextDocumentParser.score(mime_type, "file.txt") == expected
|
||||
|
||||
def test_can_produce_archive_is_false(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
) -> None:
|
||||
assert text_parser.can_produce_archive is False
|
||||
|
||||
def test_requires_pdf_rendition_is_false(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
) -> None:
|
||||
assert text_parser.requires_pdf_rendition is False
|
||||
|
||||
|
||||
class TestTextParserLifecycle:
|
||||
"""Verify context-manager behaviour and temporary directory cleanup."""
|
||||
|
||||
def test_context_manager_cleans_up_tempdir(self) -> None:
|
||||
with TextDocumentParser() as parser:
|
||||
tempdir = parser._tempdir
|
||||
assert tempdir.exists()
|
||||
assert not tempdir.exists()
|
||||
|
||||
def test_context_manager_cleans_up_after_exception(self) -> None:
|
||||
tempdir: Path | None = None
|
||||
with pytest.raises(RuntimeError):
|
||||
with TextDocumentParser() as parser:
|
||||
tempdir = parser._tempdir
|
||||
raise RuntimeError("boom")
|
||||
assert tempdir is not None
|
||||
assert not tempdir.exists()
|
||||
|
||||
|
||||
class TestTextParserParse:
|
||||
"""Verify parse() and the result accessors."""
|
||||
|
||||
def test_parse_valid_utf8(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
text_parser.parse(sample_txt_file, "text/plain")
|
||||
|
||||
assert text_parser.get_text() == "This is a test file.\n"
|
||||
|
||||
def test_parse_returns_none_for_archive_path(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
text_parser.parse(sample_txt_file, "text/plain")
|
||||
|
||||
assert text_parser.get_archive_path() is None
|
||||
|
||||
def test_parse_returns_none_for_date(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
text_parser.parse(sample_txt_file, "text/plain")
|
||||
|
||||
assert text_parser.get_date() is None
|
||||
|
||||
def test_parse_invalid_utf8_bytes_replaced(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
malformed_txt_file: Path,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- A text file containing invalid UTF-8 byte sequences
|
||||
WHEN:
|
||||
- The file is parsed
|
||||
THEN:
|
||||
- Parsing succeeds
|
||||
- Invalid bytes are replaced with the Unicode replacement character
|
||||
"""
|
||||
text_parser.parse(malformed_txt_file, "text/plain")
|
||||
|
||||
assert text_parser.get_text() == "Pantothens\ufffdure\n"
|
||||
|
||||
def test_get_text_none_before_parse(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
) -> None:
|
||||
assert text_parser.get_text() is None
|
||||
|
||||
|
||||
class TestTextParserThumbnail:
|
||||
"""Verify thumbnail generation."""
|
||||
|
||||
def test_thumbnail_exists_and_is_file(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
thumb = text_parser.get_thumbnail(sample_txt_file, "text/plain")
|
||||
|
||||
assert thumb.exists()
|
||||
assert thumb.is_file()
|
||||
|
||||
def test_thumbnail_large_file_does_not_read_all(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN:
|
||||
- A text file larger than 50 MB
|
||||
WHEN:
|
||||
- A thumbnail is requested
|
||||
THEN:
|
||||
- The thumbnail is generated without loading the full file
|
||||
"""
|
||||
with tempfile.NamedTemporaryFile(
|
||||
delete=False,
|
||||
mode="w",
|
||||
encoding="utf-8",
|
||||
suffix=".txt",
|
||||
) as tmp:
|
||||
tmp.write("A" * (51 * 1024 * 1024))
|
||||
large_file = Path(tmp.name)
|
||||
|
||||
try:
|
||||
thumb = text_parser.get_thumbnail(large_file, "text/plain")
|
||||
assert thumb.exists()
|
||||
assert thumb.is_file()
|
||||
finally:
|
||||
large_file.unlink(missing_ok=True)
|
||||
|
||||
def test_get_page_count_returns_none(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
assert text_parser.get_page_count(sample_txt_file, "text/plain") is None
|
||||
|
||||
|
||||
class TestTextParserMetadata:
|
||||
"""Verify extract_metadata behaviour."""
|
||||
|
||||
def test_extract_metadata_returns_empty_list(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
result = text_parser.extract_metadata(sample_txt_file, "text/plain")
|
||||
|
||||
assert result == []
|
||||
|
||||
def test_extract_metadata_returns_list_type(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
result = text_parser.extract_metadata(sample_txt_file, "text/plain")
|
||||
|
||||
assert isinstance(result, list)
|
||||
|
||||
def test_extract_metadata_ignores_mime_type(
|
||||
self,
|
||||
text_parser: TextDocumentParser,
|
||||
sample_txt_file: Path,
|
||||
) -> None:
|
||||
"""extract_metadata returns [] regardless of the mime_type argument."""
|
||||
assert text_parser.extract_metadata(sample_txt_file, "application/pdf") == []
|
||||
assert text_parser.extract_metadata(sample_txt_file, "text/csv") == []
|
||||
|
||||
|
||||
class TestTextParserRegistry:
|
||||
"""Verify that TextDocumentParser is registered by default."""
|
||||
|
||||
def test_registered_in_defaults(self) -> None:
|
||||
from paperless.parsers.registry import ParserRegistry
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_defaults()
|
||||
|
||||
assert TextDocumentParser in registry._builtins
|
||||
|
||||
def test_get_parser_for_text_plain(self) -> None:
|
||||
from paperless.parsers.registry import get_parser_registry
|
||||
|
||||
registry = get_parser_registry()
|
||||
parser_cls = registry.get_parser_for_file("text/plain", "doc.txt")
|
||||
|
||||
assert parser_cls is TextDocumentParser
|
||||
|
||||
def test_get_parser_for_text_csv(self) -> None:
|
||||
from paperless.parsers.registry import get_parser_registry
|
||||
|
||||
registry = get_parser_registry()
|
||||
parser_cls = registry.get_parser_for_file("text/csv", "data.csv")
|
||||
|
||||
assert parser_cls is TextDocumentParser
|
||||
|
||||
def test_get_parser_for_unknown_type_returns_none(self) -> None:
|
||||
from paperless.parsers.registry import get_parser_registry
|
||||
|
||||
registry = get_parser_registry()
|
||||
parser_cls = registry.get_parser_for_file("application/pdf", "doc.pdf")
|
||||
|
||||
assert parser_cls is None
|
||||
src/paperless/tests/test_registry.py (new file, 714 lines)
@@ -0,0 +1,714 @@
"""
|
||||
Tests for :mod:`paperless.parsers` (ParserProtocol) and
|
||||
:mod:`paperless.parsers.registry` (ParserRegistry + module-level helpers).
|
||||
|
||||
All tests use pytest-style functions/classes — no unittest.TestCase.
|
||||
The ``clean_registry`` fixture ensures complete isolation between tests by
|
||||
resetting the module-level singleton before and after every test.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from importlib.metadata import EntryPoint
|
||||
from pathlib import Path
|
||||
from typing import Self
|
||||
from unittest.mock import MagicMock
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
from paperless.parsers import ParserProtocol
|
||||
from paperless.parsers.registry import ParserRegistry
|
||||
from paperless.parsers.registry import get_parser_registry
|
||||
from paperless.parsers.registry import init_builtin_parsers
|
||||
from paperless.parsers.registry import reset_parser_registry
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def dummy_parser_cls() -> type:
|
||||
"""Return a class that fully satisfies :class:`ParserProtocol`.
|
||||
|
||||
GIVEN: A need to exercise registry and Protocol logic with a minimal
|
||||
but complete parser.
|
||||
WHEN: A test requests this fixture.
|
||||
THEN: A class with all required attributes and methods is returned.
|
||||
"""
|
||||
|
||||
class DummyParser:
|
||||
name = "dummy-parser"
|
||||
version = "0.1.0"
|
||||
author = "Test Author"
|
||||
url = "https://example.com/dummy-parser"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls) -> dict[str, str]:
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(
|
||||
cls,
|
||||
mime_type: str,
|
||||
filename: str,
|
||||
path: Path | None = None,
|
||||
) -> int | None:
|
||||
return 10
|
||||
|
||||
@property
|
||||
def can_produce_archive(self) -> bool:
|
||||
return False
|
||||
|
||||
@property
|
||||
def requires_pdf_rendition(self) -> bool:
|
||||
return False
|
||||
|
||||
def parse(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
*,
|
||||
produce_archive: bool = True,
|
||||
) -> None:
|
||||
"""
|
||||
Required to exist, but doesn't need to do anything
|
||||
"""
|
||||
|
||||
def get_text(self) -> str | None:
|
||||
return None
|
||||
|
||||
def get_date(self) -> None:
|
||||
return None
|
||||
|
||||
def get_archive_path(self) -> Path | None:
|
||||
return None
|
||||
|
||||
def get_thumbnail(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
) -> Path:
|
||||
return Path("/tmp/thumbnail.webp")
|
||||
|
||||
def get_page_count(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
) -> int | None:
|
||||
return None
|
||||
|
||||
def extract_metadata(
|
||||
self,
|
||||
document_path: Path,
|
||||
mime_type: str,
|
||||
) -> list:
|
||||
return []
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb) -> None:
|
||||
"""
|
||||
Required to exist, but doesn't need to do anything
|
||||
"""
|
||||
|
||||
return DummyParser
|
||||
|
||||
|
||||
class TestParserProtocol:
|
||||
"""Verify runtime isinstance() checks against ParserProtocol."""
|
||||
|
||||
def test_compliant_class_instance_passes_isinstance(
|
||||
self,
|
||||
dummy_parser_cls: type,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A class that implements every method required by ParserProtocol.
|
||||
WHEN: isinstance() is called with the Protocol.
|
||||
THEN: The check passes (returns True).
|
||||
"""
|
||||
instance = dummy_parser_cls()
|
||||
assert isinstance(instance, ParserProtocol)
|
||||
|
||||
def test_non_compliant_class_instance_fails_isinstance(self) -> None:
|
||||
"""
|
||||
GIVEN: A plain class with no parser-related methods.
|
||||
WHEN: isinstance() is called with ParserProtocol.
|
||||
THEN: The check fails (returns False).
|
||||
"""
|
||||
|
||||
class Unrelated:
|
||||
pass
|
||||
|
||||
assert not isinstance(Unrelated(), ParserProtocol)
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"missing_method",
|
||||
[
|
||||
pytest.param("parse", id="missing-parse"),
|
||||
pytest.param("get_text", id="missing-get_text"),
|
||||
pytest.param("get_thumbnail", id="missing-get_thumbnail"),
|
||||
pytest.param("__enter__", id="missing-__enter__"),
|
||||
pytest.param("__exit__", id="missing-__exit__"),
|
||||
],
|
||||
)
|
||||
def test_partial_compliant_fails_isinstance(
|
||||
self,
|
||||
dummy_parser_cls: type,
|
||||
missing_method: str,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A class that satisfies ParserProtocol except for one method.
|
||||
WHEN: isinstance() is called with ParserProtocol.
|
||||
THEN: The check fails because the Protocol is not fully satisfied.
|
||||
"""
|
||||
# Create a subclass and delete the specified method to break compliance.
|
||||
partial_cls = type(
|
||||
"PartialParser",
|
||||
(dummy_parser_cls,),
|
||||
{missing_method: None}, # Replace with None — not callable
|
||||
)
|
||||
assert not isinstance(partial_cls(), ParserProtocol)
|
||||
|
||||
|
||||
class TestRegistrySingleton:
|
||||
"""Verify the module-level singleton lifecycle functions."""
|
||||
|
||||
def test_get_parser_registry_returns_instance(self) -> None:
|
||||
"""
|
||||
GIVEN: No registry has been created yet.
|
||||
WHEN: get_parser_registry() is called.
|
||||
THEN: A ParserRegistry instance is returned.
|
||||
"""
|
||||
registry = get_parser_registry()
|
||||
assert isinstance(registry, ParserRegistry)
|
||||
|
||||
def test_get_parser_registry_same_instance_on_repeated_calls(self) -> None:
|
||||
"""
|
||||
GIVEN: A registry instance was created by a prior call.
|
||||
WHEN: get_parser_registry() is called a second time.
|
||||
THEN: The exact same object (identity) is returned.
|
||||
"""
|
||||
first = get_parser_registry()
|
||||
second = get_parser_registry()
|
||||
assert first is second
|
||||
|
||||
def test_reset_parser_registry_gives_fresh_instance(self) -> None:
|
||||
"""
|
||||
GIVEN: A registry instance already exists.
|
||||
WHEN: reset_parser_registry() is called and then get_parser_registry()
|
||||
is called again.
|
||||
THEN: A new, distinct registry instance is returned.
|
||||
"""
|
||||
first = get_parser_registry()
|
||||
reset_parser_registry()
|
||||
second = get_parser_registry()
|
||||
assert first is not second
|
||||
|
||||
def test_init_builtin_parsers_does_not_run_discover(
|
||||
self,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: discover() would raise an exception if called.
|
||||
WHEN: init_builtin_parsers() is called.
|
||||
THEN: No exception is raised, confirming discover() was not invoked.
|
||||
"""
|
||||
|
||||
def exploding_discover(self) -> None:
|
||||
raise RuntimeError(
|
||||
"discover() must not be called from init_builtin_parsers",
|
||||
)
|
||||
|
||||
monkeypatch.setattr(ParserRegistry, "discover", exploding_discover)
|
||||
|
||||
# Should complete without raising.
|
||||
init_builtin_parsers()
|
||||
|
||||
def test_init_builtin_parsers_idempotent(self) -> None:
|
||||
"""
|
||||
GIVEN: init_builtin_parsers() has already been called once.
|
||||
WHEN: init_builtin_parsers() is called a second time.
|
||||
THEN: No error is raised and the same registry instance is reused.
|
||||
"""
|
||||
init_builtin_parsers()
|
||||
# Capture the registry created by the first call.
|
||||
import paperless.parsers.registry as reg_module
|
||||
|
||||
first_registry = reg_module._registry
|
||||
|
||||
init_builtin_parsers()
|
||||
|
||||
assert reg_module._registry is first_registry
|
||||
|
||||
|
||||
class TestParserRegistryGetParserForFile:
|
||||
"""Verify parser selection logic in get_parser_for_file()."""
|
||||
|
||||
def test_returns_none_when_no_parsers_registered(self) -> None:
|
||||
"""
|
||||
GIVEN: A registry with no parsers registered.
|
||||
WHEN: get_parser_for_file() is called for any MIME type.
|
||||
THEN: None is returned.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
result = registry.get_parser_for_file("text/plain", "doc.txt")
|
||||
assert result is None
|
||||
|
||||
def test_returns_none_for_unsupported_mime_type(
|
||||
self,
|
||||
dummy_parser_cls: type,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A registry with a parser that supports only 'text/plain'.
|
||||
WHEN: get_parser_for_file() is called with 'application/pdf'.
|
||||
THEN: None is returned.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(dummy_parser_cls)
|
||||
result = registry.get_parser_for_file("application/pdf", "file.pdf")
|
||||
assert result is None
|
||||
|
||||
def test_returns_parser_for_supported_mime_type(
|
||||
self,
|
||||
dummy_parser_cls: type,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A registry with a parser registered for 'text/plain'.
|
||||
WHEN: get_parser_for_file() is called with 'text/plain'.
|
||||
THEN: The registered parser class is returned.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(dummy_parser_cls)
|
||||
result = registry.get_parser_for_file("text/plain", "readme.txt")
|
||||
assert result is dummy_parser_cls
|
||||
|
||||
def test_highest_score_wins(self) -> None:
|
||||
"""
|
||||
GIVEN: Two parsers both supporting 'text/plain' with scores 5 and 20.
|
||||
WHEN: get_parser_for_file() is called for 'text/plain'.
|
||||
THEN: The parser with score 20 is returned.
|
||||
"""
|
||||
|
||||
class LowScoreParser:
|
||||
name = "low"
|
||||
version = "1.0"
|
||||
author = "A"
|
||||
url = "https://example.com/low"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 5
|
||||
|
||||
class HighScoreParser:
|
||||
name = "high"
|
||||
version = "1.0"
|
||||
author = "B"
|
||||
url = "https://example.com/high"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 20
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(LowScoreParser)
|
||||
registry.register_builtin(HighScoreParser)
|
||||
result = registry.get_parser_for_file("text/plain", "readme.txt")
|
||||
assert result is HighScoreParser
|
||||
|
||||
def test_parser_returning_none_score_is_skipped(self) -> None:
|
||||
"""
|
||||
GIVEN: A parser that returns None from score() for the given file.
|
||||
WHEN: get_parser_for_file() is called.
|
||||
THEN: That parser is skipped and None is returned (no other candidates).
|
||||
"""
|
||||
|
||||
class DecliningParser:
|
||||
name = "declining"
|
||||
version = "1.0"
|
||||
author = "A"
|
||||
url = "https://example.com"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return None # Explicitly declines
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(DecliningParser)
|
||||
result = registry.get_parser_for_file("text/plain", "readme.txt")
|
||||
assert result is None
|
||||
|
||||
def test_all_parsers_decline_returns_none(self) -> None:
|
||||
"""
|
||||
GIVEN: Multiple parsers that all return None from score().
|
||||
WHEN: get_parser_for_file() is called.
|
||||
THEN: None is returned.
|
||||
"""
|
||||
|
||||
class AlwaysDeclines:
|
||||
name = "declines"
|
||||
version = "1.0"
|
||||
author = "A"
|
||||
url = "https://example.com"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return None
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(AlwaysDeclines)
|
||||
registry._external.append(AlwaysDeclines)
|
||||
result = registry.get_parser_for_file("text/plain", "file.txt")
|
||||
assert result is None
|
||||
|
||||
def test_external_parser_beats_builtin_same_score(self) -> None:
|
||||
"""
|
||||
GIVEN: An external and a built-in parser both returning score 10.
|
||||
WHEN: get_parser_for_file() is called.
|
||||
THEN: The external parser wins because externals are evaluated first
|
||||
and the first-seen-wins policy applies at equal scores.
|
||||
"""
|
||||
|
||||
class BuiltinParser:
|
||||
name = "builtin"
|
||||
version = "1.0"
|
||||
author = "Core"
|
||||
url = "https://example.com/builtin"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 10
|
||||
|
||||
class ExternalParser:
|
||||
name = "external"
|
||||
version = "2.0"
|
||||
author = "Third Party"
|
||||
url = "https://example.com/external"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 10
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(BuiltinParser)
|
||||
registry._external.append(ExternalParser)
|
||||
result = registry.get_parser_for_file("text/plain", "file.txt")
|
||||
assert result is ExternalParser
|
||||
|
||||
def test_builtin_wins_when_external_declines(self) -> None:
|
||||
"""
|
||||
GIVEN: An external parser that declines (score None) and a built-in
|
||||
that returns score 5.
|
||||
WHEN: get_parser_for_file() is called.
|
||||
THEN: The built-in parser is returned.
|
||||
"""
|
||||
|
||||
class DecliningExternal:
|
||||
name = "declining-external"
|
||||
version = "1.0"
|
||||
author = "Third Party"
|
||||
url = "https://example.com/declining"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return None
|
||||
|
||||
class AcceptingBuiltin:
|
||||
name = "accepting-builtin"
|
||||
version = "1.0"
|
||||
author = "Core"
|
||||
url = "https://example.com/accepting"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 5
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(AcceptingBuiltin)
|
||||
registry._external.append(DecliningExternal)
|
||||
result = registry.get_parser_for_file("text/plain", "file.txt")
|
||||
assert result is AcceptingBuiltin
|
||||
|
||||
|
||||
class TestDiscover:
|
||||
"""Verify entrypoint discovery in ParserRegistry.discover()."""
|
||||
|
||||
def test_discover_with_no_entrypoints(self) -> None:
|
||||
"""
|
||||
GIVEN: No entrypoints are registered under 'paperless_ngx.parsers'.
|
||||
WHEN: discover() is called.
|
||||
THEN: _external remains empty and no errors are raised.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
|
||||
with patch(
|
||||
"paperless.parsers.registry.entry_points",
|
||||
return_value=[],
|
||||
):
|
||||
registry.discover()
|
||||
|
||||
assert registry._external == []
|
||||
|
||||
def test_discover_adds_valid_external_parser(self) -> None:
|
||||
"""
|
||||
GIVEN: One valid entrypoint whose loaded class has all required attrs.
|
||||
WHEN: discover() is called.
|
||||
THEN: The class is appended to _external.
|
||||
"""
|
||||
|
||||
class ValidExternal:
|
||||
name = "valid-external"
|
||||
version = "3.0.0"
|
||||
author = "Someone"
|
||||
url = "https://example.com/valid"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"application/pdf": ".pdf"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 5
|
||||
|
||||
mock_ep = MagicMock(spec=EntryPoint)
|
||||
mock_ep.name = "valid_external"
|
||||
mock_ep.load.return_value = ValidExternal
|
||||
|
||||
registry = ParserRegistry()
|
||||
|
||||
with patch(
|
||||
"paperless.parsers.registry.entry_points",
|
||||
return_value=[mock_ep],
|
||||
):
|
||||
registry.discover()
|
||||
|
||||
assert ValidExternal in registry._external
|
||||
|
||||
def test_discover_skips_entrypoint_with_load_error(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: An entrypoint whose load() method raises ImportError.
|
||||
WHEN: discover() is called.
|
||||
THEN: The entrypoint is skipped, an error is logged, and _external
|
||||
remains empty.
|
||||
"""
|
||||
mock_ep = MagicMock(spec=EntryPoint)
|
||||
mock_ep.name = "broken_ep"
|
||||
mock_ep.load.side_effect = ImportError("missing dependency")
|
||||
|
||||
registry = ParserRegistry()
|
||||
|
||||
with caplog.at_level(logging.ERROR, logger="paperless.parsers.registry"):
|
||||
with patch(
|
||||
"paperless.parsers.registry.entry_points",
|
||||
return_value=[mock_ep],
|
||||
):
|
||||
registry.discover()
|
||||
|
||||
assert registry._external == []
|
||||
assert any(
|
||||
"broken_ep" in record.message
|
||||
for record in caplog.records
|
||||
if record.levelno >= logging.ERROR
|
||||
)
|
||||
|
||||
def test_discover_skips_entrypoint_with_missing_attrs(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A class loaded from an entrypoint that is missing the 'score'
|
||||
attribute.
|
||||
WHEN: discover() is called.
|
||||
THEN: The entrypoint is skipped, a warning is logged, and _external
|
||||
remains empty.
|
||||
"""
|
||||
|
||||
class MissingScore:
|
||||
name = "missing-score"
|
||||
version = "1.0"
|
||||
author = "Someone"
|
||||
url = "https://example.com"
|
||||
|
||||
# 'score' classmethod is intentionally absent.
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"text/plain": ".txt"}
|
||||
|
||||
mock_ep = MagicMock(spec=EntryPoint)
|
||||
mock_ep.name = "missing_score_ep"
|
||||
mock_ep.load.return_value = MissingScore
|
||||
|
||||
registry = ParserRegistry()
|
||||
|
||||
with caplog.at_level(logging.WARNING, logger="paperless.parsers.registry"):
|
||||
with patch(
|
||||
"paperless.parsers.registry.entry_points",
|
||||
return_value=[mock_ep],
|
||||
):
|
||||
registry.discover()
|
||||
|
||||
assert registry._external == []
|
||||
assert any(
|
||||
"missing_score_ep" in record.message
|
||||
for record in caplog.records
|
||||
if record.levelno >= logging.WARNING
|
||||
)
|
||||
|
||||
def test_discover_logs_loaded_parser_info(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A valid entrypoint that loads successfully.
|
||||
WHEN: discover() is called.
|
||||
THEN: An INFO log message is emitted containing the parser name,
|
||||
version, author, and entrypoint name.
|
||||
"""
|
||||
|
||||
class LoggableParser:
|
||||
name = "loggable"
|
||||
version = "4.2.0"
|
||||
author = "Log Tester"
|
||||
url = "https://example.com/loggable"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {"image/png": ".png"}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return 1
|
||||
|
||||
mock_ep = MagicMock(spec=EntryPoint)
|
||||
mock_ep.name = "loggable_ep"
|
||||
mock_ep.load.return_value = LoggableParser
|
||||
|
||||
registry = ParserRegistry()
|
||||
|
||||
with caplog.at_level(logging.INFO, logger="paperless.parsers.registry"):
|
||||
with patch(
|
||||
"paperless.parsers.registry.entry_points",
|
||||
return_value=[mock_ep],
|
||||
):
|
||||
registry.discover()
|
||||
|
||||
info_messages = " ".join(
|
||||
r.message for r in caplog.records if r.levelno == logging.INFO
|
||||
)
|
||||
assert "loggable" in info_messages
|
||||
assert "4.2.0" in info_messages
|
||||
assert "Log Tester" in info_messages
|
||||
assert "loggable_ep" in info_messages
|
||||
|
||||
|
||||
class TestLogSummary:
|
||||
"""Verify log output from ParserRegistry.log_summary()."""
|
||||
|
||||
def test_log_summary_with_no_external_parsers(
|
||||
self,
|
||||
dummy_parser_cls: type,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A registry with one built-in parser and no external parsers.
|
||||
WHEN: log_summary() is called.
|
||||
THEN: The built-in parser name appears in the logs.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
registry.register_builtin(dummy_parser_cls)
|
||||
|
||||
with caplog.at_level(logging.INFO, logger="paperless.parsers.registry"):
|
||||
registry.log_summary()
|
||||
|
||||
all_messages = " ".join(r.message for r in caplog.records)
|
||||
assert dummy_parser_cls.name in all_messages
|
||||
|
||||
def test_log_summary_with_external_parsers(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A registry with one external parser registered.
|
||||
WHEN: log_summary() is called.
|
||||
THEN: The external parser name, version, author, and url appear in
|
||||
the log output.
|
||||
"""
|
||||
|
||||
class ExtParser:
|
||||
name = "ext-parser"
|
||||
version = "9.9.9"
|
||||
author = "Ext Corp"
|
||||
url = "https://ext.example.com"
|
||||
|
||||
@classmethod
|
||||
def supported_mime_types(cls):
|
||||
return {}
|
||||
|
||||
@classmethod
|
||||
def score(cls, mime_type, filename, path=None):
|
||||
return None
|
||||
|
||||
registry = ParserRegistry()
|
||||
registry._external.append(ExtParser)
|
||||
|
||||
with caplog.at_level(logging.INFO, logger="paperless.parsers.registry"):
|
||||
registry.log_summary()
|
||||
|
||||
all_messages = " ".join(r.message for r in caplog.records)
|
||||
assert "ext-parser" in all_messages
|
||||
assert "9.9.9" in all_messages
|
||||
assert "Ext Corp" in all_messages
|
||||
assert "https://ext.example.com" in all_messages
|
||||
|
||||
def test_log_summary_logs_no_third_party_message_when_none(
|
||||
self,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""
|
||||
GIVEN: A registry with no external parsers.
|
||||
WHEN: log_summary() is called.
|
||||
THEN: A message containing 'No third-party parsers discovered.' is
|
||||
logged.
|
||||
"""
|
||||
registry = ParserRegistry()
|
||||
|
||||
with caplog.at_level(logging.INFO, logger="paperless.parsers.registry"):
|
||||
registry.log_summary()
|
||||
|
||||
all_messages = " ".join(r.message for r in caplog.records)
|
||||
assert "No third-party parsers discovered." in all_messages
|
||||
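The ParserProtocol definition itself is not part of this diff. Inferred from the isinstance() checks exercised above, it is presumably a runtime-checkable typing.Protocol along these lines (a sketch only, not the actual definition):

from __future__ import annotations

from pathlib import Path
from typing import Protocol
from typing import runtime_checkable


@runtime_checkable
class ParserProtocol(Protocol):
    # isinstance() against a runtime-checkable Protocol only verifies that the
    # members exist (and, on newer Pythons, that method members are callable),
    # which is what test_partial_compliant_fails_isinstance relies on.
    def parse(self, document_path: Path, mime_type: str, *, produce_archive: bool = True) -> None: ...
    def get_text(self) -> str | None: ...
    def get_thumbnail(self, document_path: Path, mime_type: str) -> Path: ...
    def __enter__(self): ...
    def __exit__(self, exc_type, exc_val, exc_tb) -> None: ...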
@@ -1,186 +1,175 @@
-from unittest import mock
-
+import pytest
 from channels.layers import get_channel_layer
 from channels.testing import WebsocketCommunicator
-from django.test import TestCase
 from django.test import override_settings
+from pytest_mock import MockerFixture
 
 from documents.plugins.helpers import DocumentsStatusManager
 from documents.plugins.helpers import ProgressManager
 from documents.plugins.helpers import ProgressStatusOptions
 from paperless.asgi import application
 
 TEST_CHANNEL_LAYERS = {
     "default": {
         "BACKEND": "channels.layers.InMemoryChannelLayer",
     },
 }
 
 @override_settings(CHANNEL_LAYERS=TEST_CHANNEL_LAYERS)
-class TestWebSockets(TestCase):
+class TestWebSockets:
+    @pytest.fixture(autouse=True)
+    def anyio_backend(self) -> str:
+        return "asyncio"
+
+    @pytest.mark.anyio
     async def test_no_auth(self) -> None:
         communicator = WebsocketCommunicator(application, "/ws/status/")
         connected, _ = await communicator.connect()
-        self.assertFalse(connected)
+        assert not connected
         await communicator.disconnect()
 
-    @mock.patch("paperless.consumers.StatusConsumer.close")
-    @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_close_on_no_auth(self, _authenticated, mock_close) -> None:
-        _authenticated.return_value = True
+    @pytest.mark.anyio
+    async def test_close_on_no_auth(self, mocker: MockerFixture) -> None:
+        mock_auth = mocker.patch(
+            "paperless.consumers.StatusConsumer._authenticated",
+            return_value=True,
+        )
+        mock_close = mocker.patch(
+            "paperless.consumers.StatusConsumer.close",
+            new_callable=mocker.AsyncMock,
+        )
 
         communicator = WebsocketCommunicator(application, "/ws/status/")
         connected, _ = await communicator.connect()
-        self.assertTrue(connected)
-
-        message = {"type": "status_update", "data": {"task_id": "test"}}
-
-        _authenticated.return_value = False
+        assert connected
 
+        mock_auth.return_value = False
         channel_layer = get_channel_layer()
+        assert channel_layer is not None
+
         await channel_layer.group_send(
             "status_updates",
-            message,
+            {"type": "status_update", "data": {"task_id": "test"}},
         )
         await communicator.receive_nothing()
-
-        mock_close.assert_called_once()
+        mock_close.assert_awaited_once()
         mock_close.reset_mock()
 
-        message = {
-            "type": "document_updated",
-            "data": {"document_id": 10, "modified": "2026-02-17T00:00:00Z"},
-        }
-
         await channel_layer.group_send(
             "status_updates",
-            message,
+            {
+                "type": "document_updated",
+                "data": {"document_id": 10, "modified": "2026-02-17T00:00:00Z"},
+            },
        )
         await communicator.receive_nothing()
-
-        mock_close.assert_called_once()
+        mock_close.assert_awaited_once()
         mock_close.reset_mock()
 
-        message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}
-
         await channel_layer.group_send(
             "status_updates",
-            message,
+            {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}},
         )
         await communicator.receive_nothing()
+        mock_close.assert_awaited_once()
 
-        mock_close.assert_called_once()
-
-    @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_auth(self, _authenticated) -> None:
-        _authenticated.return_value = True
-
-        communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, _ = await communicator.connect()
-        self.assertTrue(connected)
-
-        await communicator.disconnect()
-
-    @mock.patch("paperless.consumers.StatusConsumer._authenticated")
-    async def test_receive_status_update(self, _authenticated) -> None:
-        _authenticated.return_value = True
-
-        communicator = WebsocketCommunicator(application, "/ws/status/")
-        connected, _ = await communicator.connect()
-        self.assertTrue(connected)
-
-        message = {"type": "status_update", "data": {"task_id": "test"}}
-
-        channel_layer = get_channel_layer()
-        await channel_layer.group_send(
-            "status_updates",
-            message,
+    @pytest.mark.anyio
+    async def test_auth(self, mocker: MockerFixture) -> None:
+        mocker.patch(
+            "paperless.consumers.StatusConsumer._authenticated",
+            return_value=True,
         )
 
-        response = await communicator.receive_json_from()
-
-        self.assertEqual(response, message)
+        communicator = WebsocketCommunicator(application, "/ws/status/")
+        connected, _ = await communicator.connect()
+        assert connected
 
         await communicator.disconnect()
 
-    async def test_status_update_check_perms(self) -> None:
+    @pytest.mark.anyio
+    async def test_receive_status_update(self, mocker: MockerFixture) -> None:
+        mocker.patch(
+            "paperless.consumers.StatusConsumer._authenticated",
+            return_value=True,
+        )
+
         communicator = WebsocketCommunicator(application, "/ws/status/")
-
-        communicator.scope["user"] = mock.Mock()
-        communicator.scope["user"].is_authenticated = True
-        communicator.scope["user"].is_superuser = False
-        communicator.scope["user"].id = 1
-
         connected, _ = await communicator.connect()
-        self.assertTrue(connected)
+        assert connected
 
         message = {"type": "status_update", "data": {"task_id": "test"}}
         channel_layer = get_channel_layer()
+        assert channel_layer is not None
+        await channel_layer.group_send("status_updates", message)
+
+        assert await communicator.receive_json_from() == message
+
+        await communicator.disconnect()
+
+    @pytest.mark.anyio
+    async def test_status_update_check_perms(self, mocker: MockerFixture) -> None:
+        user = mocker.MagicMock()
+        user.is_authenticated = True
+        user.is_superuser = False
+        user.id = 1
+
+        communicator = WebsocketCommunicator(application, "/ws/status/")
+        communicator.scope["user"] = user  # type: ignore[typeddict-unknown-key]
+        connected, _ = await communicator.connect()
+        assert connected
+
+        channel_layer = get_channel_layer()
+        assert channel_layer is not None
 
-        # Test as owner
+        # Message received as owner
         message = {"type": "status_update", "data": {"task_id": "test", "owner_id": 1}}
-        channel_layer = get_channel_layer()
-        await channel_layer.group_send(
-            "status_updates",
-            message,
-        )
-        response = await communicator.receive_json_from()
-        self.assertEqual(response, message)
+        await channel_layer.group_send("status_updates", message)
+        assert await communicator.receive_json_from() == message
 
-        # Test with a group that the user belongs to
         communicator.scope["user"].groups.filter.return_value.exists.return_value = True
+        # Message received via group membership
|
||||
user.groups.filter.return_value.aexists = mocker.AsyncMock(return_value=True)
|
||||
message = {
|
||||
"type": "status_update",
|
||||
"data": {"task_id": "test", "owner_id": 2, "groups_can_view": [1]},
|
||||
}
|
||||
channel_layer = get_channel_layer()
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
response = await communicator.receive_json_from()
|
||||
self.assertEqual(response, message)
|
||||
await channel_layer.group_send("status_updates", message)
|
||||
assert await communicator.receive_json_from() == message
|
||||
|
||||
# Test with a different owner_id
|
||||
# Message not received for different owner with no group match
|
||||
user.groups.filter.return_value.aexists = mocker.AsyncMock(return_value=False)
|
||||
message = {"type": "status_update", "data": {"task_id": "test", "owner_id": 2}}
|
||||
channel_layer = get_channel_layer()
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
response = await communicator.receive_nothing()
|
||||
self.assertNotEqual(response, message)
|
||||
await channel_layer.group_send("status_updates", message)
|
||||
assert await communicator.receive_nothing()
|
||||
|
||||
await communicator.disconnect()
|
||||
|
||||
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
|
||||
async def test_receive_documents_deleted(self, _authenticated) -> None:
|
||||
_authenticated.return_value = True
|
||||
@pytest.mark.anyio
|
||||
async def test_receive_documents_deleted(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch(
|
||||
"paperless.consumers.StatusConsumer._authenticated",
|
||||
return_value=True,
|
||||
)
|
||||
|
||||
communicator = WebsocketCommunicator(application, "/ws/status/")
|
||||
connected, _ = await communicator.connect()
|
||||
self.assertTrue(connected)
|
||||
assert connected
|
||||
|
||||
message = {"type": "documents_deleted", "data": {"documents": [1, 2, 3]}}
|
||||
|
||||
channel_layer = get_channel_layer()
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
assert channel_layer is not None
|
||||
await channel_layer.group_send("status_updates", message)
|
||||
|
||||
response = await communicator.receive_json_from()
|
||||
|
||||
self.assertEqual(response, message)
|
||||
assert await communicator.receive_json_from() == message
|
||||
|
||||
await communicator.disconnect()
|
||||
|
||||
@mock.patch("paperless.consumers.StatusConsumer._can_view")
|
||||
@mock.patch("paperless.consumers.StatusConsumer._authenticated")
|
||||
async def test_receive_document_updated(self, _authenticated, _can_view) -> None:
|
||||
_authenticated.return_value = True
|
||||
_can_view.return_value = True
|
||||
@pytest.mark.anyio
|
||||
async def test_receive_document_updated(self, mocker: MockerFixture) -> None:
|
||||
mocker.patch(
|
||||
"paperless.consumers.StatusConsumer._authenticated",
|
||||
return_value=True,
|
||||
)
|
||||
mocker.patch(
|
||||
"paperless.consumers.StatusConsumer._can_view",
|
||||
return_value=True,
|
||||
)
|
||||
|
||||
communicator = WebsocketCommunicator(application, "/ws/status/")
|
||||
connected, _ = await communicator.connect()
|
||||
self.assertTrue(connected)
|
||||
assert connected
|
||||
|
||||
message = {
|
||||
"type": "document_updated",
|
||||
@@ -192,67 +181,52 @@ class TestWebSockets(TestCase):
|
||||
"groups_can_view": [],
|
||||
},
|
||||
}
|
||||
|
||||
channel_layer = get_channel_layer()
|
||||
assert channel_layer is not None
|
||||
await channel_layer.group_send(
|
||||
"status_updates",
|
||||
message,
|
||||
)
|
||||
await channel_layer.group_send("status_updates", message)
|
||||
|
||||
response = await communicator.receive_json_from()
|
||||
|
||||
self.assertEqual(response, message)
|
||||
assert await communicator.receive_json_from() == message
|
||||
|
||||
await communicator.disconnect()
|
||||
|
||||
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
|
||||
def test_manager_send_progress(self, mock_group_send) -> None:
|
||||
def test_manager_send_progress(self, mocker: MockerFixture) -> None:
|
||||
mock_group_send = mocker.patch(
|
||||
"channels.layers.InMemoryChannelLayer.group_send",
|
||||
)
|
||||
|
||||
with ProgressManager(task_id="test") as manager:
|
||||
manager.send_progress(
|
||||
ProgressStatusOptions.STARTED,
|
||||
"Test message",
|
||||
1,
|
||||
10,
|
||||
extra_args={
|
||||
"foo": "bar",
|
||||
},
|
||||
extra_args={"foo": "bar"},
|
||||
)
|
||||
|
||||
message = mock_group_send.call_args[0][1]
|
||||
|
||||
self.assertEqual(
|
||||
message,
|
||||
{
|
||||
"type": "status_update",
|
||||
"data": {
|
||||
"filename": None,
|
||||
"task_id": "test",
|
||||
"current_progress": 1,
|
||||
"max_progress": 10,
|
||||
"status": ProgressStatusOptions.STARTED,
|
||||
"message": "Test message",
|
||||
"foo": "bar",
|
||||
},
|
||||
assert mock_group_send.call_args[0][1] == {
|
||||
"type": "status_update",
|
||||
"data": {
|
||||
"filename": None,
|
||||
"task_id": "test",
|
||||
"current_progress": 1,
|
||||
"max_progress": 10,
|
||||
"status": ProgressStatusOptions.STARTED,
|
||||
"message": "Test message",
|
||||
"foo": "bar",
|
||||
},
|
||||
}
|
||||
|
||||
def test_manager_send_documents_deleted(self, mocker: MockerFixture) -> None:
|
||||
mock_group_send = mocker.patch(
|
||||
"channels.layers.InMemoryChannelLayer.group_send",
|
||||
)
|
||||
|
||||
@mock.patch("channels.layers.InMemoryChannelLayer.group_send")
|
||||
def test_manager_send_documents_deleted(
|
||||
self,
|
||||
mock_group_send: mock.MagicMock,
|
||||
) -> None:
|
||||
with DocumentsStatusManager() as manager:
|
||||
manager.send_documents_deleted([1, 2, 3])
|
||||
|
||||
message = mock_group_send.call_args[0][1]
|
||||
|
||||
self.assertEqual(
|
||||
message,
|
||||
{
|
||||
"type": "documents_deleted",
|
||||
"data": {
|
||||
"documents": [1, 2, 3],
|
||||
},
|
||||
assert mock_group_send.call_args[0][1] == {
|
||||
"type": "documents_deleted",
|
||||
"data": {
|
||||
"documents": [1, 2, 3],
|
||||
},
|
||||
)
|
||||
}
|
||||
|
||||
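For orientation, a minimal sketch of the broadcast pattern the tests above exercise; the group name and message shape come from the diff, while the synchronous wrapper around the channel layer is illustrative:

    from asgiref.sync import async_to_sync
    from channels.layers import get_channel_layer

    def broadcast_status(task_id: str) -> None:
        # Fan out to every consumer subscribed to "status_updates";
        # StatusConsumer then applies its own auth/permission checks per client.
        layer = get_channel_layer()
        async_to_sync(layer.group_send)(
            "status_updates",
            {"type": "status_update", "data": {"task_id": task_id}},
        )
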
@@ -9,6 +9,7 @@ from allauth.mfa.recovery_codes.internal.flows import auto_generate_recovery_cod
from allauth.mfa.totp.internal import auth as totp_auth
from allauth.socialaccount.adapter import get_adapter
from allauth.socialaccount.models import SocialAccount
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from django.contrib.staticfiles.storage import staticfiles_storage
@@ -56,17 +57,27 @@ class StandardPagination(PageNumberPagination):
page_size_query_param = "page_size"
max_page_size = 100000

def _get_api_version(self) -> int:
request = getattr(self, "request", None)
default_version = settings.REST_FRAMEWORK["DEFAULT_VERSION"]
return int(request.version if request else default_version)

def _should_include_all(self) -> bool:
# TODO: remove legacy `all` support when API v9 is dropped.
return self._get_api_version() < 10

def get_paginated_response(self, data):
response_data = [
("count", self.page.paginator.count),
("next", self.get_next_link()),
("previous", self.get_previous_link()),
]
if self._should_include_all():
response_data.append(("all", self.get_all_result_ids()))
response_data.append(("results", data))

return Response(
OrderedDict(
[
("count", self.page.paginator.count),
("next", self.get_next_link()),
("previous", self.get_previous_link()),
("all", self.get_all_result_ids()),
("results", data),
],
),
OrderedDict(response_data),
)

def get_all_result_ids(self):
@@ -87,11 +98,14 @@ class StandardPagination(PageNumberPagination):

def get_paginated_response_schema(self, schema):
response_schema = super().get_paginated_response_schema(schema)
response_schema["properties"]["all"] = {
"type": "array",
"example": "[1, 2, 3]",
"items": {"type": "integer"},
}
if self._should_include_all():
response_schema["properties"]["all"] = {
"type": "array",
"example": "[1, 2, 3]",
"items": {"type": "integer"},
}
else:
response_schema["properties"].pop("all", None)
return response_schema

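To make the version gate above concrete, a hedged client-side sketch; paperless-ngx selects the API version through the Accept header, and the endpoint, credentials, and assertions here are illustrative only:

    import requests

    BASE = "http://localhost:8000/api/documents/"
    AUTH = ("user", "pass")  # hypothetical credentials

    # Clients pinned to API v9 still receive the legacy `all` key.
    r9 = requests.get(BASE, auth=AUTH, headers={"Accept": "application/json; version=9"})
    assert "all" in r9.json()

    # From API v10 on the key is omitted; page through `results` and follow `next` instead.
    r10 = requests.get(BASE, auth=AUTH, headers={"Accept": "application/json; version=10"})
    assert "all" not in r10.json()
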
@@ -1,50 +0,0 @@
from pathlib import Path

from django.conf import settings
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont

from documents.parsers import DocumentParser


class TextDocumentParser(DocumentParser):
"""
This parser directly parses a text document (.txt, .md, or .csv)
"""

logging_name = "paperless.parsing.text"

def get_thumbnail(self, document_path: Path, mime_type, file_name=None) -> Path:
# Avoid reading entire file into memory
max_chars = 100_000
file_size_limit = 50 * 1024 * 1024

if document_path.stat().st_size > file_size_limit:
text = "[File too large to preview]"
else:
with Path(document_path).open("r", encoding="utf-8", errors="replace") as f:
text = f.read(max_chars)

img = Image.new("RGB", (500, 700), color="white")
draw = ImageDraw.Draw(img)
font = ImageFont.truetype(
font=settings.THUMBNAIL_FONT_NAME,
size=20,
layout_engine=ImageFont.Layout.BASIC,
)
draw.multiline_text((5, 5), text, font=font, fill="black", spacing=4)

out_path = self.tempdir / "thumb.webp"
img.save(out_path, format="WEBP")

return out_path

def parse(self, document_path, mime_type, file_name=None) -> None:
self.text = self.read_file_handle_unicode_errors(document_path)

def get_settings(self) -> None:
"""
This parser does not implement additional settings yet
"""
return None
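The parser above guards its thumbnail path with a size check before any read; as a standalone sketch of that bounded-read pattern (limits mirror the constants in the diff, the path is hypothetical):

    from pathlib import Path

    MAX_CHARS = 100_000
    SIZE_LIMIT = 50 * 1024 * 1024  # 50 MiB

    def preview_text(path: Path) -> str:
        # stat() first so an oversized file is never pulled into memory,
        # then cap how much of an acceptable file is actually read.
        if path.stat().st_size > SIZE_LIMIT:
            return "[File too large to preview]"
        with path.open("r", encoding="utf-8", errors="replace") as f:
            return f.read(MAX_CHARS)
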
@@ -1,7 +1,13 @@
def get_parser(*args, **kwargs):
from paperless_text.parsers import TextDocumentParser
from paperless.parsers.text import TextDocumentParser

return TextDocumentParser(*args, **kwargs)
# The new TextDocumentParser does not accept the legacy logging_group /
# progress_callback kwargs injected by the old signal-based consumer.
# These are dropped here; Phase 4 will replace this signal path with the
# new ParserRegistry so the shim can be removed at that point.
kwargs.pop("logging_group", None)
kwargs.pop("progress_callback", None)
return TextDocumentParser()


def text_consumer_declaration(sender, **kwargs):

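A minimal illustration of what the compatibility shim above guards against; the call below mimics a legacy signal-based call site and is hypothetical:

    # Legacy callers still pass kwargs the new parser's __init__ no longer accepts;
    # the shim pops them so construction succeeds either way.
    parser = get_parser(logging_group=None, progress_callback=lambda current, total: None)
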
@@ -1,30 +0,0 @@
from collections.abc import Generator
from pathlib import Path

import pytest

from paperless_text.parsers import TextDocumentParser


@pytest.fixture(scope="session")
def sample_dir() -> Path:
return (Path(__file__).parent / Path("samples")).resolve()


@pytest.fixture()
def text_parser() -> Generator[TextDocumentParser, None, None]:
try:
parser = TextDocumentParser(logging_group=None)
yield parser
finally:
parser.cleanup()


@pytest.fixture(scope="session")
def sample_txt_file(sample_dir: Path) -> Path:
return sample_dir / "test.txt"


@pytest.fixture(scope="session")
def malformed_txt_file(sample_dir: Path) -> Path:
return sample_dir / "decode_error.txt"
@@ -1,69 +0,0 @@
import tempfile
from pathlib import Path

from paperless_text.parsers import TextDocumentParser


class TestTextParser:
def test_thumbnail(
self,
text_parser: TextDocumentParser,
sample_txt_file: Path,
) -> None:
# just make sure that it does not crash
f = text_parser.get_thumbnail(sample_txt_file, "text/plain")
assert f.exists()
assert f.is_file()

def test_parse(
self,
text_parser: TextDocumentParser,
sample_txt_file: Path,
) -> None:
text_parser.parse(sample_txt_file, "text/plain")

assert text_parser.get_text() == "This is a test file.\n"
assert text_parser.get_archive_path() is None

def test_parse_invalid_bytes(
self,
text_parser: TextDocumentParser,
malformed_txt_file: Path,
) -> None:
"""
GIVEN:
- Text file which contains invalid UTF bytes
WHEN:
- The file is parsed
THEN:
- Parsing continues
- Invalid bytes are removed
"""

text_parser.parse(malformed_txt_file, "text/plain")

assert text_parser.get_text() == "Pantothens�ure\n"
assert text_parser.get_archive_path() is None

def test_thumbnail_large_file(self, text_parser: TextDocumentParser) -> None:
"""
GIVEN:
- A very large text file (>50MB)
WHEN:
- A thumbnail is requested
THEN:
- A thumbnail is created without reading the entire file into memory
"""
with tempfile.NamedTemporaryFile(
delete=False,
mode="w",
encoding="utf-8",
suffix=".txt",
) as tmp:
tmp.write("A" * (51 * 1024 * 1024))  # 51 MB of 'A'
large_file = Path(tmp.name)

thumb = text_parser.get_thumbnail(large_file, "text/plain")
assert thumb.exists()
assert thumb.is_file()
large_file.unlink()
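For reference, a short end-to-end sketch of the pre-move parser API these deleted tests exercised; the input path is hypothetical, and the cleanup mirrors the fixture above:

    from pathlib import Path

    from paperless_text.parsers import TextDocumentParser

    parser = TextDocumentParser(logging_group=None)
    try:
        parser.parse(Path("letter.txt"), "text/plain")
        print(parser.get_text())  # extracted text
        print(parser.get_thumbnail(Path("letter.txt"), "text/plain"))  # WEBP preview path
    finally:
        parser.cleanup()  # removes the parser's temp dir
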
@@ -12,6 +12,7 @@ def tika_parser() -> Generator[TikaDocumentParser, None, None]:
parser = TikaDocumentParser(logging_group=None)
yield parser
finally:
# TODO(stumpylog): Cleanup once all parsers are handled
parser.cleanup()

144 uv.lock generated
@@ -1393,74 +1393,74 @@ wheels = [
|
||||
|
||||
[[package]]
|
||||
name = "granian"
|
||||
version = "2.7.0"
|
||||
version = "2.7.2"
|
||||
source = { registry = "https://pypi.org/simple" }
|
||||
dependencies = [
|
||||
{ name = "click", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
|
||||
]
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/43/75/bdea4ab49a02772a3007e667284764081d401169e96d0270d95509e3e240/granian-2.7.0.tar.gz", hash = "sha256:bee8e8a81a259e6f08613c973062df9db5f8451b521bb0259ed8f27d3e2bab23", size = 127963, upload-time = "2026-02-02T11:39:57.525Z" }
|
||||
sdist = { url = "https://files.pythonhosted.org/packages/57/19/d4ea523715ba8dd2ed295932cc3dda6bb197060f78aada6e886ff08587b2/granian-2.7.2.tar.gz", hash = "sha256:cdae2f3a26fa998d41fefad58f1d1c84a0b035a6cc9377addd81b51ba82f927f", size = 128969, upload-time = "2026-02-24T23:04:23.314Z" }
|
||||
wheels = [
|
||||
{ url = "https://files.pythonhosted.org/packages/8a/28/a3ee3f2220c0b9045f8caa2a2cb7484618961b7500f88594349a7889d391/granian-2.7.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e76afb483d7f42a0b911bdb447d282f70ad7a96caabd4c99cdc300117c5f8977", size = 4580966, upload-time = "2026-02-02T11:38:14.077Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1b/60/b53da9c255f6853a5516d0f8a3e7325c24123f0f7e77856558c49810f4ce/granian-2.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:628523302274f95ca967f295a9aa7bc4ade5e1eced42afc60d06dfe20f2da07a", size = 4210344, upload-time = "2026-02-02T11:38:15.34Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/bb/c3380106565bc99edfb90baafa1a8081a4334709ce0200d207ddda36275e/granian-2.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a62560b64a17e1cbae61038285d5fa8a32613ada9a46f05047dc607ea7d38f23", size = 5130258, upload-time = "2026-02-02T11:38:17.175Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a2/8f/2c3348d6d33807e3b818ac07366b5251e811ce2548fbe82e0b55982d8a13/granian-2.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47b8e0e9497d24466d6511443cc18f22f18405aab5a7e2fece1dd38206af88c4", size = 4576496, upload-time = "2026-02-02T11:38:18.577Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f6/71/d1d146170a23f3523d8629b47f849b30ba0d513eb519188ce5d7bfd1b916/granian-2.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc6039c61a07b2d36462c487b66b131ae3fd862bdc8fb81d6e5c206c1a2b683c", size = 4975062, upload-time = "2026-02-02T11:38:20.084Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/16/f9/f3acbf8c41cd10ff81109bd9078d3228f23e52bab8673763c65739a87e30/granian-2.7.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:f3b0442beb11b035ee09959726f44b3730d0b55688110defd1d9a9a6c7486955", size = 4827755, upload-time = "2026-02-02T11:38:21.817Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/9f/f8/503135b89539feea2be495b47858c22409ba77ffcb71920ae0727c674189/granian-2.7.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:741d0b58a5133cc5902b3129a8a4c55143f0f8769a80e7aa80caadc64c9f1d8b", size = 4939033, upload-time = "2026-02-02T11:38:23.033Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/99/90/aaabe2c1162d07a6af55532b6f616199aa237805ef1d732fa78d9883d217/granian-2.7.0-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:02a6fe6a19f290b70bc23feeb3809511becdaff2263b0469f02c28772af97652", size = 5292980, upload-time = "2026-02-02T11:38:24.823Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/eb/aa/d1eb7342676893ab0ec1e66cceca4450bec3f29c488db2a92af5b4211d4d/granian-2.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8239b1a661271428c3e358e4bdcaaaf877a432cc593e93fc6b5a612ae521b06a", size = 5087230, upload-time = "2026-02-02T11:38:26.09Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/97/1a/b6d7840bfd9cd9bed627b138e6e8e49d1961997adba30ee39ad75d07ed58/granian-2.7.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d9c42562dcbf52848d0a9d0db58f8f2e790586eb0c363b8ad1b30fe0bd362117", size = 4572728, upload-time = "2026-02-02T11:38:30.143Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/15/93/f8f7224d9eaaaf4dbf493035a85287fa2e27c17e5f7aacc01821d8aa66b4/granian-2.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3421bd5c90430073e1f3f88fc63bc8d0a8ee547a9a5c06d577a281f384160bd", size = 4195034, upload-time = "2026-02-02T11:38:32.007Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/db/66843a35e1b6345da2a1c71839fb9aa7eb0f17d380fbf4cb5c7e06eb6f85/granian-2.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b8057dc81772932e208f2327b5e347459eb78896118e27af9845801e267cec5", size = 5123768, upload-time = "2026-02-02T11:38:33.449Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/10/ce/631c5c1f7a4e6b8c98ec857b3e6795fe64e474b6f48df388ac701a21f3fe/granian-2.7.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e5e70f438b1a4787d76566770e98bf7732407efa02802f38f10c960247107d7", size = 4562424, upload-time = "2026-02-02T11:38:34.815Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/28/41/19bdfa3719e22c4dcf6fa1a53323551a37aa58a4ca7a768db6a0ba714ab0/granian-2.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:213dd224a47c7bfcbb91718c7eeb56d6067825a28dcae50f537964e2dafb729a", size = 5006002, upload-time = "2026-02-02T11:38:36.76Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/3c/5b/3b40f489e2449eb58df93ad38f42d1a6c2910502a4bc8017c047e16d637c/granian-2.7.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:bb5be27c0265268d43bab9a878ac27a20b4288843ffc9fda1009b8226673f629", size = 4825073, upload-time = "2026-02-02T11:38:37.998Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/04/92/b6de6f8c4146409efb58aee75277b810d54de03a1687d33f1f3f1feb3395/granian-2.7.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a6ff95aede82903c06eb560a32b10e9235fdafc4568c8fe7dcac28d62be5ffa2", size = 4928628, upload-time = "2026-02-02T11:38:39.481Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/21/d8a191dcfbf8422b868ab847829670075ba3e4325611e0a9fd2dc909a142/granian-2.7.0-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e44f0c1676b27582df26d47cf466fedebd72f520edc2025f125c83ff58af77f9", size = 5282898, upload-time = "2026-02-02T11:38:40.815Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/46/2746f1a4f0f093576fb64b63c3f022f254c6d2c4cc66d37dd881608397ce/granian-2.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9241b72f95ceb57e2bbce55e0f61c250c1c02e9d2f8531b027dd3dc204209fdd", size = 5118453, upload-time = "2026-02-02T11:38:42.716Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/df/b68626242fb4913df0968ee5662f5a394857b3d6fc4ee17c94be69664491/granian-2.7.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:bc61451791c8963232e4921c6805e7c2e366635e1e658267b1854889116ff6d7", size = 4572200, upload-time = "2026-02-02T11:38:46.194Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c0/15/2fe28bca0751d9dc46e5c7e9e4b0c4fd1a55e3e8ba062f28292322ee160b/granian-2.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e274a0d6a01c475b9135212106ca5b69f5ec2f67f4ca6ce812d185d80255cdf5", size = 4195415, upload-time = "2026-02-02T11:38:47.78Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/2a/d4dc40e58a55835cac5296f5090cc3ce2d43332ad486bbf78b3a00e46199/granian-2.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34bd28075adae3453c596ee20089e0288379e3fdf1cec8bafff89bb175ea0eb4", size = 5122981, upload-time = "2026-02-02T11:38:49.55Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bd/fe/8c79837df620dc0eca6a8b799505910cbba2d85d92ccc58d1c549f7027be/granian-2.7.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f526583b72cf9e6ca9a4849c781ed546f44005f0ad4b5c7eb1090e1ebec209bf", size = 4561440, upload-time = "2026-02-02T11:38:50.799Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4f/e7/d7abfaa9829ff50cddc27919bd3ce5a335402ebbbaa650e96fe579136674/granian-2.7.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ac07d5314e03e667210349dfc76124d69726731007c24716e21a2554cc15ca", size = 5005076, upload-time = "2026-02-02T11:38:52.157Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1a/45/108afaa0636c93b6a8ff12810787e4a1ea27fffe59f12ca0de7c784b119a/granian-2.7.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f6812e342c41ca80e1b34fb6c9a7e51a4bbd14f59025bd1bb59d45a39e02b8d5", size = 4825142, upload-time = "2026-02-02T11:38:53.506Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4b/eb/cedf4675b1047490f819ce8bd1ee1ea74b6c772ae9d9dd1c117ae690a3eb/granian-2.7.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a4099ba59885123405699a5313757556ff106f90336dccdf4ceda76f32657d0", size = 4927830, upload-time = "2026-02-02T11:38:54.92Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f9/b5/2d7a2e03ba29a6915ad41502e2870899b9eb54861e3d06ad8470c5e70b41/granian-2.7.0-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:c487731fbae86808410e88c587eb4071213812c5f52570b7981bf07a1b84be25", size = 5282142, upload-time = "2026-02-02T11:38:56.445Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a9/e7/c851b2e2351727186b4bc4a35df832e2e97e4f77b8a93dfdb6daa098cf9e/granian-2.7.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ca4877ebf8873488ba72a299206621bd0c6febb8f091f3da62117c1fe344501f", size = 5117907, upload-time = "2026-02-02T11:38:57.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e1/2f/c9bcd4aa36d3092fe88a623e60aa89bd4ff16836803a633b8b454946a845/granian-2.7.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:e1df8e4669b4fb69b373b2ab40a10a8c511eeb41838d65adb375d1c0e4e7454c", size = 4493110, upload-time = "2026-02-02T11:39:01.294Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6a/b4/02d11870255920d35f8eab390e509d3688fe0018011bb606aa00057b778f/granian-2.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6331ed9d3eb06cfba737dfb8efa3f0a8b4d4312a5af91c0a67bfbaa078b62eb4", size = 4122388, upload-time = "2026-02-02T11:39:02.509Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/50/dfad5a414a2e3e14c30cd0d54cef1dab4874a67c1e6f8b1124d9998ed8b2/granian-2.7.0-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:093e1c277eddba00eaa94ca82ff7a9ab57b0554cd7013e5b2f3468635dbe520d", size = 4379344, upload-time = "2026-02-02T11:39:04.489Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/53/ef086af03ef31aa3c1dbff2da5928a9b5dd1f48d8ebee18dd6628951ae9e/granian-2.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8e8e317bdc9ca9905d0b20f665f8fe31080c7f13d90675439113932bb3272c24", size = 5069172, upload-time = "2026-02-02T11:39:05.757Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c3/57/117864ea46c6cbcbeff733a4da736e814b06d6634beeb201b9db176bd6be/granian-2.7.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:391e8589265178fd7f444b6711b6dda157a6b66059a15bf1033ffceeaf26918c", size = 4848246, upload-time = "2026-02-02T11:39:07.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/60/da/2d45b7b6638a77362228d6770a61fa2bc3feae6c52a80993c230f344b197/granian-2.7.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:49b6873f4a8ee7a1ea627ff98d67ecdd644cfc18aab475b2e15f651dbcbe4140", size = 4669023, upload-time = "2026-02-02T11:39:09.612Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/22/69/49e54eb6ed67ccf471c19d4c65f64197dd5a416d501620519e28ea92c82e/granian-2.7.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:39778147c7527de0bcda12cd9c38863d4e6a80d3a8a96ddeb6fe2d1342f337db", size = 4896002, upload-time = "2026-02-02T11:39:10.996Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/f1/a864a78029265d06a6fd61c760c8facf032be0d345deca5081718cbb006f/granian-2.7.0-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:8135d0a4574dc5a0acf3a815fc6cad5bbe9075ef86df2c091ec34fbd21639c1c", size = 5239945, upload-time = "2026-02-02T11:39:12.726Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/26/33/feef40e4570b771d815c1ddd1008ccc9c0e81ce5a015deded6788e919f18/granian-2.7.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:47df2d9e50f22fa820b34fd38ceeeedc0b97994fa164425fa30e746759db8a44", size = 5078968, upload-time = "2026-02-02T11:39:14.048Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b9/6a/b8d58474bbcbca450f030fd41b65c94ae0afb5e8f58c39fbea2df4efee2b/granian-2.7.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:23c6531b75c94c7b533812aed4f40dc93008c406cfa5629ec93397cd0f6770cb", size = 4569780, upload-time = "2026-02-02T11:39:16.671Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c2/dc/a8b11425ebdf6cb58e1084fdb7759d853ca7f0b00376e4bb66300322f5d3/granian-2.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e4939b86f2b7918202ce56cb01c2efe20a393c742d41640b444e82c8b444b614", size = 4195285, upload-time = "2026-02-02T11:39:18.596Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/b5/6cc0b94f997d93f4b1510b2d953f07a7f1d16a143d60b53e0e50b887fa12/granian-2.7.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38fa10adf3c4d50e31a08401e6701ee2488613d905bb316cad456e5ebad5aa81", size = 5121311, upload-time = "2026-02-02T11:39:20.092Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f4/f9/df3d862874cf4b233f97253bb78991ae4f31179a5581beaa41a2100e3bce/granian-2.7.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b366a9fd713a20321e668768b122b7b0140bfaeb3cb0557b6cb11dce827a4fb", size = 4557737, upload-time = "2026-02-02T11:39:21.992Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c7/7f/e3063368345f39188afe5baa1ab62fdd951097656cd83bec3964f91f6e66/granian-2.7.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a916413e0dcd5c6eaf7f7413a6d899f7ba53a988d08e3b3c7ab2e0b5fa687559", size = 5004108, upload-time = "2026-02-02T11:39:23.306Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/bc/eb/892bcc0cfc44ed791795bab251e0b6ed767397182bac134d9f0fcecc552e/granian-2.7.0-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:e315adf24162294d35ca4bed66c8f66ac15a0696f2cb462e729122d148f6d958", size = 4823143, upload-time = "2026-02-02T11:39:24.696Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b3/e0/ff8528bf620b6da7833171f6d30bfe4b4b1d6e7d155b634bd17590e0c4b4/granian-2.7.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:486f8785e716f76f96534aaba25acd5dee1a8398725ffd2a55f0833689c75933", size = 4926328, upload-time = "2026-02-02T11:39:26.111Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/02/f7/fb0a761d39245295660703a42e9448f3c04ce1f26b2f62e044d179167880/granian-2.7.0-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:0e5e2c1c6ff1501e3675e5237096b90b767f506bb0ef88594310b7b9eaa95532", size = 5281190, upload-time = "2026-02-02T11:39:27.68Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d6/d8/860e7e96ea109c6db431c8284040d265758bded35f9ce2de05f3969d7c0c/granian-2.7.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:d4418b417f9c2162b4fa9ec41ec34ed3e8ed891463bb058873034222be53542f", size = 5117989, upload-time = "2026-02-02T11:39:29.008Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fb/9a/500ab01ae273870e8fc056956cc49716707b4a0e76fb2b5993258e1494f7/granian-2.7.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:b4367c088c00bdc38a8a495282070010914931edb4c488499f290c91018d9e80", size = 4492656, upload-time = "2026-02-02T11:39:31.614Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d0/26/86dc5a6fff60ee0cc38c2fcd1a0d4cebd52e6764a9f752a20458001ca57e/granian-2.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c8f3df224284ed1ff673f61de652337d7721100bf4cfc336b2047005b0edb2e0", size = 4122201, upload-time = "2026-02-02T11:39:33.162Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/60/887dc5a099135ff449adcdea9a2aa38f39673baf99de9acb78077b701432/granian-2.7.0-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6682c08b0d82ad75f8e9d1571254630133e1563c49f0600c2e2dc26cec743ae7", size = 4377489, upload-time = "2026-02-02T11:39:34.532Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5a/6b/68c12f8c4c1f1c109bf55d66beeb37a817fd908af5d5d9b48afcbdc3e623/granian-2.7.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d6ccc3bdc2248775b6bd292d7d37a1bff79eb1aaf931f3a217ea9fb9a6fe7ca4", size = 5067294, upload-time = "2026-02-02T11:39:35.84Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ff/4f/be4f9c129f5f80f52654f257abe91f647defec020fa134b3600013b7219d/granian-2.7.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5431272a4d6f49a200aeb7b01010a3785b93b9bd8cd813d98ed29c8e9ba1c476", size = 4848356, upload-time = "2026-02-02T11:39:37.443Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/d7/aa/f6efcfb435f370a6f3626bd5837465bfb71950f6b3cb3c74e54b176c72e2/granian-2.7.0-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:790b150255576775672f26dbcbd6eb05f70260dd661b91ce462f6f3846db9501", size = 4669022, upload-time = "2026-02-02T11:39:38.782Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1d/36/e86050c476046ef1f0aae0eb86d098fa787abfc8887a131c82baccc7565e/granian-2.7.0-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:ce9be999273c181e4b65efbbd82a5bc6f223f1db3463660514d1dc229c8ba760", size = 4895567, upload-time = "2026-02-02T11:39:40.144Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/5e/25283ff7fc12fcf42ae8a5687243119739cf4b0bf5ccb1c32d11d37987b1/granian-2.7.0-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:319b34f18ed3162354513acb5a9e8cee720ac166cd88fe05f0f057703eb47e4f", size = 5238652, upload-time = "2026-02-02T11:39:41.648Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/5f/60/06148781120e086c7437aa9513198025ea1eb847cb2e244d5e2b9801782e/granian-2.7.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:b01bed8ad748840e7ab49373f642076f3bc459e39937a4ce11c5be03e67cdfd9", size = 5079018, upload-time = "2026-02-02T11:39:43.309Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0f/0b/39ebf1b791bbd4049239ecfee8f072321211879e5617a023921961be1d55/granian-2.7.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:24a1f6a894bea95ef0e603bebacbccd19c319c0da493bb4fde8b94b8629f3dc8", size = 4581648, upload-time = "2026-02-02T11:39:45.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/cd/4642192520478bba4cd547124d92607c958a0786864ebe378f3008b40048/granian-2.7.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:c2799497ac896cffea85512983c5d9eb4ae51ebacd7a9a5fd3d2ac81f1755fac", size = 4214257, upload-time = "2026-02-02T11:39:47.507Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e2/3f/615f93753c3b682219fe546196fc9eb3a045d846e57883312c97de4d785a/granian-2.7.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b66a15d004136e641706e0e5522b3509151e2027a0677cf4fa97d049d9ddfa41", size = 4979656, upload-time = "2026-02-02T11:39:48.838Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/68/1f2c36a964f93bfe8d6189431b8425acc591b735e47d8898b2e70c478398/granian-2.7.0-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:de5a6fa93d2138ba2372d20d97b87c1af75fa16a59a93841745326825c3ddf83", size = 4844448, upload-time = "2026-02-02T11:39:50.5Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/df/23/d8c83fe6a6656026c734c2ea771cbcdec6f0010e749f8ab0db1bfc8a3dfe/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:aacda2ad46724490c4cd811b8dcadff2260603a3e95ca0d8c33552d791a3c6ac", size = 4930755, upload-time = "2026-02-02T11:39:51.866Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/20/e5/2a86ee18544185e72fc50b50985b6bfb4504f7835875d2636f573e100071/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:7efb5ebdb308ed1685a80cded6ea51447753e8afe92c21fc3abf9a06a9eb5d2e", size = 5295728, upload-time = "2026-02-02T11:39:53.364Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/bd/0d47d17769601c56d876b289456f27799611571227b99ad300e221600bbd/granian-2.7.0-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ae96b75420d01d9a7dbe1bd84f1898b2b0ade6883db59bfe2b233d7c28c6b0df", size = 5095149, upload-time = "2026-02-02T11:39:54.767Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/f8/58/dcf0e8a54b9a7f8b7482ed617bca08503a47eb6b702aea73cda9efd2c81c/granian-2.7.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a0d33ada95a1421e5a22d447d918e5615ff0aa37f12de5b84455afe89970875", size = 6522860, upload-time = "2026-02-24T23:02:15.901Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2b/dd/398de0f273fdcf0e96bd70d8cd97364625176990e67457f11e23f95772bd/granian-2.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ee26f0258cc1b6ccf87c7bdcee6d1f90710505522fc9880ec02b299fb15679ad", size = 6135934, upload-time = "2026-02-24T23:02:18.52Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/67/b7/7bf635bbdfb88dfc6591fa2ce5c3837ab9535e57e197a780c4a338363de7/granian-2.7.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f52338cfab08b8cdaadaa5b93665e0be5b4c4f718fbd132d76ceacacb9ff864e", size = 7138393, upload-time = "2026-02-24T23:02:19.911Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/90/e424fd8a703add1e8922390503be8d057882b35b42ba51796157aabd659b/granian-2.7.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e377d03a638fecb6949ab05c8fd4a76f892993aed17c602d179bfd56aebc2de", size = 6467189, upload-time = "2026-02-24T23:02:21.896Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/9a/5de24d7e2dba1aa9fbac6f0a80dace975cfac1b7c7624ece21da75a38987/granian-2.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f742f3ca1797a746fae4a9337fe5d966460c957fa8efeaccf464b872e158d3d", size = 6870813, upload-time = "2026-02-24T23:02:23.972Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ac/cd/a604e38237857f4ad4262eadc409f94fe08fed3e86fa0b8734479cc5bfb1/granian-2.7.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:ca4402e8f28a958f0c0f6ebff94cd0b04ca79690aded785648a438bc3c875ba3", size = 7046583, upload-time = "2026-02-24T23:02:25.94Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/ad/79eaae0cddd90c4e191b37674cedd8f4863b44465cb435b10396d0f12c82/granian-2.7.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1f9a899123b0d084783626e5225608094f1d2f6fc81b3a7c77ab8daac33ab74a", size = 7121958, upload-time = "2026-02-24T23:02:27.641Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ca/51/e5c923b1baa003f5b4b7fc148be6f8d2e3cabe55d41040fe8139da52e31b/granian-2.7.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:56ba4bef79d0ae3736328038deed2b5d281b11672bc0b08ffc8ce6210e406ef8", size = 7303047, upload-time = "2026-02-24T23:02:30.863Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/06/c0/ebd68144a3ce9ead1a3192ac02e1c26e4874df1257435ce6137adf92fedb/granian-2.7.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea46e3f43d94715aa89d1f2f5754753d46e6b653d561b82b0291e62a31bdfb35", size = 7011349, upload-time = "2026-02-24T23:02:32.887Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/ed/37f5d7d887ec9159dd8f5b1c9c38cee711d51016d203959f2d51c536a33b/granian-2.7.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a836f3f8ebfe61cb25d9afb655f2e5d3851154fd2ad97d47bb4fb202817212fc", size = 6451593, upload-time = "2026-02-24T23:02:36.203Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/1e/06/84ee67a68504836a52c48ec3b4b2b406cbd927c9b43aae89d82db8d097a0/granian-2.7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09b1c543ba30886dea515a156baf6d857bbb8b57dbfd8b012c578b93c80ef0c3", size = 6101239, upload-time = "2026-02-24T23:02:37.636Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ed/50/ece7dc8efe144542cd626b88b1475b649e2eaa3eb5f7541ca57390151b05/granian-2.7.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d334d4fbefb97001e78aa8067deafb107b867c102ba2120b4b2ec989fa58a89", size = 7079443, upload-time = "2026-02-24T23:02:39.651Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/7e/e8/0f37b531d3cc96b8538cca2dc86eda92102e0ee345b30aa689354194a4cb/granian-2.7.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c86081d8c87989db69650e9d0e50ed925b8cd5dad21e0a86aa72d7a45f45925", size = 6428683, upload-time = "2026-02-24T23:02:41.827Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/47/09/228626706554b389407270e2a6b19b7dee06d6890e8c01a39c6a785827fd/granian-2.7.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9eda33dca2c8bc6471bb6e9e25863077bca3877a1bba4069cd5e0ee2de41765", size = 6959520, upload-time = "2026-02-24T23:02:43.488Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/61/c0/a639ceabd59b8acae2d71b5c918fcb2d42f8ef98994eedcf9a8b6813731d/granian-2.7.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:9cf69aaff6f632074ffbe7c1ee214e50f64be36101b7cb8253eeec1d460f2dba", size = 6991548, upload-time = "2026-02-24T23:02:44.954Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b1/99/a35ed838a3095dcad02ae3944d19ebafe1d5a98cdc72bb61835fb5faf933/granian-2.7.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f761a748cc7f3843b430422d2539da679daf5d3ef0259a101b90d5e55a0aafa7", size = 7121475, upload-time = "2026-02-24T23:02:46.991Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ce/24/3952c464432b904ec1cf537d2bd80d2dfde85524fa428ab9db2b5afe653c/granian-2.7.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:41c7b8390b78647fe34662ed7296e1465dad4a5112af9b0ecf8e367083d6c76a", size = 7243647, upload-time = "2026-02-24T23:02:49.165Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c9/fa/ab39e39c6b78eab6b42cf5bb36f56badde2aaafc3807f03f781d00e7861a/granian-2.7.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a052ed466da5922cb443435a95a0c751566943278a6f22cef3d2e19d4e7ecdea", size = 7048915, upload-time = "2026-02-24T23:02:50.773Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ab/bc/cf0bc29f583096a842cf0f26ae2fe40c72ed5286d4548be99ecfcdbb17e2/granian-2.7.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:76b840ff13dde8838fd33cd096f2e7cadf2c21a499a67f695f53de57deab6ff8", size = 6440868, upload-time = "2026-02-24T23:02:53.619Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/2f/0d/bae1dcd2182ba5d9a5df33eb50b56dc5bbe67e31033d822e079aa8c1ff30/granian-2.7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:00ccc8d7284bc7360f310179d0b4d17e5ca3077bbe24427e9e9310df397e3831", size = 6097336, upload-time = "2026-02-24T23:02:55.185Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/65/7d/3e0a7f32b0ad5faa1d847c51191391552fa239821c95fc7c022688985df2/granian-2.7.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:675987c1b321dc8af593db8639e00c25277449b32e8c1b2ddd46b35f28d9fac4", size = 7098742, upload-time = "2026-02-24T23:02:57.898Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/89/41/3b44386d636ac6467f0f13f45474c71fc3b90a4f0ba8b536de91b2845a09/granian-2.7.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:681c6fbe3354aaa6251e6191ec89f5174ac3b9fbc4b4db606fea456d01969fcb", size = 6430667, upload-time = "2026-02-24T23:02:59.789Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/52/70/7b24e187aed3fb7ac2b29d2480a045559a509ef9fec54cffb8694a2d94af/granian-2.7.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5c9ae65af5e572dca27d8ca0da4c5180b08473ac47e6f5329699e9455a5cc3", size = 6948424, upload-time = "2026-02-24T23:03:01.406Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/fa/4c/cb74c367f9efb874f2c8433fe9bf3e824f05cf719f2251d40e29e07f08c0/granian-2.7.2-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:e37fab2be919ceb195db00d7f49ec220444b1ecaa07c03f7c1c874cacff9de83", size = 7000407, upload-time = "2026-02-24T23:03:03.214Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/58/98/dfed3966ed7fbd3aae56e123598f90dc206484092b8373d0a71e2d8b82a8/granian-2.7.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:8ec167ab30f5396b5caaff16820a39f4e91986d2fe5bdc02992a03c2b2b2b313", size = 7121626, upload-time = "2026-02-24T23:03:05.349Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/39/82/acec732a345cd03b2f6e48ac04b66b7b8b61f5c50eb08d7421fc8c56591a/granian-2.7.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:63f426d793f2116d23be265dd826bec1e623680baf94cc270fe08923113a86ba", size = 7253447, upload-time = "2026-02-24T23:03:06.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/c5/2b/64779e69b08c1ff1bfc09a4ede904ab761ff63f936c275710886057c52f7/granian-2.7.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1617cbb4efe3112f07fb6762cf81d2d9fe4bdb78971d1fd0a310f8b132f6a51e", size = 7053005, upload-time = "2026-02-24T23:03:09.021Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/49/9eb88875d709db7e7844e1c681546448dab5ff5651cd1c1d80ac4b1de4e3/granian-2.7.2-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:016c5857c8baedeab7eb065f98417f5ea26bb72b0f7e0544fe76071efc5ab255", size = 6401748, upload-time = "2026-02-24T23:03:12.802Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e3/80/85726ad9999ed89cb6a32f7f57eb50ce7261459d9c30c3b194ae4c5aa2c5/granian-2.7.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dcbe01fa141adf3f90964e86a959e250754aa7c6dad8fa7a855e6fd382de4c13", size = 6101265, upload-time = "2026-02-24T23:03:14.435Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/07/82/0df56a42b9f4c327d0e0b052f43369127e1b565b9e66bf2c9488f1c8d759/granian-2.7.2-cp313-cp313t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:283ba23817a685784b66f45423d2f25715fdc076c8ffb43c49a807ee56a0ffc0", size = 6249488, upload-time = "2026-02-24T23:03:16.387Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ef/cc/d83a351560a3d6377672636129c52f06f8393f5831c5ee0f06f274883ea6/granian-2.7.2-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3258419c741897273ce155568b5a9cbacb7700a00516e87119a90f7d520d6783", size = 7104734, upload-time = "2026-02-24T23:03:17.993Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/84/d1/539907ee96d0ee2bcceabb4a6a9643b75378d6dfea09b7a9e4fd22cdf977/granian-2.7.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a196125c4837491c139c9cc83541b48c408c92b9cfbbf004fd28717f9c02ad21", size = 6785504, upload-time = "2026-02-24T23:03:19.763Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/86/bf/4b6f45882f8341e7c6cb824d693deb94c306be6525b483c76fb373d1e749/granian-2.7.2-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:746555ac8a2dcd9257bfe7ad58f1d7a60892bc4613df6a7d8f736692b3bb3b88", size = 6902790, upload-time = "2026-02-24T23:03:22.215Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/44/b8/832970d2d4b144b87be39f5b9dfd31fdb17f298dc238a0b2100c95002cf8/granian-2.7.2-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:5ac1843c6084933a54a07d9dcae643365f1d83aaff3fd4f2676ea301185e4e8b", size = 7082682, upload-time = "2026-02-24T23:03:23.875Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/38/bc/1521dbf026d1c9d2465cd54e016efd8ff6e1e72eff521071dab20dd61c44/granian-2.7.2-cp313-cp313t-musllinux_1_1_armv7l.whl", hash = "sha256:3612eb6a3f4351dd2c4df246ed0d21056c0556a6b1ed772dd865310aa55a9ba9", size = 7264742, upload-time = "2026-02-24T23:03:25.562Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/19/ae/00884ab77045a2f54db90932f9d1ca522201e2a6b2cf2a9b38840db0fd54/granian-2.7.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:34708b145e31b4538e0556704a07454a76d6776c55c5bc3a1335e80ef6b3bae3", size = 7062571, upload-time = "2026-02-24T23:03:27.278Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/69/4a/8ce622f4f7d58e035d121b9957dd5a8929028dc99cfc5d2bf7f2aa28912c/granian-2.7.2-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:592806c28c491f9c1d1501bac706ecf5e72b73969f20f912678d53308786d658", size = 6442041, upload-time = "2026-02-24T23:03:30.986Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/62/7d36ed38a40a68c2856b6d2a6fedd40833e7f82eb90ba0d03f2d69ffadf5/granian-2.7.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c9dcde3968b921654bde999468e97d03031f28668bc1fc145c81d8bedb0fb2a4", size = 6100793, upload-time = "2026-02-24T23:03:32.734Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/b4/c5/17fea68f4cb280c217cbd65534664722c9c9b0138c2754e20c235d70b5f4/granian-2.7.2-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d4d78408283ec51f0fb00557856b4593947ad5b48287c04e1c22764a0ac28a5", size = 7119810, upload-time = "2026-02-24T23:03:34.807Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/0a/76/35e240d107e0f158662652fd61191de4fb0c2c080e3786ca8f16c71547b7/granian-2.7.2-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d28b078e8087f794b83822055f95caf93d83b23f47f4efcd5e2f0f7a5d8a81", size = 6450789, upload-time = "2026-02-24T23:03:36.81Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/4c/55/a6d08cfecc808149a910e51c57883ab26fad69d922dc2e76fb2d87469e2d/granian-2.7.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ff7a93123ab339ba6cad51cc7141f8880ec47b152ce2491595bb08edda20106", size = 6902672, upload-time = "2026-02-24T23:03:38.655Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/98/2e/c86d95f324248fcc5dcaf034c9f688b32f7a488f0b2a4a25e6673776107f/granian-2.7.2-cp314-cp314-manylinux_2_28_aarch64.whl", hash = "sha256:a52effb9889f0944f0353afd6ce5a9d9aa83826d44bbf3c8013e978a3d6ef7b7", size = 6964399, upload-time = "2026-02-24T23:03:40.459Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/37/4b/44fde33fe10245a3fba76bf843c387fad2d548244345115b9d87e1c40994/granian-2.7.2-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:76c987c3ca78bf7666ab053c3ed7e3af405af91b2e5ce2f1cf92634c1581e238", size = 7034929, upload-time = "2026-02-24T23:03:42.149Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/90/76/38d205cb527046241a9ee4f51048bf44101c626ad4d2af16dd9d14dc1db6/granian-2.7.2-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:6590f8092c2bb6614e561ba771f084cbf72ecbc38dbf9849762ac38718085c29", size = 7259609, upload-time = "2026-02-24T23:03:43.852Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/00/37/04245c7259e65f1083ce193875c6c44da4c98604d3b00a264a74dd4f042b/granian-2.7.2-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:7c1ce9b0c9446b680e9545e7fc95a75f0c53a25dedcf924b1750c3e5ba5bf908", size = 7073161, upload-time = "2026-02-24T23:03:45.655Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cc/07/0e56fb4f178e14b4c1fa1f6f00586ca81761ccbe2d8803f2c12b6b17a7d6/granian-2.7.2-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:a698d9b662d5648c8ae3dc01ad01688e1a8afc3525e431e7cddb841c53e5e291", size = 6415279, upload-time = "2026-02-24T23:03:48.932Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/27/bc/3e69305bf34806cd852f4683deec844a2cb9a4d8888d7f172b507f6080a8/granian-2.7.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:17516095b520b3c039ddbe41a6beb2c59d554b668cc229d36d82c93154a799af", size = 6090528, upload-time = "2026-02-24T23:03:50.52Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/ec/10/7d58a922b44417a6207c0a3230b0841cd7385a36fc518ac15fed16ebf6f7/granian-2.7.2-cp314-cp314t-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:96b0fd9eac60f939b3cbe44c8f32a42fdb7c1a1a9e07ca89e7795cdc7a606beb", size = 6252291, upload-time = "2026-02-24T23:03:52.248Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/54/56/65776c6d759dcef9cce15bc11bdea2c64fe668088faf35d87916bd88f595/granian-2.7.2-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e50fb13e053384b8bd3823d4967606c6fd89f2b0d20e64de3ae212b85ffdfed2", size = 7106748, upload-time = "2026-02-24T23:03:53.994Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/81/ee/d9ed836316607401f158ac264a3f770469d1b1edbf119402777a9eff1833/granian-2.7.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bb1ef13125bc05ab2e18869ed311beaeb085a4c4c195d55d0865f5753a4c0b4", size = 6778883, upload-time = "2026-02-24T23:03:55.574Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/a1/46/eabab80e07a14527c336dec6d902329399f3ba2b82dc94b6435651021359/granian-2.7.2-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b1c77189335070c6ba6b8d158518fde4c50f892753620f0b22a7552ad4347143", size = 6903426, upload-time = "2026-02-24T23:03:57.296Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/24/8a/8ce186826066f6d453316229383a5be3b0b8a4130146c21f321ee64fe2cb/granian-2.7.2-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:1777166c3c853eed4440adb3cbbf34bba2b77d595bfc143a5826904a80b22f34", size = 7083877, upload-time = "2026-02-24T23:03:59.425Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/cf/eb/91ed4646ce1c920ad39db0bcddb6f4755e1823002b14fb026104e3eb8bce/granian-2.7.2-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:0ffac19208ae548f3647c849579b803beaed2b50dfb0f3790ad26daac0033484", size = 7267282, upload-time = "2026-02-24T23:04:01.218Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/49/2f/58cba479254530ab09132e150e4ab55362f6e875d9e82b6790477843e0aa/granian-2.7.2-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:82f34e78c1297bf5a1b6a5097e30428db98b59fce60a7387977b794855c0c3bc", size = 7054941, upload-time = "2026-02-24T23:04:03.211Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/59/71/f21b26c7dc7a8bc9d8288552c9c12128e73f1c3f04799b6e28a0a269b9b0/granian-2.7.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5613ee8c1233a79e56e1735e19c8c70af22a8c6b5808d7c1423dc5387bee4c05", size = 6504773, upload-time = "2026-02-24T23:04:06.498Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/6e/68/282fbf5418f9348f657f505dc744cdca70ac850d39a805b21395211bf099/granian-2.7.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:0cd6fee79f585de2e1a90b6a311f62b3768c7cda649bc0e02908157ffa2553cc", size = 6138096, upload-time = "2026-02-24T23:04:09.138Z" },
|
||||
{ url = "https://files.pythonhosted.org/packages/e7/e0/b578709020f84c07ad2ca88f77ac67fd2c62e6b16f93ff8c8d65b7d99296/granian-2.7.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e94c825f8b327114f7062d158c502a540ef5819f809e10158f0edddddaf41bb9", size = 6900043, upload-time = "2026-02-24T23:04:11.015Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2f/a2671cc160f29ccf8e605eb8fa113c01051b0d7947048c5b29eb4e603384/granian-2.7.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a6adea5fb8a537d18f3f2b848023151063bc45896415fdebfeb0bf0663d5a03b", size = 7040211, upload-time = "2026-02-24T23:04:13.31Z" },
{ url = "https://files.pythonhosted.org/packages/36/ce/df9bba3b211cda2d47535bb21bc040007e021e8c8adc20ce36619f903bc4/granian-2.7.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2392ab03cb92b1b2d4363f450b2d875177e10f0e22d67a4423052e6885e430f2", size = 7118085, upload-time = "2026-02-24T23:04:15.05Z" },
{ url = "https://files.pythonhosted.org/packages/a9/87/37124b2ee0cddce6ba438b0ff879ddae094ae2c92b24b28ffbe35110931f/granian-2.7.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:406c0bb1f5bf55c72cfbfdfd2ccec21299eb3f7b311d85c4889dde357fd36f33", size = 7314667, upload-time = "2026-02-24T23:04:16.783Z" },
{ url = "https://files.pythonhosted.org/packages/8c/ac/8b142ed352bc525e3c97440aab312928beebc735927b0cf979692bfcda3b/granian-2.7.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:362a6001daa2ce62532a49df407fe545076052ef29289a76d5760064d820f48b", size = 7004934, upload-time = "2026-02-24T23:04:19.059Z" },
]
[package.optional-dependencies]
@@ -4813,18 +4813,16 @@ wheels = [
[[package]]
name = "tornado"
version = "6.5.4"
version = "6.5.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/37/1d/0a336abf618272d53f62ebe274f712e213f5a03c0b2339575430b8362ef2/tornado-6.5.4.tar.gz", hash = "sha256:a22fa9047405d03260b483980635f0b041989d8bcc9a313f8fe18b411d84b1d7", size = 513632, upload-time = "2025-12-15T19:21:03.836Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f8/f1/3173dfa4a18db4a9b03e5d55325559dab51ee653763bb8745a75af491286/tornado-6.5.5.tar.gz", hash = "sha256:192b8f3ea91bd7f1f50c06955416ed76c6b72f96779b962f07f911b91e8d30e9", size = 516006, upload-time = "2026-03-10T21:31:02.067Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/a9/e94a9d5224107d7ce3cc1fab8d5dc97f5ea351ccc6322ee4fb661da94e35/tornado-6.5.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:d6241c1a16b1c9e4cc28148b1cda97dd1c6cb4fb7068ac1bedc610768dff0ba9", size = 443909, upload-time = "2025-12-15T19:20:48.382Z" },
{ url = "https://files.pythonhosted.org/packages/db/7e/f7b8d8c4453f305a51f80dbb49014257bb7d28ccb4bbb8dd328ea995ecad/tornado-6.5.4-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2d50f63dda1d2cac3ae1fa23d254e16b5e38153758470e9956cbc3d813d40843", size = 442163, upload-time = "2025-12-15T19:20:49.791Z" },
{ url = "https://files.pythonhosted.org/packages/ba/b5/206f82d51e1bfa940ba366a8d2f83904b15942c45a78dd978b599870ab44/tornado-6.5.4-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1cf66105dc6acb5af613c054955b8137e34a03698aa53272dbda4afe252be17", size = 445746, upload-time = "2025-12-15T19:20:51.491Z" },
{ url = "https://files.pythonhosted.org/packages/8e/9d/1a3338e0bd30ada6ad4356c13a0a6c35fbc859063fa7eddb309183364ac1/tornado-6.5.4-cp39-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50ff0a58b0dc97939d29da29cd624da010e7f804746621c78d14b80238669335", size = 445083, upload-time = "2025-12-15T19:20:52.778Z" },
{ url = "https://files.pythonhosted.org/packages/50/d4/e51d52047e7eb9a582da59f32125d17c0482d065afd5d3bc435ff2120dc5/tornado-6.5.4-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fb5e04efa54cf0baabdd10061eb4148e0be137166146fff835745f59ab9f7f", size = 445315, upload-time = "2025-12-15T19:20:53.996Z" },
{ url = "https://files.pythonhosted.org/packages/27/07/2273972f69ca63dbc139694a3fc4684edec3ea3f9efabf77ed32483b875c/tornado-6.5.4-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9c86b1643b33a4cd415f8d0fe53045f913bf07b4a3ef646b735a6a86047dda84", size = 446003, upload-time = "2025-12-15T19:20:56.101Z" },
{ url = "https://files.pythonhosted.org/packages/d1/83/41c52e47502bf7260044413b6770d1a48dda2f0246f95ee1384a3cd9c44a/tornado-6.5.4-cp39-abi3-musllinux_1_2_i686.whl", hash = "sha256:6eb82872335a53dd063a4f10917b3efd28270b56a33db69009606a0312660a6f", size = 445412, upload-time = "2025-12-15T19:20:57.398Z" },
{ url = "https://files.pythonhosted.org/packages/10/c7/bc96917f06cbee182d44735d4ecde9c432e25b84f4c2086143013e7b9e52/tornado-6.5.4-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6076d5dda368c9328ff41ab5d9dd3608e695e8225d1cd0fd1e006f05da3635a8", size = 445392, upload-time = "2025-12-15T19:20:58.692Z" },
{ url = "https://files.pythonhosted.org/packages/59/8c/77f5097695f4dd8255ecbd08b2a1ed8ba8b953d337804dd7080f199e12bf/tornado-6.5.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:487dc9cc380e29f58c7ab88f9e27cdeef04b2140862e5076a66fb6bb68bb1bfa", size = 445983, upload-time = "2026-03-10T21:30:44.28Z" },
{ url = "https://files.pythonhosted.org/packages/ab/5e/7625b76cd10f98f1516c36ce0346de62061156352353ef2da44e5c21523c/tornado-6.5.5-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:65a7f1d46d4bb41df1ac99f5fcb685fb25c7e61613742d5108b010975a9a6521", size = 444246, upload-time = "2026-03-10T21:30:46.571Z" },
{ url = "https://files.pythonhosted.org/packages/b2/04/7b5705d5b3c0fab088f434f9c83edac1573830ca49ccf29fb83bf7178eec/tornado-6.5.5-cp39-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e74c92e8e65086b338fd56333fb9a68b9f6f2fe7ad532645a290a464bcf46be5", size = 447229, upload-time = "2026-03-10T21:30:48.273Z" },
{ url = "https://files.pythonhosted.org/packages/34/01/74e034a30ef59afb4097ef8659515e96a39d910b712a89af76f5e4e1f93c/tornado-6.5.5-cp39-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:435319e9e340276428bbdb4e7fa732c2d399386d1de5686cb331ec8eee754f07", size = 448192, upload-time = "2026-03-10T21:30:51.22Z" },
{ url = "https://files.pythonhosted.org/packages/be/00/fe9e02c5a96429fce1a1d15a517f5d8444f9c412e0bb9eadfbe3b0fc55bf/tornado-6.5.5-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3f54aa540bdbfee7b9eb268ead60e7d199de5021facd276819c193c0fb28ea4e", size = 448039, upload-time = "2026-03-10T21:30:53.52Z" },
{ url = "https://files.pythonhosted.org/packages/82/9e/656ee4cec0398b1d18d0f1eb6372c41c6b889722641d84948351ae19556d/tornado-6.5.5-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:36abed1754faeb80fbd6e64db2758091e1320f6bba74a4cf8c09cd18ccce8aca", size = 447445, upload-time = "2026-03-10T21:30:55.541Z" },
]
[[package]]