Compare commits


26 Commits

Author SHA1 Message Date
Trenton Holmes
c5881f75c9 Bumps version to 2.1.3 2023-12-15 17:05:41 -08:00
Trenton Holmes
c4b7429e99 Merge remote-tracking branch 'origin/dev' 2023-12-15 17:05:16 -08:00
github-actions[bot]
b1eced3612 New Crowdin translations by GitHub Action (#4967)
Co-authored-by: Crowdin Bot <support+bot@crowdin.com>
2023-12-15 17:03:38 -08:00
Adam Bogdał
9d5b07537d Reduce number of db queries (#4990) 2023-12-15 11:36:25 -08:00
Trenton H
122e4141b0 Fix: Document metadata is lost during barcode splitting (#4982)
* Fixes barcode splitting dropping metadata that might be needed for the round 2
2023-12-15 09:17:25 -08:00
Trenton H
be2de4f15d Fixes export of custom field instances during a split manifest export (#4984) 2023-12-14 19:23:39 -08:00
Trenton H
92a920021d Apply user arguments even in the case of the safe fallback to forcing OCR (#4981) 2023-12-14 11:20:47 -08:00
shamoon
72000cac36 Fix: show errors for select dropdowns (#4979) 2023-12-14 10:05:36 -08:00
Adam Bogdał
4510902677 Fix: Don't attempt to parse none objects during date searching 2023-12-14 07:39:49 -08:00
github-actions[bot]
c2b9d2fa7b [Documentation] Add v2.1.2 changelog (#4960)
* Changelog v2.1.2 - GHA

* Fix mis-categorized PR

---------

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com>
2023-12-12 18:15:37 -08:00
Trenton Holmes
cd38c39908 Resets to -dev version string 2023-12-12 17:42:20 -08:00
Trenton Holmes
9016a1e6df Bumps version to 2.1.2 2023-12-12 17:41:26 -08:00
github-actions[bot]
627254d5a7 New Crowdin translations by GitHub Action (#4892)
Co-authored-by: Crowdin Bot <support+bot@crowdin.com>
2023-12-12 17:29:33 -08:00
shamoon
ff31558252 Fix: Sort consumption templates by order by default (#4956) 2023-12-12 16:27:26 +00:00
Trenton H
9454978264 Updates gotenberg-client, including workaround for Gotenberg handling of non-latin filenames (#4944) 2023-12-12 15:05:33 +00:00
shamoon
e2d25a7a09 Chore: reorganize api tests (#4935)
* Move permissions-related API tests

* Move bulk-edit-related API tests

* Move bulk-download-related API tests

* Move uisettings-related API tests

* Move remoteversion-related API tests

* Move tasks API tests

* Move object-related API tests

* Move consumption-template-related API tests

* Rename pared-down documents API test file

Co-Authored-By: Trenton H <797416+stumpylog@users.noreply.github.com>
2023-12-12 04:08:51 +00:00
dependabot[bot]
85f824f032 Chore(deps-dev): Bump the small-changes group with 2 updates (#4942)
Bumps the small-changes group with 2 updates: [pre-commit](https://github.com/pre-commit/pre-commit) and [mkdocs-glightbox](https://github.com/Blueswen/mkdocs-glightbox).


Updates `pre-commit` from 3.5.0 to 3.6.0
- [Release notes](https://github.com/pre-commit/pre-commit/releases)
- [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md)
- [Commits](https://github.com/pre-commit/pre-commit/compare/v3.5.0...v3.6.0)

Updates `mkdocs-glightbox` from 0.3.4 to 0.3.5
- [Release notes](https://github.com/Blueswen/mkdocs-glightbox/releases)
- [Changelog](https://github.com/blueswen/mkdocs-glightbox/blob/main/CHANGELOG)
- [Commits](https://github.com/Blueswen/mkdocs-glightbox/compare/v0.3.4...v0.3.5)

---
updated-dependencies:
- dependency-name: pre-commit
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: small-changes
- dependency-name: mkdocs-glightbox
  dependency-type: direct:development
  update-type: version-update:semver-patch
  dependency-group: small-changes
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-11 19:40:33 -08:00
shamoon
1a48910e6b Fix: allow text copy in pngx pdf viewer (#4938) 2023-12-12 01:06:30 +00:00
dependabot[bot]
bffd5829d0 Chore(deps-dev): Bump the development group with 1 update (#4939)
Bumps the development group with 1 update: [mkdocs-material](https://github.com/squidfunk/mkdocs-material).

- [Release notes](https://github.com/squidfunk/mkdocs-material/releases)
- [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG)
- [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.4.14...9.5.2)

---
updated-dependencies:
- dependency-name: mkdocs-material
  dependency-type: direct:development
  update-type: version-update:semver-minor
  dependency-group: development
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-12-11 15:24:01 -08:00
Trenton H
7e12bd1bef Fix: Removes the FieldParser plugin from autocomplete searches (#4934) 2023-12-11 10:21:58 -08:00
Adam Bogdał
af0817ab74 Fix: Convert search dates to UTC in advanced search (#4891)
* Index documents using local timezone

* Add local date parser
2023-12-11 09:32:43 -08:00
Trenton H
fbf1a051a2 Use the attachment filename so downstream template matching works against it (#4931) 2023-12-11 09:08:42 -08:00
shamoon
7ecf7f704a Fix: frontend handle autocomplete failure gracefully (#4903) 2023-12-11 15:41:40 +00:00
Tom Hoover
7b7a74d821 Fix: Correct spelling of 'initialization' in install script (#4928) 2023-12-11 15:10:42 +00:00
shamoon
e4acc33519 Add -dev to version string 2023-12-07 22:20:48 -08:00
github-actions[bot]
2fd141d914 [Documentation] Add v2.1.1 changelog (#4886)
* Changelog v2.1.1 - GHA

* Fix incorrectly categorized PR

---------

Co-authored-by: github-actions <41898282+github-actions[bot]@users.noreply.github.com>
Co-authored-by: shamoon <4887959+shamoon@users.noreply.github.com>
2023-12-07 22:15:29 -08:00
51 changed files with 6928 additions and 6625 deletions

Pipfile.lock (generated)
View File

@@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "b395058a24154f74cb1f2d685d51de3f1028ecb48389fac9971209e258a15543"
"sha256": "d7ef8db734997cda7c11971f2ddb66bf1918f4232b0956a9bf604c41763ce461"
},
"pipfile-spec": 6,
"requires": {},
@@ -566,12 +566,12 @@
},
"gotenberg-client": {
"hashes": [
"sha256:3026726d1a47f41e9d43f18c95e530ff64f506e2ec436f116a088da27c7430da",
"sha256:c2555f7401faa48213a7cbe29c5e4a68316a003a6953753bc58d1e2b19873771"
"sha256:69e9dd5264b75ed0ba1f9eebebdc750b13d190710fd82ca0670d161c249155c9",
"sha256:dd0f49d3d4e01399949f39ac5024a5512566c8ded6ee457a336a5f77ce4c1a25"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==0.4.0"
"version": "==0.4.1"
},
"gunicorn": {
"hashes": [
@@ -2707,7 +2707,6 @@
"sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e",
"sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==3.13.1"
},
@@ -2754,11 +2753,11 @@
},
"identify": {
"hashes": [
"sha256:7736b3c7a28233637e3c36550646fc6389bedd74ae84cb788200cc8e2dd60b75",
"sha256:90199cb9e7bd3c5407a9b7e81b4abec4bb9d249991c79439ec8af740afc6293d"
"sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d",
"sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"
],
"markers": "python_version >= '3.8'",
"version": "==2.5.31"
"version": "==2.5.33"
},
"idna": {
"hashes": [
@@ -2899,20 +2898,20 @@
},
"mkdocs-glightbox": {
"hashes": [
"sha256:8f894435b4f75231164e5d9fb023c01e922e6769e74a121e822c4914f310a41d",
"sha256:96aaf98216f83c0d0fad2e42a8d805cfa6329d6ab25b54265012ccb2154010d8"
"sha256:096c2753cf4f46f548b02070a2ff5dd8b823a431ce17873a62dcef304cf3364c",
"sha256:f572256cca17c912da50a045129026566a79b8c6477e1170258ccc0ac5b162da"
],
"index": "pypi",
"version": "==0.3.4"
"version": "==0.3.5"
},
"mkdocs-material": {
"hashes": [
"sha256:a511d3ff48fa8718b033e7e37d17abd9cc1de0fdf0244a625ca2ae2387e2416d",
"sha256:dbc78a4fea97b74319a6aa9a2f0be575a6028be6958f813ba367188f7b8428f6"
"sha256:6ed0fbf4682491766f0ec1acc955db6901c2fd424c7ab343964ef51b819741f5",
"sha256:ca8b9cd2b3be53e858e5a1a45ac9668bd78d95d77a30288bb5ebc1a31db6184c"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==9.4.14"
"version": "==9.5.2"
},
"mkdocs-material-extensions": {
"hashes": [
@@ -2996,11 +2995,11 @@
},
"pathspec": {
"hashes": [
"sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20",
"sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"
"sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08",
"sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"
],
"markers": "python_version >= '3.7'",
"version": "==0.11.2"
"markers": "python_version >= '3.8'",
"version": "==0.12.1"
},
"pillow": {
"hashes": [
@@ -3080,12 +3079,12 @@
},
"pre-commit": {
"hashes": [
"sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32",
"sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"
"sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376",
"sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"
],
"index": "pypi",
"markers": "python_version >= '3.8'",
"version": "==3.5.0"
"markers": "python_version >= '3.9'",
"version": "==3.6.0"
},
"pyasn1": {
"hashes": [
@@ -3473,11 +3472,11 @@
},
"setuptools": {
"hashes": [
"sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87",
"sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"
"sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2",
"sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"
],
"markers": "python_version >= '3.8'",
"version": "==68.2.2"
"version": "==69.0.2"
},
"six": {
"hashes": [
@@ -3548,11 +3547,11 @@
},
"virtualenv": {
"hashes": [
"sha256:02ece4f56fbf939dbbc33c0715159951d6bf14aaf5457b092e4548e1382455af",
"sha256:520d056652454c5098a00c0f073611ccbea4c79089331f60bf9d7ba247bb7381"
"sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3",
"sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"
],
"markers": "python_version >= '3.7'",
"version": "==20.24.6"
"version": "==20.25.0"
},
"watchdog": {
"hashes": [

View File

@@ -1,5 +1,64 @@
# Changelog
## paperless-ngx 2.1.2
### Bug Fixes
- Fix: sort consumption templates by order by default [@shamoon](https://github.com/shamoon) ([#4956](https://github.com/paperless-ngx/paperless-ngx/pull/4956))
- Fix: Updates gotenberg-client, including workaround for Gotenberg non-latin handling [@stumpylog](https://github.com/stumpylog) ([#4944](https://github.com/paperless-ngx/paperless-ngx/pull/4944))
- Fix: allow text copy in pngx pdf viewer [@shamoon](https://github.com/shamoon) ([#4938](https://github.com/paperless-ngx/paperless-ngx/pull/4938))
- Fix: Don't allow autocomplete searches to fail on schema field matches [@stumpylog](https://github.com/stumpylog) ([#4934](https://github.com/paperless-ngx/paperless-ngx/pull/4934))
- Fix: Convert search dates to UTC in advanced search [@bogdal](https://github.com/bogdal) ([#4891](https://github.com/paperless-ngx/paperless-ngx/pull/4891))
- Fix: Use the attachment filename so downstream template matching works [@stumpylog](https://github.com/stumpylog) ([#4931](https://github.com/paperless-ngx/paperless-ngx/pull/4931))
- Fix: frontend handle autocomplete failure gracefully [@shamoon](https://github.com/shamoon) ([#4903](https://github.com/paperless-ngx/paperless-ngx/pull/4903))
### Dependencies
- Chore(deps-dev): Bump the small-changes group with 2 updates [@dependabot](https://github.com/dependabot) ([#4942](https://github.com/paperless-ngx/paperless-ngx/pull/4942))
- Chore(deps-dev): Bump the development group with 1 update [@dependabot](https://github.com/dependabot) ([#4939](https://github.com/paperless-ngx/paperless-ngx/pull/4939))
### All App Changes
<details>
<summary>9 changes</summary>
- Fix: sort consumption templates by order by default [@shamoon](https://github.com/shamoon) ([#4956](https://github.com/paperless-ngx/paperless-ngx/pull/4956))
- Chore: reorganize api tests [@shamoon](https://github.com/shamoon) ([#4935](https://github.com/paperless-ngx/paperless-ngx/pull/4935))
- Chore(deps-dev): Bump the small-changes group with 2 updates [@dependabot](https://github.com/dependabot) ([#4942](https://github.com/paperless-ngx/paperless-ngx/pull/4942))
- Fix: allow text copy in pngx pdf viewer [@shamoon](https://github.com/shamoon) ([#4938](https://github.com/paperless-ngx/paperless-ngx/pull/4938))
- Chore(deps-dev): Bump the development group with 1 update [@dependabot](https://github.com/dependabot) ([#4939](https://github.com/paperless-ngx/paperless-ngx/pull/4939))
- Fix: Don't allow autocomplete searches to fail on schema field matches [@stumpylog](https://github.com/stumpylog) ([#4934](https://github.com/paperless-ngx/paperless-ngx/pull/4934))
- Fix: Convert search dates to UTC in advanced search [@bogdal](https://github.com/bogdal) ([#4891](https://github.com/paperless-ngx/paperless-ngx/pull/4891))
- Fix: Use the attachment filename so downstream template matching works [@stumpylog](https://github.com/stumpylog) ([#4931](https://github.com/paperless-ngx/paperless-ngx/pull/4931))
- Fix: frontend handle autocomplete failure gracefully [@shamoon](https://github.com/shamoon) ([#4903](https://github.com/paperless-ngx/paperless-ngx/pull/4903))
</details>
## paperless-ngx 2.1.1
### Bug Fixes
- Fix: disable toggle for share link creation without archive version, fix auto-copy in Safari [@shamoon](https://github.com/shamoon) ([#4885](https://github.com/paperless-ngx/paperless-ngx/pull/4885))
- Fix: storage paths link incorrect in dashboard widget [@shamoon](https://github.com/shamoon) ([#4878](https://github.com/paperless-ngx/paperless-ngx/pull/4878))
- Fix: respect baseURI for pdfjs worker URL [@shamoon](https://github.com/shamoon) ([#4865](https://github.com/paperless-ngx/paperless-ngx/pull/4865))
- Fix: Allow users to configure the From email for password reset [@stumpylog](https://github.com/stumpylog) ([#4867](https://github.com/paperless-ngx/paperless-ngx/pull/4867))
- Fix: dont show move icon for file tasks badge [@shamoon](https://github.com/shamoon) ([#4860](https://github.com/paperless-ngx/paperless-ngx/pull/4860))
### Maintenance
- Chore: Simplifies how the documentation site is deployed [@stumpylog](https://github.com/stumpylog) ([#4858](https://github.com/paperless-ngx/paperless-ngx/pull/4858))
### All App Changes
<details>
<summary>5 changes</summary>
- Fix: disable toggle for share link creation without archive version, fix auto-copy in Safari [@shamoon](https://github.com/shamoon) ([#4885](https://github.com/paperless-ngx/paperless-ngx/pull/4885))
- Fix: storage paths link incorrect in dashboard widget [@shamoon](https://github.com/shamoon) ([#4878](https://github.com/paperless-ngx/paperless-ngx/pull/4878))
- Fix: respect baseURI for pdfjs worker URL [@shamoon](https://github.com/shamoon) ([#4865](https://github.com/paperless-ngx/paperless-ngx/pull/4865))
- Fix: Allow users to configure the From email for password reset [@stumpylog](https://github.com/stumpylog) ([#4867](https://github.com/paperless-ngx/paperless-ngx/pull/4867))
- Fix: dont show move icon for file tasks badge [@shamoon](https://github.com/shamoon) ([#4860](https://github.com/paperless-ngx/paperless-ngx/pull/4860))
</details>
## paperless-ngx 2.1.0
### Features

View File

@@ -733,7 +733,7 @@ they use underscores instead of dashes.
Paperless has been tested to work with the OCR options provided
above. There are many options that are incompatible with each other,
so specifying invalid options may prevent paperless from consuming
any documents.
any documents. Use with caution!
Specify arguments as a JSON dictionary. Keep note of lower case
booleans and double quoted parameter names and strings. Examples:
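For reference, a value that follows these rules might look like the sketch below, assuming the PAPERLESS_OCR_USER_ARGS setting this documentation section describes and standard OCRmyPDF option names (illustrative only, not part of the diff above):

PAPERLESS_OCR_USER_ARGS='{"deskew": true, "rotate_pages": true, "rotate_pages_threshold": 12.0}'

Note the lower-case boolean and the double-quoted option names, as required above.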

View File

@@ -380,7 +380,7 @@ fi
docker compose pull
if [ "$DATABASE_BACKEND" == "postgres" ] || [ "$DATABASE_BACKEND" == "mariadb" ] ; then
echo "Starting DB first for initilzation"
echo "Starting DB first for initialization"
docker compose up --detach db
# hopefully enough time for even the slower systems
sleep 15

View File

@@ -19,7 +19,7 @@ import { SETTINGS_KEYS } from 'src/app/data/paperless-uisettings'
import { RemoteVersionService } from 'src/app/services/rest/remote-version.service'
import { IfPermissionsDirective } from 'src/app/directives/if-permissions.directive'
import { FormsModule, ReactiveFormsModule } from '@angular/forms'
import { of, throwError } from 'rxjs'
import { Observable, of, tap, throwError } from 'rxjs'
import { ToastService } from 'src/app/services/toast.service'
import { environment } from 'src/environments/environment'
import { OpenDocumentsService } from 'src/app/services/open-documents.service'
@@ -298,6 +298,21 @@ describe('AppFrameComponent', () => {
expect(autocompleteSpy).toHaveBeenCalled()
}))
it('should handle autocomplete backend failure gracefully', fakeAsync(() => {
const serviceAutocompleteSpy = jest.spyOn(searchService, 'autocomplete')
serviceAutocompleteSpy.mockReturnValue(
throwError(() => new Error('autcomplete failed'))
)
// serviceAutocompleteSpy.mockReturnValue(of([' world']))
let result
component.searchAutoComplete(of('hello')).subscribe((res) => {
result = res
})
tick(250)
expect(serviceAutocompleteSpy).toHaveBeenCalled()
expect(result).toEqual([])
}))
it('should support reset search field', () => {
const resetSpy = jest.spyOn(component, 'resetSearchField')
const input = (fixture.nativeElement as HTMLDivElement).querySelector(

View File

@@ -8,6 +8,7 @@ import {
map,
switchMap,
first,
catchError,
} from 'rxjs/operators'
import { PaperlessDocument } from 'src/app/data/paperless-document'
import { OpenDocumentsService } from 'src/app/services/open-documents.service'
@@ -166,7 +167,13 @@ export class AppFrameComponent
}
}),
switchMap((term) =>
term.length < 2 ? from([[]]) : this.searchService.autocomplete(term)
term.length < 2
? from([[]])
: this.searchService.autocomplete(term).pipe(
catchError(() => {
return from([[]])
})
)
)
)

View File

@@ -17,7 +17,7 @@
<div class="col-md-4">
<h5 class="border-bottom pb-2" i18n>Filters</h5>
<p class="small" i18n>Process documents that match <em>all</em> filters specified below.</p>
<pngx-input-select i18n-title title="Filter sources" [items]="sourceOptions" [multiple]="true" formControlName="sources" [error]="error?.filter_filename"></pngx-input-select>
<pngx-input-select i18n-title title="Filter sources" [items]="sourceOptions" [multiple]="true" formControlName="sources" [error]="error?.sources"></pngx-input-select>
<pngx-input-text i18n-title title="Filter filename" formControlName="filter_filename" i18n-hint hint="Apply to documents that match this filename. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive." [error]="error?.filter_filename"></pngx-input-text>
<pngx-input-text i18n-title title="Filter path" formControlName="filter_path" i18n-hint hint="Apply to documents that match this path. Wildcards specified as * are allowed. Case insensitive.</a>" [error]="error?.filter_path"></pngx-input-text>
<pngx-input-select i18n-title title="Filter mail rule" [items]="mailRules" [allowNull]="true" formControlName="filter_mailrule" i18n-hint hint="Apply to documents consumed via this mail rule." [error]="error?.filter_mailrule"></pngx-input-select>

View File

@@ -9,7 +9,7 @@
</button>
</div>
<div [class.col-md-9]="horizontal">
<div [class.input-group]="allowCreateNew || showFilter">
<div [class.input-group]="allowCreateNew || showFilter" [class.is-invalid]="error">
<ng-select name="inputId" [(ngModel)]="value"
[disabled]="disabled"
[style.color]="textColor"
@@ -42,6 +42,9 @@
</svg>
</button>
</div>
<div class="invalid-feedback">
{{error}}
</div>
<small *ngIf="hint" class="form-text text-muted">{{hint}}</small>
<small *ngIf="getSuggestions().length > 0">
<span i18n>Suggestions:</span>&nbsp;

View File

@@ -17,3 +17,12 @@
font-style: italic;
opacity: .75;
}
::ng-deep .is-invalid ng-select .ng-select-container input {
// replicate bootstrap
padding-right: calc(1.5em + 0.75rem) !important;
background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 12' width='12' height='12' fill='none' stroke='%23dc3545'%3e%3ccircle cx='6' cy='6' r='4.5'/%3e%3cpath stroke-linejoin='round' d='M5.8 3.6h.4L6 6.5z'/%3e%3ccircle cx='6' cy='8.2' r='.6' fill='%23dc3545' stroke='none'/%3e%3c/svg%3e") !important;
background-repeat: no-repeat !important;
background-position: right calc(0.375em + 0.1875rem) center !important;
background-size: calc(0.75em + 0.375rem) calc(0.75em + 0.375rem) !important;
}

View File

@@ -258,7 +258,6 @@
[(page)]="previewCurrentPage"
[zoom-scale]="previewZoomScale"
[zoom]="previewZoomSetting"
[render-text-mode]="2"
(error)="onError($event)"
(after-load-complete)="pdfPreviewLoaded($event)">
</pngx-pdf-viewer>

View File

@@ -5,7 +5,7 @@ export const environment = {
apiBaseUrl: document.baseURI + 'api/',
apiVersion: '3',
appTitle: 'Paperless-ngx',
version: '2.1.1',
version: '2.1.3',
webSocketHost: window.location.host,
webSocketProtocol: window.location.protocol == 'https:' ? 'wss:' : 'ws:',
webSocketBaseUrl: base_url.pathname + 'ws/',

View File

@@ -2811,7 +2811,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/consumption-template-edit-dialog/consumption-template-edit-dialog.component.html</context>
<context context-type="linenumber">38</context>
</context-group>
<target state="needs-translation">Assign custom fields</target>
<target state="translated">Assigna camnps personalitzats</target>
</trans-unit>
<trans-unit id="5057200219587080996" datatype="html">
<source>Assign owner</source>
@@ -3896,7 +3896,7 @@
<context context-type="sourcefile">src/app/components/common/input/document-link/document-link.component.ts</context>
<context context-type="linenumber">44</context>
</context-group>
<target state="needs-translation">No documents found</target>
<target state="translated">Documents no trobats</target>
</trans-unit>
<trans-unit id="5554528553553249088" datatype="html">
<source>Show password</source>
@@ -4700,7 +4700,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">9</context>
</context-group>
<target state="needs-translation">-</target>
<target state="translated">-</target>
</trans-unit>
<trans-unit id="8479257185772414452" datatype="html">
<source>+</source>
@@ -4708,7 +4708,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">+</target>
<target state="translated">+</target>
</trans-unit>
<trans-unit id="8659635229098859487" datatype="html">
<source>Download original</source>
@@ -5132,7 +5132,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">781</context>
</context-group>
<target state="needs-translation">Page Fit</target>
<target state="translated">Encaix Pàgina</target>
</trans-unit>
<trans-unit id="6857598786757174736" datatype="html">
<source>Select:</source>
@@ -6870,7 +6870,7 @@
<context context-type="sourcefile">src/app/data/paperless-custom-field.ts</context>
<context context-type="linenumber">45</context>
</context-group>
<target state="needs-translation">Document Link</target>
<target state="translated">Enllaç Document</target>
</trans-unit>
<trans-unit id="5948496158474272829" datatype="html">
<source>Warning: You have unsaved changes to your document(s).</source>

View File

@@ -10,13 +10,13 @@
</context-group>
<target state="final">Fermer</target>
</trans-unit>
<trans-unit id="ngb.timepicker.HH" datatype="html">
<trans-unit id="ngb.timepicker.HH" datatype="html" approved="yes">
<source>HH</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
<target state="translated">HH</target>
<target state="final">HH</target>
</trans-unit>
<trans-unit id="ngb.toast.close-aria" datatype="html" approved="yes">
<source>Close</source>
@@ -100,13 +100,13 @@
</context-group>
<target state="final">Précédent</target>
</trans-unit>
<trans-unit id="ngb.timepicker.MM" datatype="html">
<trans-unit id="ngb.timepicker.MM" datatype="html" approved="yes">
<source>MM</source>
<context-group purpose="location">
<context context-type="sourcefile">node_modules/src/ngb-config.ts</context>
<context context-type="linenumber">13</context>
</context-group>
<target state="translated">MM</target>
<target state="final">MM</target>
</trans-unit>
<trans-unit id="ngb.pagination.next" datatype="html" approved="yes">
<source>»</source>
@@ -3896,7 +3896,7 @@
<context context-type="sourcefile">src/app/components/common/input/document-link/document-link.component.ts</context>
<context context-type="linenumber">44</context>
</context-group>
<target state="needs-translation">No documents found</target>
<target state="translated">Aucun document trouvé</target>
</trans-unit>
<trans-unit id="5554528553553249088" datatype="html">
<source>Show password</source>
@@ -4700,7 +4700,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">9</context>
</context-group>
<target state="needs-translation">-</target>
<target state="translated">-</target>
</trans-unit>
<trans-unit id="8479257185772414452" datatype="html">
<source>+</source>
@@ -4708,7 +4708,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">+</target>
<target state="translated">+</target>
</trans-unit>
<trans-unit id="8659635229098859487" datatype="html" approved="yes">
<source>Download original</source>
@@ -6870,7 +6870,7 @@
<context context-type="sourcefile">src/app/data/paperless-custom-field.ts</context>
<context context-type="linenumber">45</context>
</context-group>
<target state="needs-translation">Document Link</target>
<target state="translated">Lien du Document</target>
</trans-unit>
<trans-unit id="5948496158474272829" datatype="html" approved="yes">
<source>Warning: You have unsaved changes to your document(s).</source>

View File

@@ -476,7 +476,7 @@
<context context-type="sourcefile">src/app/components/admin/tasks/tasks.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">Auto refresh</target>
<target state="translated">ריענון אוטומטי</target>
</trans-unit>
<trans-unit id="3894950702316166331" datatype="html">
<source>Loading...</source>
@@ -908,7 +908,7 @@
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
<context context-type="linenumber">174</context>
</context-group>
<target state="needs-translation">Default Permissions</target>
<target state="translated">הרשאות ברירת מחדל</target>
</trans-unit>
<trans-unit id="8222269449891326545" datatype="html">
<source> Settings apply to this user account for objects (Tags, Mail Rules, etc.) created via the web UI </source>

View File

@@ -288,7 +288,7 @@
<context context-type="sourcefile">src/app/app.component.ts</context>
<context context-type="linenumber">90</context>
</context-group>
<target state="needs-translation">Document <x id="PH" equiv-text="status.filename"/> was added to Paperless-ngx.</target>
<target state="translated">Dokument <x id="PH" equiv-text="status.filename"/> je dodan u Paperless-ngx.</target>
</trans-unit>
<trans-unit id="1931214133925051574" datatype="html">
<source>Open document</source>
@@ -316,7 +316,7 @@
<context context-type="sourcefile">src/app/app.component.ts</context>
<context context-type="linenumber">120</context>
</context-group>
<target state="needs-translation">Document <x id="PH" equiv-text="status.filename"/> is being processed by Paperless-ngx.</target>
<target state="translated">Dokument <x id="PH" equiv-text="status.filename"/> je u fazi obrade.</target>
</trans-unit>
<trans-unit id="2501522447884928778" datatype="html">
<source>Prev</source>
@@ -476,7 +476,7 @@
<context context-type="sourcefile">src/app/components/admin/tasks/tasks.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">Auto refresh</target>
<target state="translated">Automatsko osvježavanje</target>
</trans-unit>
<trans-unit id="3894950702316166331" datatype="html">
<source>Loading...</source>
@@ -596,7 +596,7 @@
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">General</target>
<target state="translated">Općenito</target>
</trans-unit>
<trans-unit id="8671234314555525900" datatype="html">
<source>Appearance</source>
@@ -916,7 +916,7 @@
<context context-type="sourcefile">src/app/components/admin/settings/settings.component.html</context>
<context context-type="linenumber">178,180</context>
</context-group>
<target state="needs-translation"> Settings apply to this user account for objects (Tags, Mail Rules, etc.) created via the web UI </target>
<target state="translated"> Postavke ovog korisničkog računa za objekte (Oznake, Pravila za e-poštu, itd.) stvorene putem web sučelja </target>
</trans-unit>
<trans-unit id="4292903881380648974" datatype="html">
<source>Default Owner</source>
@@ -2074,7 +2074,7 @@
<context context-type="sourcefile">src/app/components/admin/users-groups/users-groups.component.ts</context>
<context context-type="linenumber">124</context>
</context-group>
<target state="needs-translation">Deleted user</target>
<target state="translated">Izbrisani korisnik</target>
</trans-unit>
<trans-unit id="1942566571910298572" datatype="html">
<source>Error deleting user.</source>
@@ -2479,7 +2479,7 @@
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.ts</context>
<context context-type="linenumber">276</context>
</context-group>
<target state="needs-translation">An error occurred while saving update checking settings.</target>
<target state="translated">Došlo je do pogreške prilikom spremanja postavki ažuriranja.</target>
</trans-unit>
<trans-unit id="8700121026680200191" datatype="html">
<source>Clear</source>
@@ -3039,7 +3039,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/custom-field-edit-dialog/custom-field-edit-dialog.component.html</context>
<context context-type="linenumber">9</context>
</context-group>
<target state="needs-translation">Data type</target>
<target state="translated">Tip podataka</target>
</trans-unit>
<trans-unit id="5933665691581884232" datatype="html">
<source>Data type cannot be changed after a field is created</source>
@@ -3047,7 +3047,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/custom-field-edit-dialog/custom-field-edit-dialog.component.html</context>
<context context-type="linenumber">10</context>
</context-group>
<target state="needs-translation">Data type cannot be changed after a field is created</target>
<target state="translated">Tip podataka ne može se promijeniti nakon što je polje stvoreno</target>
</trans-unit>
<trans-unit id="528950215505228201" datatype="html">
<source>Create new custom field</source>
@@ -3175,7 +3175,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-account-edit-dialog/mail-account-edit-dialog.component.html</context>
<context context-type="linenumber">19</context>
</context-group>
<target state="needs-translation">Character Set</target>
<target state="translated">Skup znakova</target>
</trans-unit>
<trans-unit id="6563391987554512024" datatype="html">
<source>Test</source>

View File

@@ -1958,7 +1958,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">144</context>
</context-group>
<target state="needs-translation">Password has been changed, you will be logged out momentarily.</target>
<target state="translated">A senha foi alterada, você será deslogado momentaneamente.</target>
</trans-unit>
<trans-unit id="2753185112875184719" datatype="html">
<source>Saved user &quot;<x id="PH" equiv-text="newUser.username"/>&quot;.</source>
@@ -2163,7 +2163,7 @@
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
<context context-type="linenumber">45</context>
</context-group>
<target state="needs-translation">My Profile</target>
<target state="translated">Meu Perfil</target>
</trans-unit>
<trans-unit id="3797778920049399855" datatype="html" approved="yes">
<source>Logout</source>
@@ -4086,7 +4086,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">3</context>
</context-group>
<target state="needs-translation">Edit Profile</target>
<target state="translated">Editar Perfil</target>
</trans-unit>
<trans-unit id="8214169742072920158" datatype="html">
<source>Confirm Email</source>
@@ -4094,7 +4094,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">13</context>
</context-group>
<target state="needs-translation">Confirm Email</target>
<target state="translated">Confirmar Email</target>
</trans-unit>
<trans-unit id="3241357959735682038" datatype="html">
<source>Confirm Password</source>
@@ -4102,7 +4102,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">23</context>
</context-group>
<target state="needs-translation">Confirm Password</target>
<target state="translated">Confirmar Senha</target>
</trans-unit>
<trans-unit id="7554924397178347823" datatype="html">
<source>API Auth Token</source>
@@ -4110,7 +4110,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">31</context>
</context-group>
<target state="needs-translation">API Auth Token</target>
<target state="translated">Token de autenticação da API</target>
</trans-unit>
<trans-unit id="4323470180912194028" datatype="html">
<source>Copy</source>
@@ -4134,7 +4134,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">41</context>
</context-group>
<target state="needs-translation">Regenerate auth token</target>
<target state="translated">Regerar token de autenticação</target>
</trans-unit>
<trans-unit id="5392341774767336507" datatype="html">
<source>Copied!</source>
@@ -4154,7 +4154,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">49</context>
</context-group>
<target state="needs-translation">Warning: changing the token cannot be undone</target>
<target state="translated">Aviso: alterar o token não pode ser desfeito</target>
</trans-unit>
<trans-unit id="6141884091799403188" datatype="html">
<source>Emails must match</source>
@@ -4162,7 +4162,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">94</context>
</context-group>
<target state="needs-translation">Emails must match</target>
<target state="translated">Os e-mails devem ser iguais</target>
</trans-unit>
<trans-unit id="5281933990298241826" datatype="html">
<source>Passwords must match</source>
@@ -4170,7 +4170,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">122</context>
</context-group>
<target state="needs-translation">Passwords must match</target>
<target state="translated">As senhas devem ser iguais</target>
</trans-unit>
<trans-unit id="4219429959475101385" datatype="html">
<source>Profile updated successfully</source>
@@ -4178,7 +4178,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">141</context>
</context-group>
<target state="needs-translation">Profile updated successfully</target>
<target state="translated">Perfil atualizado com sucesso</target>
</trans-unit>
<trans-unit id="3417726855410304962" datatype="html">
<source>Error saving profile</source>
@@ -4186,7 +4186,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">153</context>
</context-group>
<target state="needs-translation">Error saving profile</target>
<target state="translated">Erro ao salvar o perfil</target>
</trans-unit>
<trans-unit id="154249228726292516" datatype="html">
<source>Error generating auth token</source>
@@ -4194,7 +4194,7 @@ Curingas como *.pdf ou *invoice* são permitidos. Sem diferenciação de maiúsc
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">170</context>
</context-group>
<target state="needs-translation">Error generating auth token</target>
<target state="translated">Erro ao gerar token de autenticação</target>
</trans-unit>
<trans-unit id="3797570084942068182" datatype="html" approved="yes">
<source>Select</source>

File diff suppressed because it is too large

View File

@@ -476,7 +476,7 @@
<context context-type="sourcefile">src/app/components/admin/tasks/tasks.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">Auto refresh</target>
<target state="translated">Automatsko osvеžavanjе</target>
</trans-unit>
<trans-unit id="3894950702316166331" datatype="html">
<source>Loading...</source>
@@ -2163,7 +2163,7 @@
<context context-type="sourcefile">src/app/components/app-frame/app-frame.component.html</context>
<context context-type="linenumber">45</context>
</context-group>
<target state="needs-translation">My Profile</target>
<target state="translated">Moj profil</target>
</trans-unit>
<trans-unit id="3797778920049399855" datatype="html">
<source>Logout</source>
@@ -2811,7 +2811,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/consumption-template-edit-dialog/consumption-template-edit-dialog.component.html</context>
<context context-type="linenumber">38</context>
</context-group>
<target state="needs-translation">Assign custom fields</target>
<target state="translated">Dodeli dodatno polje</target>
</trans-unit>
<trans-unit id="5057200219587080996" datatype="html">
<source>Assign owner</source>
@@ -3355,7 +3355,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
<context context-type="linenumber">24</context>
</context-group>
<target state="needs-translation">Filter attachment filename includes</target>
<target state="translated">Filter naziva fajla priloga uključuje</target>
</trans-unit>
<trans-unit id="4245210767172267486" datatype="html">
<source>Only consume documents which entirely match this filename if specified. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive.</source>
@@ -3371,7 +3371,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
<context context-type="linenumber">25</context>
</context-group>
<target state="needs-translation">Filter attachment filename excluding</target>
<target state="translated">Filter naziva fajla priloga isključuje</target>
</trans-unit>
<trans-unit id="6774472763442688477" datatype="html">
<source>Do not consume documents which entirely match this filename if specified. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive.</source>
@@ -3379,7 +3379,7 @@
<context context-type="sourcefile">src/app/components/common/edit-dialog/mail-rule-edit-dialog/mail-rule-edit-dialog.component.html</context>
<context context-type="linenumber">25</context>
</context-group>
<target state="needs-translation">Do not consume documents which entirely match this filename if specified. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive.</target>
<target state="translated">Neće obrađivati dokumente koji se u potpunosti podudaraju sa ovim imenom datoteke ako je navedeno. Dopušteni su zamenski znakovi kao što su *.pdf ili *faktura*. Neosetljivo je na mala i mala slova.</target>
</trans-unit>
<trans-unit id="9216117865911519658" datatype="html">
<source>Action</source>
@@ -3896,7 +3896,7 @@
<context context-type="sourcefile">src/app/components/common/input/document-link/document-link.component.ts</context>
<context context-type="linenumber">44</context>
</context-group>
<target state="needs-translation">No documents found</target>
<target state="translated">Nije pronađen nijedan dokument</target>
</trans-unit>
<trans-unit id="5554528553553249088" datatype="html">
<source>Show password</source>
@@ -3904,7 +3904,7 @@
<context context-type="sourcefile">src/app/components/common/input/password/password.component.html</context>
<context context-type="linenumber">5</context>
</context-group>
<target state="needs-translation">Show password</target>
<target state="translated">Prikaži lozinku</target>
</trans-unit>
<trans-unit id="594042705136125260" datatype="html">
<source>Edit Permissions</source>
@@ -4085,7 +4085,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">3</context>
</context-group>
<target state="needs-translation">Edit Profile</target>
<target state="translated">Izmeni profil</target>
</trans-unit>
<trans-unit id="8214169742072920158" datatype="html">
<source>Confirm Email</source>
@@ -4093,7 +4093,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">13</context>
</context-group>
<target state="needs-translation">Confirm Email</target>
<target state="translated">Potvrdi mejl</target>
</trans-unit>
<trans-unit id="3241357959735682038" datatype="html">
<source>Confirm Password</source>
@@ -4101,7 +4101,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">23</context>
</context-group>
<target state="needs-translation">Confirm Password</target>
<target state="translated">Potvrdi lozinku</target>
</trans-unit>
<trans-unit id="7554924397178347823" datatype="html">
<source>API Auth Token</source>
@@ -4109,7 +4109,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">31</context>
</context-group>
<target state="needs-translation">API Auth Token</target>
<target state="translated">API Auth Token</target>
</trans-unit>
<trans-unit id="4323470180912194028" datatype="html">
<source>Copy</source>
@@ -4133,7 +4133,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">41</context>
</context-group>
<target state="needs-translation">Regenerate auth token</target>
<target state="translated">Ponovo generiši token autentifikacije</target>
</trans-unit>
<trans-unit id="5392341774767336507" datatype="html">
<source>Copied!</source>
@@ -4153,7 +4153,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.html</context>
<context context-type="linenumber">49</context>
</context-group>
<target state="needs-translation">Warning: changing the token cannot be undone</target>
<target state="translated">Upozorenje: promena tokena se ne može opozvati</target>
</trans-unit>
<trans-unit id="6141884091799403188" datatype="html">
<source>Emails must match</source>
@@ -4161,7 +4161,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">94</context>
</context-group>
<target state="needs-translation">Emails must match</target>
<target state="translated">Mejlovi se moraju poklapati</target>
</trans-unit>
<trans-unit id="5281933990298241826" datatype="html">
<source>Passwords must match</source>
@@ -4169,7 +4169,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">122</context>
</context-group>
<target state="needs-translation">Passwords must match</target>
<target state="translated">Lozinke se moraju poklapati</target>
</trans-unit>
<trans-unit id="4219429959475101385" datatype="html">
<source>Profile updated successfully</source>
@@ -4177,7 +4177,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">141</context>
</context-group>
<target state="needs-translation">Profile updated successfully</target>
<target state="translated">Profil je uspešno ažuriran</target>
</trans-unit>
<trans-unit id="3417726855410304962" datatype="html">
<source>Error saving profile</source>
@@ -4185,7 +4185,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">153</context>
</context-group>
<target state="needs-translation">Error saving profile</target>
<target state="translated">Greška prilikom čuvanja profila</target>
</trans-unit>
<trans-unit id="154249228726292516" datatype="html">
<source>Error generating auth token</source>
@@ -4193,7 +4193,7 @@
<context context-type="sourcefile">src/app/components/common/profile-edit-dialog/profile-edit-dialog.component.ts</context>
<context context-type="linenumber">170</context>
</context-group>
<target state="needs-translation">Error generating auth token</target>
<target state="translated">Greška prilikom generisanja auth tokena</target>
</trans-unit>
<trans-unit id="3797570084942068182" datatype="html">
<source>Select</source>
@@ -4700,7 +4700,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">9</context>
</context-group>
<target state="needs-translation">-</target>
<target state="translated">-</target>
</trans-unit>
<trans-unit id="8479257185772414452" datatype="html">
<source>+</source>
@@ -4708,7 +4708,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.html</context>
<context context-type="linenumber">15</context>
</context-group>
<target state="needs-translation">+</target>
<target state="translated">+</target>
</trans-unit>
<trans-unit id="8659635229098859487" datatype="html">
<source>Download original</source>
@@ -5132,7 +5132,7 @@
<context context-type="sourcefile">src/app/components/document-detail/document-detail.component.ts</context>
<context context-type="linenumber">781</context>
</context-group>
<target state="needs-translation">Page Fit</target>
<target state="translated">Uklopi stranu</target>
</trans-unit>
<trans-unit id="6857598786757174736" datatype="html">
<source>Select:</source>
@@ -6870,7 +6870,7 @@
<context context-type="sourcefile">src/app/data/paperless-custom-field.ts</context>
<context context-type="linenumber">45</context>
</context-group>
<target state="needs-translation">Document Link</target>
<target state="translated">Veza ka dokumentu</target>
</trans-unit>
<trans-unit id="5948496158474272829" datatype="html">
<source>Warning: You have unsaved changes to your document(s).</source>

View File

@@ -14,6 +14,8 @@ from pikepdf import Pdf
from PIL import Image
from documents.converters import convert_from_tiff_to_pdf
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.utils import copy_basic_file_stats
from documents.utils import copy_file_with_basic_stats
@@ -53,6 +55,7 @@ class BarcodeReader:
self.mime: Final[str] = mime_type
self.pdf_file: Path = self.file
self.barcodes: list[Barcode] = []
self._tiff_conversion_done = False
self.temp_dir: Optional[tempfile.TemporaryDirectory] = None
if settings.CONSUMER_BARCODE_TIFF_SUPPORT:
@@ -150,12 +153,14 @@ class BarcodeReader:
def convert_from_tiff_to_pdf(self):
"""
May convert a TIFF image into a PDF, if the input is a TIFF
May convert a TIFF image into a PDF, if the input is a TIFF and
the TIFF has not been made into a PDF
"""
# Nothing to do, pdf_file is already assigned correctly
if self.mime != "image/tiff":
if self.mime != "image/tiff" or self._tiff_conversion_done:
return
self._tiff_conversion_done = True
self.pdf_file = convert_from_tiff_to_pdf(self.file, Path(self.temp_dir.name))
def detect(self) -> None:
@@ -167,6 +172,9 @@ class BarcodeReader:
if self.barcodes:
return
# No op if not a TIFF
self.convert_from_tiff_to_pdf()
# Choose the library for reading
if settings.CONSUMER_BARCODE_SCANNER == "PYZBAR":
reader = self.read_barcodes_pyzbar
@@ -240,7 +248,7 @@ class BarcodeReader:
"""
document_paths = []
fname = self.file.with_suffix("").name
fname = self.file.stem
with Pdf.open(self.pdf_file) as input_pdf:
# Start with an empty document
current_document: list[Page] = []
@@ -290,7 +298,7 @@ class BarcodeReader:
def separate(
self,
source: DocumentSource,
override_name: Optional[str] = None,
overrides: DocumentMetadataOverrides,
) -> bool:
"""
Separates the document, based on barcodes and configuration, creating new
@@ -316,27 +324,23 @@ class BarcodeReader:
logger.warning("No pages to split on!")
return False
# Create the split documents
doc_paths = self.separate_pages(separator_pages)
tmp_dir = Path(tempfile.mkdtemp(prefix="paperless-barcode-split-")).resolve()
# Save the new documents to correct folder
if source != DocumentSource.ConsumeFolder:
# The given file is somewhere in SCRATCH_DIR,
# and new documents must be moved to the CONSUMPTION_DIR
# for the consumer to notice them
save_to_dir = settings.CONSUMPTION_DIR
else:
# The given file is somewhere in CONSUMPTION_DIR,
# and may be some levels down for recursive tagging
# so use the file's parent to preserve any metadata
save_to_dir = self.file.parent
from documents import tasks
for idx, document_path in enumerate(doc_paths):
if override_name is not None:
newname = f"{idx}_{override_name}"
dest = save_to_dir / newname
else:
dest = save_to_dir
logger.info(f"Saving {document_path} to {dest}")
copy_file_with_basic_stats(document_path, dest)
# Create the split document tasks
for new_document in self.separate_pages(separator_pages):
copy_file_with_basic_stats(new_document, tmp_dir / new_document.name)
tasks.consume_file.delay(
ConsumableDocument(
# Same source, for templates
source=source,
# Can't use same folder or the consume might grab it again
original_file=(tmp_dir / new_document.name).resolve(),
),
# All the same metadata
overrides,
)
logger.info("Barcode splitting complete!")
return True

View File

@@ -3,6 +3,8 @@ import math
import os
from collections import Counter
from contextlib import contextmanager
from datetime import datetime
from typing import Optional
from dateutil.parser import isoparse
from django.conf import settings
@@ -25,9 +27,12 @@ from whoosh.index import open_dir
from whoosh.qparser import MultifieldParser
from whoosh.qparser import QueryParser
from whoosh.qparser.dateparse import DateParserPlugin
from whoosh.qparser.dateparse import English
from whoosh.qparser.plugins import FieldsPlugin
from whoosh.scoring import TF_IDF
from whoosh.searching import ResultsPage
from whoosh.searching import Searcher
from whoosh.util.times import timespan
from whoosh.writing import AsyncWriter
# from documents.models import CustomMetadata
@@ -356,6 +361,22 @@ class DelayedQuery:
return page
class LocalDateParser(English):
def reverse_timezone_offset(self, d):
return (d.replace(tzinfo=timezone.get_current_timezone())).astimezone(
timezone.utc,
)
def date_from(self, *args, **kwargs):
d = super().date_from(*args, **kwargs)
if isinstance(d, timespan):
d.start = self.reverse_timezone_offset(d.start)
d.end = self.reverse_timezone_offset(d.end)
elif isinstance(d, datetime):
d = self.reverse_timezone_offset(d)
return d
class DelayedFullTextQuery(DelayedQuery):
def _get_query(self):
q_str = self.query_params["query"]
@@ -371,7 +392,12 @@ class DelayedFullTextQuery(DelayedQuery):
],
self.searcher.ixreader.schema,
)
qp.add_plugin(DateParserPlugin(basedate=timezone.now()))
qp.add_plugin(
DateParserPlugin(
basedate=timezone.now(),
dateparser=LocalDateParser(),
),
)
q = qp.parse(q_str)
corrected = self.searcher.correct_query(q, q_str)
@@ -402,7 +428,12 @@ class DelayedMoreLikeThisQuery(DelayedQuery):
return q, mask
def autocomplete(ix: FileIndex, term: str, limit: int = 10, user: User = None):
def autocomplete(
ix: FileIndex,
term: str,
limit: int = 10,
user: Optional[User] = None,
):
"""
Mimics whoosh.reading.IndexReader.most_distinctive_terms with permissions
and without scoring
@@ -411,6 +442,9 @@ def autocomplete(ix: FileIndex, term: str, limit: int = 10, user: User = None):
with ix.searcher(weighting=TF_IDF()) as s:
qp = QueryParser("content", schema=ix.schema)
# Don't let searches with a query that happen to match a field override the
# content field query instead and return bogus, not text data
qp.remove_plugin_class(FieldsPlugin)
q = qp.parse(f"{term.lower()}*")
user_criterias = get_permissions_criterias(user)
@@ -430,7 +464,7 @@ def autocomplete(ix: FileIndex, term: str, limit: int = 10, user: User = None):
return terms
def get_permissions_criterias(user: User = None):
def get_permissions_criterias(user: Optional[User] = None):
user_criterias = [query.Term("has_owner", False)]
if user is not None:
if user.is_superuser: # superusers see all docs

View File

@@ -238,18 +238,6 @@ class Command(BaseCommand):
serializers.serialize("json", StoragePath.objects.all()),
)
notes = json.loads(
serializers.serialize("json", Note.objects.all()),
)
if not self.split_manifest:
manifest += notes
documents = Document.objects.order_by("id")
document_map = {d.pk: d for d in documents}
document_manifest = json.loads(serializers.serialize("json", documents))
if not self.split_manifest:
manifest += document_manifest
manifest += json.loads(
serializers.serialize("json", MailAccount.objects.all()),
)
@@ -303,10 +291,24 @@ class Command(BaseCommand):
serializers.serialize("json", CustomField.objects.all()),
)
# These are treated specially and included in the per-document manifest
# if that setting is enabled. Otherwise, they are just exported to the bulk
# manifest
documents = Document.objects.order_by("id")
document_map: dict[int, Document] = {d.pk: d for d in documents}
document_manifest = json.loads(serializers.serialize("json", documents))
notes = json.loads(
serializers.serialize("json", Note.objects.all()),
)
custom_field_instances = json.loads(
serializers.serialize("json", CustomFieldInstance.objects.all()),
)
if not self.split_manifest:
manifest += json.loads(
serializers.serialize("json", CustomFieldInstance.objects.all()),
)
manifest += document_manifest
manifest += notes
manifest += custom_field_instances
# 3. Export files from each document
for index, document_dict in tqdm.tqdm(
@@ -412,6 +414,12 @@ class Command(BaseCommand):
notes,
),
)
content += list(
filter(
lambda d: d["fields"]["document"] == document_dict["pk"],
custom_field_instances,
),
)
manifest_name.write_text(
json.dumps(content, indent=2, ensure_ascii=False),
encoding="utf-8",

View File

@@ -140,7 +140,7 @@ def consume_file(
with BarcodeReader(input_doc.original_file, input_doc.mime_type) as reader:
if settings.CONSUMER_ENABLE_BARCODES and reader.separate(
input_doc.source,
overrides.filename,
overrides,
):
# notify the sender, otherwise the progress bar
# in the UI stays stuck
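This hunk passes the whole overrides object into barcode separation rather than just overrides.filename, so metadata supplied with the upload can survive a split. A purely illustrative sketch of that idea; the Overrides class below is a hypothetical stand-in, not the project's data model:
from dataclasses import dataclass, field, replace
from typing import Optional

@dataclass
class Overrides:  # hypothetical stand-in for the real overrides object
    filename: Optional[str] = None
    title: Optional[str] = None
    tag_ids: list = field(default_factory=list)

def overrides_for_split_part(original: Overrides, part: int) -> Overrides:
    # With the full object available, title and tags carry over to each split
    # part; only the filename needs to be derived per part.
    new_name = f"part_{part}_{original.filename or 'document'}"
    return replace(original, filename=new_name)

print(overrides_for_split_part(Overrides(filename="scan.pdf", title="Invoice", tag_ids=[3]), 1))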

File diff suppressed because it is too large

View File

@@ -0,0 +1,337 @@
import datetime
import io
import json
import os
import shutil
import zipfile
from django.contrib.auth.models import User
from django.test import override_settings
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APITestCase
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.tests.utils import DirectoriesMixin
class TestBulkDownload(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/documents/bulk_download/"
def setUp(self):
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
self.doc1 = Document.objects.create(title="unrelated", checksum="A")
self.doc2 = Document.objects.create(
title="document A",
filename="docA.pdf",
mime_type="application/pdf",
checksum="B",
created=timezone.make_aware(datetime.datetime(2021, 1, 1)),
)
self.doc2b = Document.objects.create(
title="document A",
filename="docA2.pdf",
mime_type="application/pdf",
checksum="D",
created=timezone.make_aware(datetime.datetime(2021, 1, 1)),
)
self.doc3 = Document.objects.create(
title="document B",
filename="docB.jpg",
mime_type="image/jpeg",
checksum="C",
created=timezone.make_aware(datetime.datetime(2020, 3, 21)),
archive_filename="docB.pdf",
archive_checksum="D",
)
shutil.copy(
os.path.join(os.path.dirname(__file__), "samples", "simple.pdf"),
self.doc2.source_path,
)
shutil.copy(
os.path.join(os.path.dirname(__file__), "samples", "simple.png"),
self.doc2b.source_path,
)
shutil.copy(
os.path.join(os.path.dirname(__file__), "samples", "simple.jpg"),
self.doc3.source_path,
)
shutil.copy(
os.path.join(os.path.dirname(__file__), "samples", "test_with_bom.pdf"),
self.doc3.archive_path,
)
def test_download_originals(self):
response = self.client.post(
self.ENDPOINT,
json.dumps(
{"documents": [self.doc2.id, self.doc3.id], "content": "originals"},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 2)
self.assertIn("2021-01-01 document A.pdf", zipf.namelist())
self.assertIn("2020-03-21 document B.jpg", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(f.read(), zipf.read("2021-01-01 document A.pdf"))
with self.doc3.source_file as f:
self.assertEqual(f.read(), zipf.read("2020-03-21 document B.jpg"))
def test_download_default(self):
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc3.id]}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 2)
self.assertIn("2021-01-01 document A.pdf", zipf.namelist())
self.assertIn("2020-03-21 document B.pdf", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(f.read(), zipf.read("2021-01-01 document A.pdf"))
with self.doc3.archive_file as f:
self.assertEqual(f.read(), zipf.read("2020-03-21 document B.pdf"))
def test_download_both(self):
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc3.id], "content": "both"}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 3)
self.assertIn("originals/2021-01-01 document A.pdf", zipf.namelist())
self.assertIn("archive/2020-03-21 document B.pdf", zipf.namelist())
self.assertIn("originals/2020-03-21 document B.jpg", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(
f.read(),
zipf.read("originals/2021-01-01 document A.pdf"),
)
with self.doc3.archive_file as f:
self.assertEqual(
f.read(),
zipf.read("archive/2020-03-21 document B.pdf"),
)
with self.doc3.source_file as f:
self.assertEqual(
f.read(),
zipf.read("originals/2020-03-21 document B.jpg"),
)
def test_filename_clashes(self):
response = self.client.post(
self.ENDPOINT,
json.dumps({"documents": [self.doc2.id, self.doc2b.id]}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 2)
self.assertIn("2021-01-01 document A.pdf", zipf.namelist())
self.assertIn("2021-01-01 document A_01.pdf", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(f.read(), zipf.read("2021-01-01 document A.pdf"))
with self.doc2b.source_file as f:
self.assertEqual(f.read(), zipf.read("2021-01-01 document A_01.pdf"))
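The clash handling asserted here (first file keeps its name, later duplicates gain a zero-padded suffix) can be sketched independently of the server code; this helper is illustrative, not the implementation:
def make_unique(name, used):
    # First occurrence keeps its name; later duplicates get _01, _02, ...
    if name not in used:
        used.add(name)
        return name
    stem, dot, ext = name.rpartition(".")
    counter = 1
    while f"{stem}_{counter:02d}{dot}{ext}" in used:
        counter += 1
    candidate = f"{stem}_{counter:02d}{dot}{ext}"
    used.add(candidate)
    return candidate

used = set()
print(make_unique("2021-01-01 document A.pdf", used))  # unchanged
print(make_unique("2021-01-01 document A.pdf", used))  # 2021-01-01 document A_01.pdf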
def test_compression(self):
self.client.post(
self.ENDPOINT,
json.dumps(
{"documents": [self.doc2.id, self.doc2b.id], "compression": "lzma"},
),
content_type="application/json",
)
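test_compression above only exercises the endpoint and asserts nothing about the archive itself. If the codec were to be checked, zipfile exposes it per entry via ZipInfo.compress_type; a hedged sketch:
import io
import zipfile

def compression_types(payload: bytes) -> set:
    # Collect the compression codec of every member in the returned archive.
    with zipfile.ZipFile(io.BytesIO(payload)) as zipf:
        return {info.compress_type for info in zipf.infolist()}

# e.g. assertEqual(compression_types(response.content), {zipfile.ZIP_LZMA})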
@override_settings(FILENAME_FORMAT="{correspondent}/{title}")
def test_formatted_download_originals(self):
"""
GIVEN:
- Defined file naming format
WHEN:
- Bulk download request for original documents
- Bulk download request asks to follow the file naming format
THEN:
- Files in resulting zipfile are formatted
"""
c = Correspondent.objects.create(name="test")
c2 = Correspondent.objects.create(name="a space name")
self.doc2.correspondent = c
self.doc2.title = "This is Doc 2"
self.doc2.save()
self.doc3.correspondent = c2
self.doc3.title = "Title 2 - Doc 3"
self.doc3.save()
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"content": "originals",
"follow_formatting": True,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 2)
self.assertIn("a space name/Title 2 - Doc 3.jpg", zipf.namelist())
self.assertIn("test/This is Doc 2.pdf", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(f.read(), zipf.read("test/This is Doc 2.pdf"))
with self.doc3.source_file as f:
self.assertEqual(
f.read(),
zipf.read("a space name/Title 2 - Doc 3.jpg"),
)
@override_settings(FILENAME_FORMAT="somewhere/{title}")
def test_formatted_download_archive(self):
"""
GIVEN:
- Defined file naming format
WHEN:
- Bulk download request for archive documents
- Bulk download request asks to follow the file naming format
THEN:
- Files in resulting zipfile are formatted
"""
self.doc2.title = "This is Doc 2"
self.doc2.save()
self.doc3.title = "Title 2 - Doc 3"
self.doc3.save()
print(self.doc3.archive_path)
print(self.doc3.archive_filename)
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"follow_formatting": True,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 2)
self.assertIn("somewhere/This is Doc 2.pdf", zipf.namelist())
self.assertIn("somewhere/Title 2 - Doc 3.pdf", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(f.read(), zipf.read("somewhere/This is Doc 2.pdf"))
with self.doc3.archive_file as f:
self.assertEqual(f.read(), zipf.read("somewhere/Title 2 - Doc 3.pdf"))
@override_settings(FILENAME_FORMAT="{document_type}/{title}")
def test_formatted_download_both(self):
"""
GIVEN:
- Defined file naming format
WHEN:
- Bulk download request for original documents and archive documents
- Bulk download request asks to follow the file naming format
THEN:
- Files in resulting zipfile are formatted
"""
dc1 = DocumentType.objects.create(name="bill")
dc2 = DocumentType.objects.create(name="statement")
self.doc2.document_type = dc1
self.doc2.title = "This is Doc 2"
self.doc2.save()
self.doc3.document_type = dc2
self.doc3.title = "Title 2 - Doc 3"
self.doc3.save()
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"content": "both",
"follow_formatting": True,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response["Content-Type"], "application/zip")
with zipfile.ZipFile(io.BytesIO(response.content)) as zipf:
self.assertEqual(len(zipf.filelist), 3)
self.assertIn("originals/bill/This is Doc 2.pdf", zipf.namelist())
self.assertIn("archive/statement/Title 2 - Doc 3.pdf", zipf.namelist())
self.assertIn("originals/statement/Title 2 - Doc 3.jpg", zipf.namelist())
with self.doc2.source_file as f:
self.assertEqual(
f.read(),
zipf.read("originals/bill/This is Doc 2.pdf"),
)
with self.doc3.archive_file as f:
self.assertEqual(
f.read(),
zipf.read("archive/statement/Title 2 - Doc 3.pdf"),
)
with self.doc3.source_file as f:
self.assertEqual(
f.read(),
zipf.read("originals/statement/Title 2 - Doc 3.jpg"),
)

View File

@@ -0,0 +1,870 @@
import json
from unittest import mock
from django.contrib.auth.models import User
from guardian.shortcuts import assign_perm
from rest_framework import status
from rest_framework.test import APITestCase
from documents import bulk_edit
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
class TestBulkEdit(DirectoriesMixin, APITestCase):
def setUp(self):
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
patcher = mock.patch("documents.bulk_edit.bulk_update_documents.delay")
self.async_task = patcher.start()
self.addCleanup(patcher.stop)
self.c1 = Correspondent.objects.create(name="c1")
self.c2 = Correspondent.objects.create(name="c2")
self.dt1 = DocumentType.objects.create(name="dt1")
self.dt2 = DocumentType.objects.create(name="dt2")
self.t1 = Tag.objects.create(name="t1")
self.t2 = Tag.objects.create(name="t2")
self.doc1 = Document.objects.create(checksum="A", title="A")
self.doc2 = Document.objects.create(
checksum="B",
title="B",
correspondent=self.c1,
document_type=self.dt1,
)
self.doc3 = Document.objects.create(
checksum="C",
title="C",
correspondent=self.c2,
document_type=self.dt2,
)
self.doc4 = Document.objects.create(checksum="D", title="D")
self.doc5 = Document.objects.create(checksum="E", title="E")
self.doc2.tags.add(self.t1)
self.doc3.tags.add(self.t2)
self.doc4.tags.add(self.t1, self.t2)
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
def test_set_correspondent(self):
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
bulk_edit.set_correspondent(
[self.doc1.id, self.doc2.id, self.doc3.id],
self.c2.id,
)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 3)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
def test_unset_correspondent(self):
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 1)
bulk_edit.set_correspondent([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(correspondent=self.c2).count(), 0)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
def test_set_document_type(self):
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
bulk_edit.set_document_type(
[self.doc1.id, self.doc2.id, self.doc3.id],
self.dt2.id,
)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 3)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc2.id])
def test_unset_document_type(self):
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 1)
bulk_edit.set_document_type([self.doc1.id, self.doc2.id, self.doc3.id], None)
self.assertEqual(Document.objects.filter(document_type=self.dt2).count(), 0)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
def test_set_document_storage_path(self):
"""
GIVEN:
- 5 documents without defined storage path
WHEN:
- Bulk edit called to add storage path to 1 document
THEN:
- Single document storage path update
"""
self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)
bulk_edit.set_storage_path(
[self.doc1.id],
self.sp1.id,
)
self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
def test_unset_document_storage_path(self):
"""
GIVEN:
- 4 documents without defined storage path
- 1 document with a defined storage
WHEN:
- Bulk edit called to remove storage path from 1 document
THEN:
- Single document storage path removed
"""
self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)
bulk_edit.set_storage_path(
[self.doc1.id],
self.sp1.id,
)
self.assertEqual(Document.objects.filter(storage_path=None).count(), 4)
bulk_edit.set_storage_path(
[self.doc1.id],
None,
)
self.assertEqual(Document.objects.filter(storage_path=None).count(), 5)
self.async_task.assert_called()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id])
def test_add_tag(self):
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
bulk_edit.add_tag(
[self.doc1.id, self.doc2.id, self.doc3.id, self.doc4.id],
self.t1.id,
)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 4)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc1.id, self.doc3.id])
def test_remove_tag(self):
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 2)
bulk_edit.remove_tag([self.doc1.id, self.doc3.id, self.doc4.id], self.t1.id)
self.assertEqual(Document.objects.filter(tags__id=self.t1.id).count(), 1)
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
self.assertCountEqual(kwargs["document_ids"], [self.doc4.id])
def test_modify_tags(self):
tag_unrelated = Tag.objects.create(name="unrelated")
self.doc2.tags.add(tag_unrelated)
self.doc3.tags.add(tag_unrelated)
bulk_edit.modify_tags(
[self.doc2.id, self.doc3.id],
add_tags=[self.t2.id],
remove_tags=[self.t1.id],
)
self.assertCountEqual(list(self.doc2.tags.all()), [self.t2, tag_unrelated])
self.assertCountEqual(list(self.doc3.tags.all()), [self.t2, tag_unrelated])
self.async_task.assert_called_once()
args, kwargs = self.async_task.call_args
# TODO: doc3 should not be affected, but the query for that is rather complicated
self.assertCountEqual(kwargs["document_ids"], [self.doc2.id, self.doc3.id])
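The TODO above concerns which documents actually need reindexing after modify_tags. A hedged, database-free sketch of that calculation:
def affected_documents(doc_tags, add, remove):
    # A document is affected only if the edit adds a tag it lacks or removes
    # a tag it currently has.
    return [
        pk for pk, tags in doc_tags.items()
        if (set(add) - tags) or (tags & set(remove))
    ]

# doc2 carries t1, doc3 already carries t2: only doc2 actually changes.
print(affected_documents({2: {1, 99}, 3: {2, 99}}, add=[2], remove=[1]))  # [2]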
def test_delete(self):
self.assertEqual(Document.objects.count(), 5)
bulk_edit.delete([self.doc1.id, self.doc2.id])
self.assertEqual(Document.objects.count(), 3)
self.assertCountEqual(
[doc.id for doc in Document.objects.all()],
[self.doc3.id, self.doc4.id, self.doc5.id],
)
@mock.patch("documents.serialisers.bulk_edit.set_correspondent")
def test_api_set_correspondent(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_correspondent",
"parameters": {"correspondent": self.c1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["correspondent"], self.c1.id)
@mock.patch("documents.serialisers.bulk_edit.set_correspondent")
def test_api_unset_correspondent(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_correspondent",
"parameters": {"correspondent": None},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertIsNone(kwargs["correspondent"])
@mock.patch("documents.serialisers.bulk_edit.set_document_type")
def test_api_set_type(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_document_type",
"parameters": {"document_type": self.dt1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["document_type"], self.dt1.id)
@mock.patch("documents.serialisers.bulk_edit.set_document_type")
def test_api_unset_type(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_document_type",
"parameters": {"document_type": None},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertIsNone(kwargs["document_type"])
@mock.patch("documents.serialisers.bulk_edit.add_tag")
def test_api_add_tag(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "add_tag",
"parameters": {"tag": self.t1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["tag"], self.t1.id)
@mock.patch("documents.serialisers.bulk_edit.remove_tag")
def test_api_remove_tag(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "remove_tag",
"parameters": {"tag": self.t1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["tag"], self.t1.id)
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
def test_api_modify_tags(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc3.id],
"method": "modify_tags",
"parameters": {
"add_tags": [self.t1.id],
"remove_tags": [self.t2.id],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertListEqual(args[0], [self.doc1.id, self.doc3.id])
self.assertEqual(kwargs["add_tags"], [self.t1.id])
self.assertEqual(kwargs["remove_tags"], [self.t2.id])
@mock.patch("documents.serialisers.bulk_edit.modify_tags")
def test_api_modify_tags_not_provided(self, m):
"""
GIVEN:
- API data to modify tags is missing the remove_tags field
WHEN:
- API to edit tags is called
THEN:
- API returns HTTP 400
- modify_tags is not called
"""
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc3.id],
"method": "modify_tags",
"parameters": {
"add_tags": [self.t1.id],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
m.assert_not_called()
@mock.patch("documents.serialisers.bulk_edit.delete")
def test_api_delete(self, m):
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{"documents": [self.doc1.id], "method": "delete", "parameters": {}},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertEqual(args[0], [self.doc1.id])
self.assertEqual(len(kwargs), 0)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
def test_api_set_storage_path(self, m):
"""
GIVEN:
- API data to set the storage path of a document
WHEN:
- API is called
THEN:
- set_storage_path is called with correct document IDs and storage_path ID
"""
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_storage_path",
"parameters": {"storage_path": self.sp1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertListEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["storage_path"], self.sp1.id)
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
def test_api_unset_storage_path(self, m):
"""
GIVEN:
- API data to clear/unset the storage path of a document
WHEN:
- API is called
THEN:
- set_storage_path is called with correct document IDs and None storage_path
"""
m.return_value = "OK"
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_storage_path",
"parameters": {"storage_path": None},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertListEqual(args[0], [self.doc1.id])
self.assertEqual(kwargs["storage_path"], None)
def test_api_invalid_storage_path(self):
"""
GIVEN:
- API data to set the storage path of a document
- Given storage_path ID isn't valid
WHEN:
- API is called
THEN:
- API returns HTTP 400
- set_storage_path is not called
"""
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_storage_path",
"parameters": {"storage_path": self.sp1.id + 10},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.async_task.assert_not_called()
def test_api_set_storage_path_not_provided(self):
"""
GIVEN:
- API data to set the storage path of a document
- API data is missing storage path ID
WHEN:
- API is called
THEN:
- API returns HTTP 400
- set_storage_path is not called
"""
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id],
"method": "set_storage_path",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.async_task.assert_not_called()
def test_api_invalid_doc(self):
self.assertEqual(Document.objects.count(), 5)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps({"documents": [-235], "method": "delete", "parameters": {}}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(Document.objects.count(), 5)
def test_api_invalid_method(self):
self.assertEqual(Document.objects.count(), 5)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "exterminate",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(Document.objects.count(), 5)
def test_api_invalid_correspondent(self):
self.assertEqual(self.doc2.correspondent, self.c1)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "set_correspondent",
"parameters": {"correspondent": 345657},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
doc2 = Document.objects.get(id=self.doc2.id)
self.assertEqual(doc2.correspondent, self.c1)
def test_api_no_correspondent(self):
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "set_correspondent",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_invalid_document_type(self):
self.assertEqual(self.doc2.document_type, self.dt1)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "set_document_type",
"parameters": {"document_type": 345657},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
doc2 = Document.objects.get(id=self.doc2.id)
self.assertEqual(doc2.document_type, self.dt1)
def test_api_no_document_type(self):
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "set_document_type",
"parameters": {},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_add_invalid_tag(self):
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "add_tag",
"parameters": {"tag": 345657},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
def test_api_add_tag_no_tag(self):
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{"documents": [self.doc2.id], "method": "add_tag", "parameters": {}},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_delete_invalid_tag(self):
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "remove_tag",
"parameters": {"tag": 345657},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
def test_api_delete_tag_no_tag(self):
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{"documents": [self.doc2.id], "method": "remove_tag", "parameters": {}},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_modify_invalid_tags(self):
self.assertEqual(list(self.doc2.tags.all()), [self.t1])
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "modify_tags",
"parameters": {
"add_tags": [self.t2.id, 1657],
"remove_tags": [1123123],
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_modify_tags_no_tags(self):
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "modify_tags",
"parameters": {"remove_tags": [1123123]},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id],
"method": "modify_tags",
"parameters": {"add_tags": [self.t2.id, 1657]},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_api_selection_data_empty(self):
response = self.client.post(
"/api/documents/selection_data/",
json.dumps({"documents": []}),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
for field, Entity in [
("selected_correspondents", Correspondent),
("selected_tags", Tag),
("selected_document_types", DocumentType),
]:
self.assertEqual(len(response.data[field]), Entity.objects.count())
for correspondent in response.data[field]:
self.assertEqual(correspondent["document_count"], 0)
self.assertCountEqual(
map(lambda c: c["id"], response.data[field]),
map(lambda c: c["id"], Entity.objects.values("id")),
)
def test_api_selection_data(self):
response = self.client.post(
"/api/documents/selection_data/",
json.dumps(
{"documents": [self.doc1.id, self.doc2.id, self.doc4.id, self.doc5.id]},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertCountEqual(
response.data["selected_correspondents"],
[
{"id": self.c1.id, "document_count": 1},
{"id": self.c2.id, "document_count": 0},
],
)
self.assertCountEqual(
response.data["selected_tags"],
[
{"id": self.t1.id, "document_count": 2},
{"id": self.t2.id, "document_count": 1},
],
)
self.assertCountEqual(
response.data["selected_document_types"],
[
{"id": self.c1.id, "document_count": 1},
{"id": self.c2.id, "document_count": 0},
],
)
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
def test_set_permissions(self, m):
m.return_value = "OK"
user1 = User.objects.create(username="user1")
user2 = User.objects.create(username="user2")
permissions = {
"view": {
"users": [user1.id, user2.id],
"groups": None,
},
"change": {
"users": [user1.id],
"groups": None,
},
}
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"method": "set_permissions",
"parameters": {"set_permissions": permissions},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
args, kwargs = m.call_args
self.assertCountEqual(args[0], [self.doc2.id, self.doc3.id])
self.assertEqual(len(kwargs["set_permissions"]["view"]["users"]), 2)
@mock.patch("documents.serialisers.bulk_edit.set_permissions")
def test_insufficient_permissions_ownership(self, m):
"""
GIVEN:
- Documents owned by user other than logged in user
WHEN:
- set_permissions bulk edit API endpoint is called
THEN:
- User is not able to change permissions
"""
m.return_value = "OK"
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
self.client.force_authenticate(user=user1)
permissions = {
"owner": user1.id,
}
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id, self.doc3.id],
"method": "set_permissions",
"parameters": {"set_permissions": permissions},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
m.assert_not_called()
self.assertEqual(response.content, b"Insufficient permissions")
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc2.id, self.doc3.id],
"method": "set_permissions",
"parameters": {"set_permissions": permissions},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()
@mock.patch("documents.serialisers.bulk_edit.set_storage_path")
def test_insufficient_permissions_edit(self, m):
"""
GIVEN:
- Documents for which current user only has view permissions
WHEN:
- API is called
THEN:
- set_storage_path only called if user can edit all docs
"""
m.return_value = "OK"
self.doc1.owner = User.objects.get(username="temp_admin")
self.doc1.save()
user1 = User.objects.create(username="user1")
assign_perm("view_document", user1, self.doc1)
self.client.force_authenticate(user=user1)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id, self.doc3.id],
"method": "set_storage_path",
"parameters": {"storage_path": self.sp1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
m.assert_not_called()
self.assertEqual(response.content, b"Insufficient permissions")
assign_perm("change_document", user1, self.doc1)
response = self.client.post(
"/api/documents/bulk_edit/",
json.dumps(
{
"documents": [self.doc1.id, self.doc2.id, self.doc3.id],
"method": "set_storage_path",
"parameters": {"storage_path": self.sp1.id},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
m.assert_called_once()

View File

@@ -0,0 +1,236 @@
import json
from django.contrib.auth.models import Group
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase
from documents.data_models import DocumentSource
from documents.models import ConsumptionTemplate
from documents.models import Correspondent
from documents.models import CustomField
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
from paperless_mail.models import MailAccount
from paperless_mail.models import MailRule
class TestApiConsumptionTemplates(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/consumption_templates/"
def setUp(self) -> None:
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
self.user2 = User.objects.create(username="user2")
self.user3 = User.objects.create(username="user3")
self.group1 = Group.objects.create(name="group1")
self.c = Correspondent.objects.create(name="Correspondent Name")
self.c2 = Correspondent.objects.create(name="Correspondent Name 2")
self.dt = DocumentType.objects.create(name="DocType Name")
self.t1 = Tag.objects.create(name="t1")
self.t2 = Tag.objects.create(name="t2")
self.t3 = Tag.objects.create(name="t3")
self.sp = StoragePath.objects.create(path="/test/")
self.cf1 = CustomField.objects.create(name="Custom Field 1", data_type="string")
self.cf2 = CustomField.objects.create(
name="Custom Field 2",
data_type="integer",
)
self.ct = ConsumptionTemplate.objects.create(
name="Template 1",
order=0,
sources=f"{int(DocumentSource.ApiUpload)},{int(DocumentSource.ConsumeFolder)},{int(DocumentSource.MailFetch)}",
filter_filename="*simple*",
filter_path="*/samples/*",
assign_title="Doc from {correspondent}",
assign_correspondent=self.c,
assign_document_type=self.dt,
assign_storage_path=self.sp,
assign_owner=self.user2,
)
self.ct.assign_tags.add(self.t1)
self.ct.assign_tags.add(self.t2)
self.ct.assign_tags.add(self.t3)
self.ct.assign_view_users.add(self.user3.pk)
self.ct.assign_view_groups.add(self.group1.pk)
self.ct.assign_change_users.add(self.user3.pk)
self.ct.assign_change_groups.add(self.group1.pk)
self.ct.assign_custom_fields.add(self.cf1.pk)
self.ct.assign_custom_fields.add(self.cf2.pk)
self.ct.save()
def test_api_get_consumption_template(self):
"""
GIVEN:
- API request to get all consumption templates
WHEN:
- API is called
THEN:
- Existing consumption templates are returned
"""
response = self.client.get(self.ENDPOINT, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 1)
resp_consumption_template = response.data["results"][0]
self.assertEqual(resp_consumption_template["id"], self.ct.id)
self.assertEqual(
resp_consumption_template["assign_correspondent"],
self.ct.assign_correspondent.pk,
)
def test_api_create_consumption_template(self):
"""
GIVEN:
- API request to create a consumption template
WHEN:
- API is called
THEN:
- Correct HTTP response
- New template is created
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Template 2",
"order": 1,
"sources": [DocumentSource.ApiUpload],
"filter_filename": "*test*",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(ConsumptionTemplate.objects.count(), 2)
def test_api_create_invalid_consumption_template(self):
"""
GIVEN:
- API request to create a consumption template
- Neither file name nor path filter are specified
WHEN:
- API is called
THEN:
- Correct HTTP 400 response
- No template is created
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Template 2",
"order": 1,
"sources": [DocumentSource.ApiUpload],
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(ConsumptionTemplate.objects.count(), 1)
def test_api_create_consumption_template_empty_fields(self):
"""
GIVEN:
- API request to create a consumption template
- Path or filename filter or assign title are empty string
WHEN:
- API is called
THEN:
- Template is created, but empty-string filters or title assignment are stored as unset
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Template 2",
"order": 1,
"sources": [DocumentSource.ApiUpload],
"filter_filename": "*test*",
"filter_path": "",
"assign_title": "",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
ct = ConsumptionTemplate.objects.get(name="Template 2")
self.assertEqual(ct.filter_filename, "*test*")
self.assertIsNone(ct.filter_path)
self.assertIsNone(ct.assign_title)
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Template 3",
"order": 1,
"sources": [DocumentSource.ApiUpload],
"filter_filename": "",
"filter_path": "*/test/*",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
ct2 = ConsumptionTemplate.objects.get(name="Template 3")
self.assertEqual(ct2.filter_path, "*/test/*")
self.assertIsNone(ct2.filter_filename)
def test_api_create_consumption_template_with_mailrule(self):
"""
GIVEN:
- API request to create a consumption template with a mail rule but no MailFetch source
WHEN:
- API is called
THEN:
- New template is created with MailFetch as source
"""
account1 = MailAccount.objects.create(
name="Email1",
username="username1",
password="password1",
imap_server="server.example.com",
imap_port=443,
imap_security=MailAccount.ImapSecurity.SSL,
character_set="UTF-8",
)
rule1 = MailRule.objects.create(
name="Rule1",
account=account1,
folder="INBOX",
filter_from="from@example.com",
filter_to="someone@somewhere.com",
filter_subject="subject",
filter_body="body",
filter_attachment_filename_include="file.pdf",
maximum_age=30,
action=MailRule.MailAction.MARK_READ,
assign_title_from=MailRule.TitleSource.FROM_SUBJECT,
assign_correspondent_from=MailRule.CorrespondentSource.FROM_NOTHING,
order=0,
attachment_type=MailRule.AttachmentProcessing.ATTACHMENTS_ONLY,
)
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Template 2",
"order": 1,
"sources": [DocumentSource.ApiUpload],
"filter_mailrule": rule1.pk,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(ConsumptionTemplate.objects.count(), 2)
ct = ConsumptionTemplate.objects.get(name="Template 2")
self.assertEqual(ct.sources, [int(DocumentSource.MailFetch).__str__()])
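The final assertion shows that supplying filter_mailrule forces the stored sources to MailFetch. A hedged sketch of that normalization; MAIL_FETCH below is a placeholder value, not the real enum member:
MAIL_FETCH = 3  # placeholder value for illustration only

def normalize_sources(sources, has_mail_rule):
    # A mail-rule filter only applies to mail-fetched documents, so the sources
    # collapse to MailFetch; otherwise keep what was submitted.
    if has_mail_rule:
        return [str(MAIL_FETCH)]
    return [str(s) for s in sources]

print(normalize_sources([2], has_mail_rule=True))  # ['3']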

File diff suppressed because it is too large

View File

@@ -0,0 +1,224 @@
import json
from unittest import mock
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import StoragePath
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
class TestApiObjects(DirectoriesMixin, APITestCase):
def setUp(self) -> None:
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
self.tag1 = Tag.objects.create(name="t1", is_inbox_tag=True)
self.tag2 = Tag.objects.create(name="t2")
self.tag3 = Tag.objects.create(name="t3")
self.c1 = Correspondent.objects.create(name="c1")
self.c2 = Correspondent.objects.create(name="c2")
self.c3 = Correspondent.objects.create(name="c3")
self.dt1 = DocumentType.objects.create(name="dt1")
self.dt2 = DocumentType.objects.create(name="dt2")
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{title}")
self.sp2 = StoragePath.objects.create(name="sp2", path="Something2/{title}")
def test_object_filters(self):
response = self.client.get(
f"/api/tags/?id={self.tag2.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 1)
response = self.client.get(
f"/api/tags/?id__in={self.tag1.id},{self.tag3.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 2)
response = self.client.get(
f"/api/correspondents/?id={self.c2.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 1)
response = self.client.get(
f"/api/correspondents/?id__in={self.c1.id},{self.c3.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 2)
response = self.client.get(
f"/api/document_types/?id={self.dt1.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 1)
response = self.client.get(
f"/api/document_types/?id__in={self.dt1.id},{self.dt2.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 2)
response = self.client.get(
f"/api/storage_paths/?id={self.sp1.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 1)
response = self.client.get(
f"/api/storage_paths/?id__in={self.sp1.id},{self.sp2.id}",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
results = response.data["results"]
self.assertEqual(len(results), 2)
class TestApiStoragePaths(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/storage_paths/"
def setUp(self) -> None:
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
self.sp1 = StoragePath.objects.create(name="sp1", path="Something/{checksum}")
def test_api_get_storage_path(self):
"""
GIVEN:
- API request to get all storage paths
WHEN:
- API is called
THEN:
- Existing storage paths are returned
"""
response = self.client.get(self.ENDPOINT, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 1)
resp_storage_path = response.data["results"][0]
self.assertEqual(resp_storage_path["id"], self.sp1.id)
self.assertEqual(resp_storage_path["path"], self.sp1.path)
def test_api_create_storage_path(self):
"""
GIVEN:
- API request to create a storage path
WHEN:
- API is called
THEN:
- Correct HTTP response
- New storage path is created
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "A storage path",
"path": "Somewhere/{asn}",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(StoragePath.objects.count(), 2)
def test_api_create_invalid_storage_path(self):
"""
GIVEN:
- API request to create a storage path
- Storage path format is incorrect
WHEN:
- API is called
THEN:
- Correct HTTP 400 response
- No storage path is created
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Another storage path",
"path": "Somewhere/{correspdent}",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
self.assertEqual(StoragePath.objects.count(), 1)
def test_api_storage_path_placeholders(self):
"""
GIVEN:
- API request to create a storage path with placeholders
- Storage path is valid
WHEN:
- API is called
THEN:
- Correct HTTP response
- New storage path is created
"""
response = self.client.post(
self.ENDPOINT,
json.dumps(
{
"name": "Storage path with placeholders",
"path": "{title}/{correspondent}/{document_type}/{created}/{created_year}"
"/{created_year_short}/{created_month}/{created_month_name}"
"/{created_month_name_short}/{created_day}/{added}/{added_year}"
"/{added_year_short}/{added_month}/{added_month_name}"
"/{added_month_name_short}/{added_day}/{asn}/{tags}"
"/{tag_list}/{owner_username}/{original_name}/{doc_pk}/",
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(StoragePath.objects.count(), 2)
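Together with the earlier invalid-path test (the {correspdent} typo yields HTTP 400), this implies a validation step that rejects unknown placeholder names. A standalone sketch of that idea, with a deliberately abbreviated set of known placeholders:
import string

KNOWN = {"title", "correspondent", "document_type", "created", "asn"}  # abbreviated

def is_valid_path_template(path: str) -> bool:
    # Extract every {placeholder} and require that all of them are recognised.
    fields = {name for _, name, _, _ in string.Formatter().parse(path) if name}
    return fields <= KNOWN

print(is_valid_path_template("Somewhere/{asn}"))          # True
print(is_valid_path_template("Somewhere/{correspdent}"))  # False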
@mock.patch("documents.bulk_edit.bulk_update_documents.delay")
def test_api_update_storage_path(self, bulk_update_mock):
"""
GIVEN:
- An existing storage path that is in use by a document
WHEN:
- API request to update the storage path format
THEN:
- Documents using the storage path are queued for an update
"""
document = Document.objects.create(
mime_type="application/pdf",
storage_path=self.sp1,
)
response = self.client.patch(
f"{self.ENDPOINT}{self.sp1.pk}/",
data={
"path": "somewhere/{created} - {title}",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
bulk_update_mock.assert_called_once()
args, _ = bulk_update_mock.call_args
self.assertCountEqual([document.pk], args[0])

View File

@@ -0,0 +1,910 @@
import json
from django.contrib.auth.models import Group
from django.contrib.auth.models import Permission
from django.contrib.auth.models import User
from guardian.shortcuts import assign_perm
from guardian.shortcuts import get_perms
from guardian.shortcuts import get_users_with_perms
from rest_framework import status
from rest_framework.test import APITestCase
from documents.models import Correspondent
from documents.models import Document
from documents.models import DocumentType
from documents.models import MatchingModel
from documents.models import StoragePath
from documents.models import Tag
from documents.tests.utils import DirectoriesMixin
class TestApiAuth(DirectoriesMixin, APITestCase):
def test_auth_required(self):
d = Document.objects.create(title="Test")
self.assertEqual(
self.client.get("/api/documents/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get(f"/api/documents/{d.id}/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get(f"/api/documents/{d.id}/download/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get(f"/api/documents/{d.id}/preview/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get(f"/api/documents/{d.id}/thumb/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/tags/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/correspondents/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/document_types/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/logs/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/saved_views/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/search/autocomplete/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/documents/bulk_edit/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/documents/bulk_download/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
self.assertEqual(
self.client.get("/api/documents/selection_data/").status_code,
status.HTTP_401_UNAUTHORIZED,
)
def test_api_version_no_auth(self):
response = self.client.get("/api/")
self.assertNotIn("X-Api-Version", response)
self.assertNotIn("X-Version", response)
def test_api_version_with_auth(self):
user = User.objects.create_superuser(username="test")
self.client.force_authenticate(user)
response = self.client.get("/api/")
self.assertIn("X-Api-Version", response)
self.assertIn("X-Version", response)
def test_api_insufficient_permissions(self):
user = User.objects.create_user(username="test")
self.client.force_authenticate(user)
Document.objects.create(title="Test")
self.assertEqual(
self.client.get("/api/documents/").status_code,
status.HTTP_403_FORBIDDEN,
)
self.assertEqual(
self.client.get("/api/tags/").status_code,
status.HTTP_403_FORBIDDEN,
)
self.assertEqual(
self.client.get("/api/correspondents/").status_code,
status.HTTP_403_FORBIDDEN,
)
self.assertEqual(
self.client.get("/api/document_types/").status_code,
status.HTTP_403_FORBIDDEN,
)
self.assertEqual(
self.client.get("/api/logs/").status_code,
status.HTTP_403_FORBIDDEN,
)
self.assertEqual(
self.client.get("/api/saved_views/").status_code,
status.HTTP_403_FORBIDDEN,
)
def test_api_sufficient_permissions(self):
user = User.objects.create_user(username="test")
user.user_permissions.add(*Permission.objects.all())
self.client.force_authenticate(user)
Document.objects.create(title="Test")
self.assertEqual(
self.client.get("/api/documents/").status_code,
status.HTTP_200_OK,
)
self.assertEqual(self.client.get("/api/tags/").status_code, status.HTTP_200_OK)
self.assertEqual(
self.client.get("/api/correspondents/").status_code,
status.HTTP_200_OK,
)
self.assertEqual(
self.client.get("/api/document_types/").status_code,
status.HTTP_200_OK,
)
self.assertEqual(self.client.get("/api/logs/").status_code, status.HTTP_200_OK)
self.assertEqual(
self.client.get("/api/saved_views/").status_code,
status.HTTP_200_OK,
)
def test_api_get_object_permissions(self):
user1 = User.objects.create_user(username="test1")
user2 = User.objects.create_user(username="test2")
user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
self.client.force_authenticate(user1)
self.assertEqual(
self.client.get("/api/documents/").status_code,
status.HTTP_200_OK,
)
d = Document.objects.create(title="Test", content="the content 1", checksum="1")
# no owner
self.assertEqual(
self.client.get(f"/api/documents/{d.id}/").status_code,
status.HTTP_200_OK,
)
d2 = Document.objects.create(
title="Test 2",
content="the content 2",
checksum="2",
owner=user2,
)
self.assertEqual(
self.client.get(f"/api/documents/{d2.id}/").status_code,
status.HTTP_404_NOT_FOUND,
)
def test_api_default_owner(self):
"""
GIVEN:
- API request to create an object (Tag)
WHEN:
- owner is not set at all
THEN:
- Object created with current user as owner
"""
user1 = User.objects.create_superuser(username="user1")
self.client.force_authenticate(user1)
response = self.client.post(
"/api/tags/",
json.dumps(
{
"name": "test1",
"matching_algorithm": MatchingModel.MATCH_AUTO,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
tag1 = Tag.objects.filter(name="test1").first()
self.assertEqual(tag1.owner, user1)
def test_api_set_no_owner(self):
"""
GIVEN:
- API request to create an object (Tag)
WHEN:
- owner is passed as None
THEN:
- Object created with no owner
"""
user1 = User.objects.create_superuser(username="user1")
self.client.force_authenticate(user1)
response = self.client.post(
"/api/tags/",
json.dumps(
{
"name": "test1",
"matching_algorithm": MatchingModel.MATCH_AUTO,
"owner": None,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
tag1 = Tag.objects.filter(name="test1").first()
self.assertEqual(tag1.owner, None)
def test_api_set_owner_w_permissions(self):
"""
GIVEN:
- API request to create an object (Tag) that supplies set_permissions object
WHEN:
- owner is passed as user id
- view > users is set & view > groups is set
THEN:
- Object permissions are set appropriately
"""
user1 = User.objects.create_superuser(username="user1")
user2 = User.objects.create(username="user2")
group1 = Group.objects.create(name="group1")
self.client.force_authenticate(user1)
response = self.client.post(
"/api/tags/",
json.dumps(
{
"name": "test1",
"matching_algorithm": MatchingModel.MATCH_AUTO,
"owner": user1.id,
"set_permissions": {
"view": {
"users": [user2.id],
"groups": [group1.id],
},
"change": {
"users": None,
"groups": None,
},
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
tag1 = Tag.objects.filter(name="test1").first()
from guardian.core import ObjectPermissionChecker
checker = ObjectPermissionChecker(user2)
self.assertEqual(checker.has_perm("view_tag", tag1), True)
self.assertIn("view_tag", get_perms(group1, tag1))
def test_api_set_other_owner_w_permissions(self):
"""
GIVEN:
- API request to create an object (Tag)
WHEN:
- a different owner than is logged in is set
- view > groups is set
THEN:
- Object permissions are set appropriately
"""
user1 = User.objects.create_superuser(username="user1")
user2 = User.objects.create(username="user2")
group1 = Group.objects.create(name="group1")
self.client.force_authenticate(user1)
response = self.client.post(
"/api/tags/",
json.dumps(
{
"name": "test1",
"matching_algorithm": MatchingModel.MATCH_AUTO,
"owner": user2.id,
"set_permissions": {
"view": {
"users": None,
"groups": [group1.id],
},
"change": {
"users": None,
"groups": None,
},
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
tag1 = Tag.objects.filter(name="test1").first()
self.assertEqual(tag1.owner, user2)
self.assertIn("view_tag", get_perms(group1, tag1))
def test_api_set_doc_permissions(self):
"""
GIVEN:
- API request to update doc permissions and owner
WHEN:
- owner is set
- view > users is set & view > groups is set
THEN:
- Object permissions are set appropriately
"""
doc = Document.objects.create(
title="test",
mime_type="application/pdf",
content="this is a document",
)
user1 = User.objects.create_superuser(username="user1")
user2 = User.objects.create(username="user2")
group1 = Group.objects.create(name="group1")
self.client.force_authenticate(user1)
response = self.client.patch(
f"/api/documents/{doc.id}/",
json.dumps(
{
"owner": user1.id,
"set_permissions": {
"view": {
"users": [user2.id],
"groups": [group1.id],
},
"change": {
"users": None,
"groups": None,
},
},
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
doc = Document.objects.get(pk=doc.id)
self.assertEqual(doc.owner, user1)
from guardian.core import ObjectPermissionChecker
checker = ObjectPermissionChecker(user2)
self.assertTrue(checker.has_perm("view_document", doc))
self.assertIn("view_document", get_perms(group1, doc))
def test_dynamic_permissions_fields(self):
user1 = User.objects.create_user(username="user1")
user1.user_permissions.add(*Permission.objects.filter(codename="view_document"))
user2 = User.objects.create_user(username="user2")
Document.objects.create(title="Test", content="content 1", checksum="1")
doc2 = Document.objects.create(
title="Test2",
content="content 2",
checksum="2",
owner=user2,
)
doc3 = Document.objects.create(
title="Test3",
content="content 3",
checksum="3",
owner=user2,
)
assign_perm("view_document", user1, doc2)
assign_perm("view_document", user1, doc3)
assign_perm("change_document", user1, doc3)
self.client.force_authenticate(user1)
response = self.client.get(
"/api/documents/",
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
resp_data = response.json()
self.assertNotIn("permissions", resp_data["results"][0])
self.assertIn("user_can_change", resp_data["results"][0])
self.assertEqual(resp_data["results"][0]["user_can_change"], True) # doc1
self.assertEqual(resp_data["results"][1]["user_can_change"], False) # doc2
self.assertEqual(resp_data["results"][2]["user_can_change"], True) # doc3
response = self.client.get(
"/api/documents/?full_perms=true",
format="json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
resp_data = response.json()
self.assertIn("permissions", resp_data["results"][0])
self.assertNotIn("user_can_change", resp_data["results"][0])
class TestApiUser(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/users/"
def setUp(self):
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
def test_get_users(self):
"""
GIVEN:
- Configured users
WHEN:
- API call is made to get users
THEN:
- Configured users are provided
"""
user1 = User.objects.create(
username="testuser",
password="test",
first_name="Test",
last_name="User",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 2)
returned_user2 = response.data["results"][1]
self.assertEqual(returned_user2["username"], user1.username)
self.assertEqual(returned_user2["password"], "**********")
self.assertEqual(returned_user2["first_name"], user1.first_name)
self.assertEqual(returned_user2["last_name"], user1.last_name)
def test_create_user(self):
"""
WHEN:
- API request is made to add a user account
THEN:
- A new user account is created
"""
user1 = {
"username": "testuser",
"password": "test",
"first_name": "Test",
"last_name": "User",
}
response = self.client.post(
self.ENDPOINT,
data=user1,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
returned_user1 = User.objects.get(username="testuser")
self.assertEqual(returned_user1.username, user1["username"])
self.assertEqual(returned_user1.first_name, user1["first_name"])
self.assertEqual(returned_user1.last_name, user1["last_name"])
def test_delete_user(self):
"""
GIVEN:
- Existing user account
WHEN:
- API request is made to delete a user account
THEN:
- Account is deleted
"""
user1 = User.objects.create(
username="testuser",
password="test",
first_name="Test",
last_name="User",
)
nUsers = User.objects.count()
response = self.client.delete(
f"{self.ENDPOINT}{user1.pk}/",
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(User.objects.count(), nUsers - 1)
def test_update_user(self):
"""
GIVEN:
- Existing user accounts
WHEN:
- API request is made to update user account
THEN:
- The user account is updated; the password is only updated if it is not the masked value ('****')
"""
user1 = User.objects.create(
username="testuser",
password="test",
first_name="Test",
last_name="User",
)
initial_password = user1.password
response = self.client.patch(
f"{self.ENDPOINT}{user1.pk}/",
data={
"first_name": "Updated Name 1",
"password": "******",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_user1 = User.objects.get(pk=user1.pk)
self.assertEqual(returned_user1.first_name, "Updated Name 1")
self.assertEqual(returned_user1.password, initial_password)
response = self.client.patch(
f"{self.ENDPOINT}{user1.pk}/",
data={
"first_name": "Updated Name 2",
"password": "123xyz",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_user2 = User.objects.get(pk=user1.pk)
self.assertEqual(returned_user2.first_name, "Updated Name 2")
self.assertNotEqual(returned_user2.password, initial_password)
class TestApiGroup(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/groups/"
def setUp(self):
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
def test_get_groups(self):
"""
GIVEN:
- Configured groups
WHEN:
- API call is made to get groups
THEN:
- Configured groups are provided
"""
group1 = Group.objects.create(
name="Test Group",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data["count"], 1)
returned_group1 = response.data["results"][0]
self.assertEqual(returned_group1["name"], group1.name)
def test_create_group(self):
"""
WHEN:
- API request is made to add a group
THEN:
- A new group is created
"""
group1 = {
"name": "Test Group",
}
response = self.client.post(
self.ENDPOINT,
data=group1,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
returned_group1 = Group.objects.get(name="Test Group")
self.assertEqual(returned_group1.name, group1["name"])
def test_delete_group(self):
"""
GIVEN:
- Existing group
WHEN:
- API request is made to delete a group
THEN:
- Group is deleted
"""
group1 = Group.objects.create(
name="Test Group",
)
response = self.client.delete(
f"{self.ENDPOINT}{group1.pk}/",
)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(len(Group.objects.all()), 0)
def test_update_group(self):
"""
GIVEN:
- Existing groups
WHEN:
- API request is made to update group
THEN:
- The group is updated
"""
group1 = Group.objects.create(
name="Test Group",
)
response = self.client.patch(
f"{self.ENDPOINT}{group1.pk}/",
data={
"name": "Updated Name 1",
},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
returned_group1 = Group.objects.get(pk=group1.pk)
self.assertEqual(returned_group1.name, "Updated Name 1")
class TestBulkEditObjectPermissions(APITestCase):
def setUp(self):
super().setUp()
user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=user)
self.t1 = Tag.objects.create(name="t1")
self.t2 = Tag.objects.create(name="t2")
self.c1 = Correspondent.objects.create(name="c1")
self.dt1 = DocumentType.objects.create(name="dt1")
self.sp1 = StoragePath.objects.create(name="sp1")
self.user1 = User.objects.create(username="user1")
self.user2 = User.objects.create(username="user2")
self.user3 = User.objects.create(username="user3")
def test_bulk_object_set_permissions(self):
"""
GIVEN:
- Existing objects
WHEN:
- bulk_edit_object_perms API endpoint is called
THEN:
- Permissions and / or owner are changed
"""
permissions = {
"view": {
"users": [self.user1.id, self.user2.id],
"groups": [],
},
"change": {
"users": [self.user1.id],
"groups": [],
},
}
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.t1.id, self.t2.id],
"object_type": "tags",
"permissions": permissions,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn(self.user1, get_users_with_perms(self.t1))
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.c1.id],
"object_type": "correspondents",
"permissions": permissions,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn(self.user1, get_users_with_perms(self.c1))
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.dt1.id],
"object_type": "document_types",
"permissions": permissions,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn(self.user1, get_users_with_perms(self.dt1))
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.sp1.id],
"object_type": "storage_paths",
"permissions": permissions,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn(self.user1, get_users_with_perms(self.sp1))
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.t1.id, self.t2.id],
"object_type": "tags",
"owner": self.user3.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Tag.objects.get(pk=self.t2.id).owner, self.user3)
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.sp1.id],
"object_type": "storage_paths",
"owner": self.user3.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(StoragePath.objects.get(pk=self.sp1.id).owner, self.user3)
def test_bulk_edit_object_permissions_insufficient_perms(self):
"""
GIVEN:
- Objects owned by user other than logged in user
WHEN:
- bulk_edit_object_perms API endpoint is called
THEN:
- User is not able to change permissions
"""
self.t1.owner = User.objects.get(username="temp_admin")
self.t1.save()
self.client.force_authenticate(user=self.user1)
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.t1.id, self.t2.id],
"object_type": "tags",
"owner": self.user1.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(response.content, b"Insufficient permissions")
def test_bulk_edit_object_permissions_validation(self):
"""
GIVEN:
- Existing objects
WHEN:
- bulk_edit_object_perms API endpoint is called with invalid params
THEN:
- Validation fails
"""
# not a list
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": self.t1.id,
"object_type": "tags",
"owner": self.user1.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# not a list of ints
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": ["one"],
"object_type": "tags",
"owner": self.user1.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# duplicates
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [self.t1.id, self.t2.id, self.t1.id],
"object_type": "tags",
"owner": self.user1.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# not a valid object type
response = self.client.post(
"/api/bulk_edit_object_perms/",
json.dumps(
{
"objects": [1],
"object_type": "madeup",
"owner": self.user1.id,
},
),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

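For reference, the endpoint exercised above takes a list of object ids, an object type (tags, correspondents, document_types or storage_paths), and either a permissions dict or an owner id. A minimal client-side sketch, assuming the requests library and an illustrative base URL and token that are not part of this change set:

import requests

BASE_URL = "http://localhost:8000"   # illustrative
session = requests.Session()
session.headers.update({"Authorization": "Token <api-token>"})  # placeholder token

payload = {
    "objects": [1, 2],                 # ids of the objects to modify
    "object_type": "tags",             # or correspondents / document_types / storage_paths
    "permissions": {
        "view": {"users": [3], "groups": []},
        "change": {"users": [3], "groups": []},
    },
    # "owner": 3,                      # alternatively, reassign ownership instead
}

response = session.post(f"{BASE_URL}/api/bulk_edit_object_perms/", json=payload)
response.raise_for_status()            # the tests above expect HTTP 200 on success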
View File

@@ -0,0 +1,123 @@
import json
import urllib.request
from unittest import mock
from unittest.mock import MagicMock
from rest_framework import status
from rest_framework.test import APITestCase
from documents.tests.utils import DirectoriesMixin
from paperless import version
class TestApiRemoteVersion(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/remote_version/"
def setUp(self):
super().setUp()
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_no_update_prefix(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = json.dumps({"tag_name": "ngx-1.6.0"}).encode()
cm.__enter__.return_value = cm
urlopen_mock.return_value = cm
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data,
{
"version": "1.6.0",
"update_available": False,
},
)
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_no_update_no_prefix(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = json.dumps(
{"tag_name": version.__full_version_str__},
).encode()
cm.__enter__.return_value = cm
urlopen_mock.return_value = cm
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data,
{
"version": version.__full_version_str__,
"update_available": False,
},
)
@mock.patch("urllib.request.urlopen")
def test_remote_version_enabled_update(self, urlopen_mock):
new_version = (
version.__version__[0],
version.__version__[1],
version.__version__[2] + 1,
)
new_version_str = ".".join(map(str, new_version))
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = json.dumps(
{"tag_name": new_version_str},
).encode()
cm.__enter__.return_value = cm
urlopen_mock.return_value = cm
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data,
{
"version": new_version_str,
"update_available": True,
},
)
@mock.patch("urllib.request.urlopen")
def test_remote_version_bad_json(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.return_value = b'{ "blah":'
cm.__enter__.return_value = cm
urlopen_mock.return_value = cm
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data,
{
"version": "0.0.0",
"update_available": False,
},
)
@mock.patch("urllib.request.urlopen")
def test_remote_version_exception(self, urlopen_mock):
cm = MagicMock()
cm.getcode.return_value = status.HTTP_200_OK
cm.read.side_effect = urllib.error.URLError("an error")
cm.__enter__.return_value = cm
urlopen_mock.return_value = cm
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data,
{
"version": "0.0.0",
"update_available": False,
},
)

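The tests above pin down the observable behaviour of the remote-version endpoint: an ngx- prefix is stripped from the GitHub tag name, the result is compared with the running version, and any network or JSON error falls back to "0.0.0" with update_available false. A rough sketch of that logic, offered as an assumption about the shape of the implementation rather than a copy of it (the release URL is illustrative):

import json
import urllib.error
import urllib.request

FALLBACK = {"version": "0.0.0", "update_available": False}

def check_remote_version(current_version: str) -> dict:
    """Best-effort lookup of the latest release tag on GitHub."""
    url = "https://api.github.com/repos/paperless-ngx/paperless-ngx/releases/latest"
    try:
        with urllib.request.urlopen(url) as response:
            data = json.loads(response.read().decode())
        remote = data["tag_name"].removeprefix("ngx-")
    except (urllib.error.URLError, json.JSONDecodeError, KeyError):
        return dict(FALLBACK)
    return {"version": remote, "update_available": remote != current_version}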
File diff suppressed because it is too large

View File

@@ -0,0 +1,240 @@
import uuid
import celery
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase
from documents.models import PaperlessTask
from documents.tests.utils import DirectoriesMixin
class TestTasks(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/tasks/"
ENDPOINT_ACKNOWLEDGE = "/api/acknowledge_tasks/"
def setUp(self):
super().setUp()
self.user = User.objects.create_superuser(username="temp_admin")
self.client.force_authenticate(user=self.user)
def test_get_tasks(self):
"""
GIVEN:
- Attempted celery tasks
WHEN:
- API call is made to get tasks
THEN:
- Attempted and pending tasks are serialized and provided
"""
task1 = PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
)
task2 = PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_two.pdf",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 2)
returned_task1 = response.data[1]
returned_task2 = response.data[0]
self.assertEqual(returned_task1["task_id"], task1.task_id)
self.assertEqual(returned_task1["status"], celery.states.PENDING)
self.assertEqual(returned_task1["task_file_name"], task1.task_file_name)
self.assertEqual(returned_task2["task_id"], task2.task_id)
self.assertEqual(returned_task2["status"], celery.states.PENDING)
self.assertEqual(returned_task2["task_file_name"], task2.task_file_name)
def test_get_single_task_status(self):
"""
GIVEN:
- Query parameter for a valid task ID
WHEN:
- API call is made to get task status
THEN:
- Single task data is returned
"""
id1 = str(uuid.uuid4())
task1 = PaperlessTask.objects.create(
task_id=id1,
task_file_name="task_one.pdf",
)
_ = PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_two.pdf",
)
response = self.client.get(self.ENDPOINT + f"?task_id={id1}")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
returned_task1 = response.data[0]
self.assertEqual(returned_task1["task_id"], task1.task_id)
def test_get_single_task_status_not_valid(self):
"""
GIVEN:
- Query parameter for a non-existent task ID
WHEN:
- API call is made to get task status
THEN:
- No task data is returned
"""
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
)
_ = PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_two.pdf",
)
response = self.client.get(self.ENDPOINT + "?task_id=bad-task-id")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 0)
def test_acknowledge_tasks(self):
"""
GIVEN:
- Attempted celery tasks
WHEN:
- API call is made to mark the task as acknowledged
THEN:
- Task is marked as acknowledged
"""
task = PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(len(response.data), 1)
response = self.client.post(
self.ENDPOINT_ACKNOWLEDGE,
{"tasks": [task.id]},
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.get(self.ENDPOINT)
self.assertEqual(len(response.data), 0)
def test_task_result_no_error(self):
"""
GIVEN:
- A celery task completed without error
WHEN:
- API call is made to get tasks
THEN:
- The returned data includes the task result
"""
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
status=celery.states.SUCCESS,
result="Success. New document id 1 created",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
returned_data = response.data[0]
self.assertEqual(returned_data["result"], "Success. New document id 1 created")
self.assertEqual(returned_data["related_document"], "1")
def test_task_result_with_error(self):
"""
GIVEN:
- A celery task completed with an exception
WHEN:
- API call is made to get tasks
THEN:
- The returned result is the exception info
"""
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="task_one.pdf",
status=celery.states.FAILURE,
result="test.pdf: Not consuming test.pdf: It is a duplicate.",
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
returned_data = response.data[0]
self.assertEqual(
returned_data["result"],
"test.pdf: Not consuming test.pdf: It is a duplicate.",
)
def test_task_name_webui(self):
"""
GIVEN:
- Attempted celery task
- Task was created through the webui
WHEN:
- API call is made to get tasks
THEN:
- Returned data includes the filename
"""
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="test.pdf",
task_name="documents.tasks.some_task",
status=celery.states.SUCCESS,
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
returned_data = response.data[0]
self.assertEqual(returned_data["task_file_name"], "test.pdf")
def test_task_name_consume_folder(self):
"""
GIVEN:
- Attempted celery task
- Task was created through the consume folder
WHEN:
- API call is made to get tasks
THEN:
- Returned data includes the filename
"""
PaperlessTask.objects.create(
task_id=str(uuid.uuid4()),
task_file_name="anothertest.pdf",
task_name="documents.tasks.some_task",
status=celery.states.SUCCESS,
)
response = self.client.get(self.ENDPOINT)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
returned_data = response.data[0]
self.assertEqual(returned_data["task_file_name"], "anothertest.pdf")

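Read together, these tests describe the task API surface: GET /api/tasks/ lists unacknowledged tasks (optionally filtered with ?task_id=<celery id>), a successful result string is mirrored back along with the derived related_document, and POST /api/acknowledge_tasks/ with PaperlessTask primary keys removes tasks from later listings. A short usage sketch with the DRF test client; authentication mirrors setUp above and the ids are placeholders:

from rest_framework.test import APIClient

client = APIClient()
# client.force_authenticate(user=superuser)  # as in setUp above

pending = client.get("/api/tasks/").data                        # unacknowledged tasks
single = client.get("/api/tasks/?task_id=<celery-uuid>").data   # filter by celery id (placeholder)

# Acknowledge by PaperlessTask primary key; the task then drops out of GET /api/tasks/.
client.post("/api/acknowledge_tasks/", {"tasks": [task_pk]})    # task_pk is a placeholder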
View File

@@ -0,0 +1,65 @@
import json
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APITestCase
from documents.tests.utils import DirectoriesMixin
class TestApiUiSettings(DirectoriesMixin, APITestCase):
ENDPOINT = "/api/ui_settings/"
def setUp(self):
super().setUp()
self.test_user = User.objects.create_superuser(username="test")
self.test_user.first_name = "Test"
self.test_user.last_name = "User"
self.test_user.save()
self.client.force_authenticate(user=self.test_user)
def test_api_get_ui_settings(self):
response = self.client.get(self.ENDPOINT, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertDictEqual(
response.data["user"],
{
"id": self.test_user.id,
"username": self.test_user.username,
"is_superuser": True,
"groups": [],
"first_name": self.test_user.first_name,
"last_name": self.test_user.last_name,
},
)
self.assertDictEqual(
response.data["settings"],
{
"update_checking": {
"backend_setting": "default",
},
},
)
def test_api_set_ui_settings(self):
settings = {
"settings": {
"dark_mode": {
"enabled": True,
},
},
}
response = self.client.post(
self.ENDPOINT,
json.dumps(settings),
content_type="application/json",
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
ui_settings = self.test_user.ui_settings
self.assertDictEqual(
ui_settings.settings,
settings["settings"],
)

View File

@@ -1,5 +1,4 @@
import shutil
from pathlib import Path
from unittest import mock
import pytest
@@ -11,10 +10,13 @@ from documents import tasks
from documents.barcodes import BarcodeReader
from documents.consumer import ConsumerError
from documents.data_models import ConsumableDocument
from documents.data_models import DocumentMetadataOverrides
from documents.data_models import DocumentSource
from documents.models import Document
from documents.tests.utils import DirectoriesMixin
from documents.tests.utils import DocumentConsumeDelayMixin
from documents.tests.utils import FileSystemAssertsMixin
from documents.tests.utils import SampleDirMixin
try:
import zxingcpp # noqa: F401
@@ -25,11 +27,7 @@ except ImportError:
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
SAMPLE_DIR = Path(__file__).parent / "samples"
BARCODE_SAMPLE_DIR = SAMPLE_DIR / "barcodes"
class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, SampleDirMixin, TestCase):
def test_scan_file_for_separating_barcodes(self):
"""
GIVEN:
@@ -48,6 +46,46 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {0: False})
@override_settings(
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
def test_scan_tiff_for_separating_barcodes(self):
"""
GIVEN:
- TIFF image containing barcodes
WHEN:
- Consume task returns
THEN:
- The file was split
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.tiff"
with BarcodeReader(test_file, "image/tiff") as reader:
reader.detect()
separator_page_numbers = reader.get_separation_pages()
self.assertDictEqual(separator_page_numbers, {1: False})
@override_settings(
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
def test_scan_tiff_with_alpha_for_separating_barcodes(self):
"""
GIVEN:
- TIFF image containing barcodes
WHEN:
- Consume task returns
THEN:
- The file was split
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle-alpha.tiff"
with BarcodeReader(test_file, "image/tiff") as reader:
reader.detect()
separator_page_numbers = reader.get_separation_pages()
self.assertDictEqual(separator_page_numbers, {1: False})
def test_scan_file_for_separating_barcodes_none_present(self):
"""
GIVEN:
@@ -285,6 +323,28 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertGreater(len(reader.barcodes), 0)
self.assertDictEqual(separator_page_numbers, {1: False})
def test_scan_file_for_separating_barcodes_password(self):
"""
GIVEN:
- Password protected PDF
WHEN:
- File is scanned for barcode
THEN:
- Scanning handles the exception without crashing
"""
test_file = self.SAMPLE_DIR / "password-is-test.pdf"
with self.assertLogs("paperless.barcodes", level="WARNING") as cm:
with BarcodeReader(test_file, "application/pdf") as reader:
reader.detect()
warning = cm.output[0]
expected_str = "WARNING:paperless.barcodes:File is likely password protected, not checking for barcodes"
self.assertTrue(warning.startswith(expected_str))
separator_page_numbers = reader.get_separation_pages()
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {})
def test_separate_pages(self):
"""
GIVEN:
@@ -332,8 +392,12 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
with self.assertLogs("paperless.barcodes", level="WARNING") as cm:
with BarcodeReader(test_file, "application/pdf") as reader:
success = reader.separate(DocumentSource.ApiUpload)
self.assertFalse(success)
self.assertFalse(
reader.separate(
DocumentSource.ApiUpload,
DocumentMetadataOverrides(),
),
)
self.assertEqual(
cm.output,
[
@@ -341,215 +405,6 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
],
)
def test_save_to_dir_given_name(self):
"""
GIVEN:
- File to save to a directory
- There is a name override
WHEN:
- The file is saved
THEN:
- The file exists
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
with BarcodeReader(test_file, "application/pdf") as reader:
reader.separate(DocumentSource.ApiUpload, "newname.pdf")
self.assertEqual(reader.pdf_file, test_file)
target_file1 = settings.CONSUMPTION_DIR / "0_newname.pdf"
target_file2 = settings.CONSUMPTION_DIR / "1_newname.pdf"
self.assertIsFile(target_file1)
self.assertIsFile(target_file2)
def test_barcode_splitter_api_upload(self):
"""
GIVEN:
- Input file containing barcodes
WHEN:
- Input file is split on barcodes
THEN:
- Correct number of files produced
"""
sample_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
test_file = settings.SCRATCH_DIR / "patch-code-t-middle.pdf"
shutil.copy(sample_file, test_file)
with BarcodeReader(test_file, "application/pdf") as reader:
reader.separate(DocumentSource.ApiUpload)
self.assertEqual(reader.pdf_file, test_file)
target_file1 = (
settings.CONSUMPTION_DIR / "patch-code-t-middle_document_0.pdf"
)
target_file2 = (
settings.CONSUMPTION_DIR / "patch-code-t-middle_document_1.pdf"
)
self.assertIsFile(target_file1)
self.assertIsFile(target_file2)
def test_barcode_splitter_consume_dir(self):
"""
GIVEN:
- Input file containing barcodes
WHEN:
- Input file is split on barcodes
THEN:
- Correct number of files produced
"""
sample_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
test_file = settings.CONSUMPTION_DIR / "patch-code-t-middle.pdf"
shutil.copy(sample_file, test_file)
with BarcodeReader(test_file, "application/pdf") as reader:
reader.detect()
reader.separate(DocumentSource.ConsumeFolder)
self.assertEqual(reader.pdf_file, test_file)
target_file1 = (
settings.CONSUMPTION_DIR / "patch-code-t-middle_document_0.pdf"
)
target_file2 = (
settings.CONSUMPTION_DIR / "patch-code-t-middle_document_1.pdf"
)
self.assertIsFile(target_file1)
self.assertIsFile(target_file2)
def test_barcode_splitter_consume_dir_recursive(self):
"""
GIVEN:
- Input file containing barcodes
- Input file is within a directory structure of the consume folder
WHEN:
- Input file is split on barcodes
THEN:
- Correct number of files produced
- Output files are within the same directory structure
"""
sample_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
test_file = (
settings.CONSUMPTION_DIR / "tag1" / "tag2" / "patch-code-t-middle.pdf"
)
test_file.parent.mkdir(parents=True)
shutil.copy(sample_file, test_file)
with BarcodeReader(test_file, "application/pdf") as reader:
reader.separate(DocumentSource.ConsumeFolder)
self.assertEqual(reader.pdf_file, test_file)
target_file1 = (
settings.CONSUMPTION_DIR
/ "tag1"
/ "tag2"
/ "patch-code-t-middle_document_0.pdf"
)
target_file2 = (
settings.CONSUMPTION_DIR
/ "tag1"
/ "tag2"
/ "patch-code-t-middle_document_1.pdf"
)
self.assertIsFile(target_file1)
self.assertIsFile(target_file2)
@override_settings(CONSUMER_ENABLE_BARCODES=True)
def test_consume_barcode_file(self):
"""
GIVEN:
- Input file with barcodes given to consume task
WHEN:
- Consume task returns
THEN:
- The file was split
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
dst = settings.SCRATCH_DIR / "patch-code-t-middle.pdf"
shutil.copy(test_file, dst)
with mock.patch("documents.tasks.async_to_sync"):
self.assertEqual(
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dst,
),
None,
),
"File successfully split",
)
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
def test_consume_barcode_tiff_file(self):
"""
GIVEN:
- TIFF image containing barcodes
WHEN:
- Consume task returns
THEN:
- The file was split
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.tiff"
dst = settings.SCRATCH_DIR / "patch-code-t-middle.tiff"
shutil.copy(test_file, dst)
with mock.patch("documents.tasks.async_to_sync"):
self.assertEqual(
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dst,
),
None,
),
"File successfully split",
)
self.assertIsNotFile(dst)
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
def test_consume_barcode_tiff_file_with_alpha(self):
"""
GIVEN:
- TIFF image containing barcodes
- TIFF image has an alpha layer
WHEN:
- Consume task handles the alpha layer and returns
THEN:
- The file was split without issue
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle-alpha.tiff"
dst = settings.SCRATCH_DIR / "patch-code-t-middle.tiff"
shutil.copy(test_file, dst)
with mock.patch("documents.tasks.async_to_sync"):
self.assertEqual(
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dst,
),
None,
),
"File successfully split",
)
self.assertIsNotFile(dst)
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_BARCODE_TIFF_SUPPORT=True,
@@ -597,60 +452,6 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertIsNone(kwargs["override_document_type_id"])
self.assertIsNone(kwargs["override_tag_ids"])
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_BARCODE_TIFF_SUPPORT=True,
)
def test_consume_barcode_supported_no_extension_file(self):
"""
GIVEN:
- TIFF image containing barcodes
- TIFF file is given without extension
WHEN:
- Consume task returns
THEN:
- The file was split
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.tiff"
dst = settings.SCRATCH_DIR / "patch-code-t-middle"
shutil.copy(test_file, dst)
with mock.patch("documents.tasks.async_to_sync"):
self.assertEqual(
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=dst,
),
None,
),
"File successfully split",
)
self.assertIsNotFile(dst)
def test_scan_file_for_separating_barcodes_password(self):
"""
GIVEN:
- Password protected PDF
WHEN:
- File is scanned for barcode
THEN:
- Scanning handles the exception without crashing
"""
test_file = self.SAMPLE_DIR / "password-is-test.pdf"
with self.assertLogs("paperless.barcodes", level="WARNING") as cm:
with BarcodeReader(test_file, "application/pdf") as reader:
reader.detect()
warning = cm.output[0]
expected_str = "WARNING:paperless.barcodes:File is likely password protected, not checking for barcodes"
self.assertTrue(warning.startswith(expected_str))
separator_page_numbers = reader.get_separation_pages()
self.assertEqual(reader.pdf_file, test_file)
self.assertDictEqual(separator_page_numbers, {})
@override_settings(
CONSUMER_ENABLE_BARCODES=True,
CONSUMER_ENABLE_ASN_BARCODE=True,
@@ -722,11 +523,64 @@ class TestBarcode(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
self.assertEqual(len(document_list), 5)
class TestAsnBarcode(DirectoriesMixin, TestCase):
SAMPLE_DIR = Path(__file__).parent / "samples"
@override_settings(CONSUMER_BARCODE_SCANNER="PYZBAR")
class TestBarcodeNewConsume(
DirectoriesMixin,
FileSystemAssertsMixin,
SampleDirMixin,
DocumentConsumeDelayMixin,
TestCase,
):
@override_settings(CONSUMER_ENABLE_BARCODES=True)
def test_consume_barcode_file(self):
"""
GIVEN:
- Incoming file with at least 1 barcode, producing 2 documents
- Document includes metadata override information
WHEN:
- The document is split
THEN:
- Two new consume tasks are created
- Metadata overrides are preserved for the new consume
- The document source is unchanged (for consume templates)
"""
test_file = self.BARCODE_SAMPLE_DIR / "patch-code-t-middle.pdf"
temp_copy = self.dirs.scratch_dir / test_file.name
shutil.copy(test_file, temp_copy)
BARCODE_SAMPLE_DIR = SAMPLE_DIR / "barcodes"
overrides = DocumentMetadataOverrides(tag_ids=[1, 2, 9])
with mock.patch("documents.tasks.async_to_sync") as progress_mocker:
self.assertEqual(
tasks.consume_file(
ConsumableDocument(
source=DocumentSource.ConsumeFolder,
original_file=temp_copy,
),
overrides,
),
"File successfully split",
)
# We let the consumer know progress is done
progress_mocker.assert_called_once()
# 2 new document consume tasks created
self.assertEqual(self.consume_file_mock.call_count, 2)
self.assertIsNotFile(temp_copy)
# Check the split files exist
# Check the source is unchanged
# Check the overrides are unchanged
for (
new_input_doc,
new_doc_overrides,
) in self.get_all_consume_delay_call_args():
self.assertEqual(new_input_doc.source, DocumentSource.ConsumeFolder)
self.assertIsFile(new_input_doc.original_file)
self.assertEqual(overrides, new_doc_overrides)
class TestAsnBarcode(DirectoriesMixin, SampleDirMixin, TestCase):
@override_settings(CONSUMER_ASN_BARCODE_PREFIX="CUSTOM-PREFIX-")
def test_scan_file_for_asn_custom_prefix(self):
"""

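The new TestBarcodeNewConsume case is the regression test for #4982: when a barcode split fans out into new consume tasks, the DocumentMetadataOverrides supplied with the original upload must be carried over, and the DocumentSource must stay unchanged so consume templates still match. A condensed sketch of the flow those assertions cover (the input path and tag ids are placeholders; the surrounding Django setup is assumed):

from documents.barcodes import BarcodeReader
from documents.data_models import DocumentMetadataOverrides, DocumentSource

overrides = DocumentMetadataOverrides(tag_ids=[1, 2, 9])       # metadata to preserve

with BarcodeReader(path_to_pdf, "application/pdf") as reader:  # path_to_pdf is a placeholder
    reader.detect()                                            # locate separator barcodes
    pages = reader.get_separation_pages()                      # e.g. {1: False}
    if pages:
        # Splitting re-queues the parts for consumption; the tests above assert
        # that each new task receives the same overrides and the same source.
        reader.separate(DocumentSource.ConsumeFolder, overrides)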
View File

@@ -646,10 +646,13 @@ class TestExportImport(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
with paperless_environment():
self.assertEqual(Document.objects.count(), 4)
self.assertEqual(CustomFieldInstance.objects.count(), 1)
Document.objects.all().delete()
CustomFieldInstance.objects.all().delete()
self.assertEqual(Document.objects.count(), 0)
call_command("document_importer", "--no-progress-bar", self.target)
self.assertEqual(Document.objects.count(), 4)
self.assertEqual(CustomFieldInstance.objects.count(), 1)
def test_folder_prefix(self):
"""

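The assertions above cover the import half of a split-manifest round trip: after wiping documents and custom field instances, document_importer restores both from the manifest. For completeness, a hedged sketch of the full cycle via Django management commands; the exporter name and the target path are taken from the project's conventions elsewhere, not from this diff:

from django.core.management import call_command

# Export to a target directory (path is illustrative).
call_command("document_exporter", "/tmp/export", "--no-progress-bar")

# ...on a fresh or wiped install, restore from the same manifest:
call_command("document_importer", "/tmp/export", "--no-progress-bar")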
View File

@@ -235,8 +235,10 @@ class DocumentConsumeDelayMixin:
"""
Iterates over all calls to the async task and returns the arguments
"""
# Must be at least 1 call
self.consume_file_mock.assert_called()
for args, _ in self.consume_file_mock.call_args_list:
for args, kwargs in self.consume_file_mock.call_args_list:
input_doc, overrides = args
yield (input_doc, overrides)
@@ -244,7 +246,7 @@ class DocumentConsumeDelayMixin:
def get_specific_consume_delay_call_args(
self,
index: int,
) -> Iterator[tuple[ConsumableDocument, DocumentMetadataOverrides]]:
) -> tuple[ConsumableDocument, DocumentMetadataOverrides]:
"""
Returns the arguments of a specific call to the async task
"""
@@ -299,3 +301,9 @@ class TestMigrations(TransactionTestCase):
def setUpBeforeMigration(self, apps):
pass
class SampleDirMixin:
SAMPLE_DIR = Path(__file__).parent / "samples"
BARCODE_SAMPLE_DIR = SAMPLE_DIR / "barcodes"

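These helper changes are what the barcode tests above lean on: SampleDirMixin centralises SAMPLE_DIR and BARCODE_SAMPLE_DIR, and the corrected annotation makes clear that get_specific_consume_delay_call_args returns a single (ConsumableDocument, DocumentMetadataOverrides) tuple rather than an iterator. A small usage sketch from inside a test class that mixes in DocumentConsumeDelayMixin (the consumption trigger itself is omitted):

# All captured consume_file.delay calls, in call order.
for input_doc, overrides in self.get_all_consume_delay_call_args():
    self.assertTrue(input_doc.original_file.exists())

# A single call's arguments unpack directly, thanks to the tuple return type.
first_doc, first_overrides = self.get_specific_consume_delay_call_args(0)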
View File

@@ -182,10 +182,14 @@ class PassUserMixin(CreateModelMixin):
class CorrespondentViewSet(ModelViewSet, PassUserMixin):
model = Correspondent
queryset = Correspondent.objects.annotate(
document_count=Count("documents"),
last_correspondence=Max("documents__created"),
).order_by(Lower("name"))
queryset = (
Correspondent.objects.annotate(
document_count=Count("documents"),
last_correspondence=Max("documents__created"),
)
.select_related("owner")
.order_by(Lower("name"))
)
serializer_class = CorrespondentSerializer
pagination_class = StandardPagination
@@ -208,8 +212,12 @@ class CorrespondentViewSet(ModelViewSet, PassUserMixin):
class TagViewSet(ModelViewSet, PassUserMixin):
model = Tag
queryset = Tag.objects.annotate(document_count=Count("documents")).order_by(
Lower("name"),
queryset = (
Tag.objects.annotate(document_count=Count("documents"))
.select_related("owner")
.order_by(
Lower("name"),
)
)
def get_serializer_class(self, *args, **kwargs):
@@ -232,9 +240,13 @@ class TagViewSet(ModelViewSet, PassUserMixin):
class DocumentTypeViewSet(ModelViewSet, PassUserMixin):
model = DocumentType
queryset = DocumentType.objects.annotate(
document_count=Count("documents"),
).order_by(Lower("name"))
queryset = (
DocumentType.objects.annotate(
document_count=Count("documents"),
)
.select_related("owner")
.order_by(Lower("name"))
)
serializer_class = DocumentTypeSerializer
pagination_class = StandardPagination
@@ -283,7 +295,12 @@ class DocumentViewSet(
)
def get_queryset(self):
return Document.objects.distinct().annotate(num_notes=Count("notes"))
return (
Document.objects.distinct()
.annotate(num_notes=Count("notes"))
.select_related("correspondent", "storage_path", "document_type", "owner")
.prefetch_related("tags", "custom_fields", "notes")
)
def get_serializer(self, *args, **kwargs):
fields_param = self.request.query_params.get("fields", None)
@@ -627,9 +644,18 @@ class DocumentViewSet(
class SearchResultSerializer(DocumentSerializer, PassUserMixin):
def to_representation(self, instance):
doc = Document.objects.get(id=instance["id"])
doc = (
Document.objects.select_related(
"correspondent",
"storage_path",
"document_type",
"owner",
)
.prefetch_related("tags", "custom_fields", "notes")
.get(id=instance["id"])
)
notes = ",".join(
[str(c.note) for c in Note.objects.filter(document=instance["id"])],
[str(c.note) for c in doc.notes.all()],
)
r = super().to_representation(doc)
r["__search_hit__"] = {
@@ -752,7 +778,11 @@ class SavedViewViewSet(ModelViewSet, PassUserMixin):
def get_queryset(self):
user = self.request.user
return SavedView.objects.filter(owner=user)
return (
SavedView.objects.filter(owner=user)
.select_related("owner")
.prefetch_related("filter_rules")
)
def perform_create(self, serializer):
serializer.save(owner=self.request.user)
@@ -1080,8 +1110,12 @@ class BulkDownloadView(GenericAPIView):
class StoragePathViewSet(ModelViewSet, PassUserMixin):
model = StoragePath
queryset = StoragePath.objects.annotate(document_count=Count("documents")).order_by(
Lower("name"),
queryset = (
StoragePath.objects.annotate(document_count=Count("documents"))
.select_related("owner")
.order_by(
Lower("name"),
)
)
serializer_class = StoragePathSerializer
@@ -1347,7 +1381,18 @@ class ConsumptionTemplateViewSet(ModelViewSet):
model = ConsumptionTemplate
queryset = ConsumptionTemplate.objects.all().order_by("name")
queryset = (
ConsumptionTemplate.objects.prefetch_related(
"assign_tags",
"assign_view_users",
"assign_view_groups",
"assign_change_users",
"assign_change_groups",
"assign_custom_fields",
)
.all()
.order_by("order")
)
class CustomFieldViewSet(ModelViewSet):

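All of the queryset changes in this file chase the same goal as #4990: fetch owners, related objects and many-to-many sets in the same round trips instead of issuing one query per row. One hedged way to guard such changes is Django's assertNumQueries; the endpoint choice and the count below are placeholders, not values taken from the PR:

from django.test import TestCase
from rest_framework.test import APIClient


class TagListQueryCountTest(TestCase):
    def test_tag_list_query_count(self):
        client = APIClient()
        # client.force_authenticate(user=some_superuser)  # assumed setup
        # With select_related("owner"), the owner no longer costs one query per
        # tag; assertNumQueries pins the exact total for the request.
        with self.assertNumQueries(10):  # placeholder count
            client.get("/api/tags/", format="json")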
View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-08 12:09\n"
"Last-Translator: \n"
"Language-Team: Catalan\n"
"Language: ca_ES\n"
@@ -581,7 +581,7 @@ msgstr "Monetari"
#: documents/models.py:759
msgid "Document Link"
msgstr ""
msgstr "Enllaç Document"
#: documents/models.py:771
msgid "data type"
@@ -685,7 +685,7 @@ msgstr "dóna permissos d'edició a aquests grups"
#: documents/models.py:1019
msgid "assign these custom fields"
msgstr ""
msgstr "assigna aquests camps personalitzats"
#: documents/models.py:1023
msgid "consumption template"
@@ -1172,15 +1172,15 @@ msgstr "filtra cos"
#: paperless_mail/models.py:143
msgid "filter attachment filename inclusive"
msgstr ""
msgstr "filtra nom del fitxer adjunt incloent"
#: paperless_mail/models.py:155
msgid "filter attachment filename exclusive"
msgstr ""
msgstr "filtra nom del fitxer adjunt excloent"
#: paperless_mail/models.py:160
msgid "Do not consume documents which entirely match this filename if specified. Wildcards such as *.pdf or *invoice* are allowed. Case insensitive."
msgstr ""
msgstr "No consumeixis documents que coincideixin aquest nom d'arxiu especificat. Comodins com *.pdf o *factura* permessos. Cas insensitiu."
#: paperless_mail/models.py:167
msgid "maximum age"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-14 00:23\n"
"Last-Translator: \n"
"Language-Team: French\n"
"Language: fr_FR\n"
@@ -581,7 +581,7 @@ msgstr "Monétaire"
#: documents/models.py:759
msgid "Document Link"
msgstr ""
msgstr "Lien du Document"
#: documents/models.py:771
msgid "data type"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-09 00:23\n"
"Last-Translator: \n"
"Language-Team: Hebrew\n"
"Language: he_IL\n"
@@ -533,7 +533,7 @@ msgstr ""
#: documents/models.py:689
msgid "Original"
msgstr ""
msgstr "מקור"
#: documents/models.py:700
msgid "expiration"
@@ -545,11 +545,11 @@ msgstr ""
#: documents/models.py:739
msgid "share link"
msgstr ""
msgstr "שתף קישור"
#: documents/models.py:740
msgid "share links"
msgstr ""
msgstr "שתף קישורים"
#: documents/models.py:752
msgid "String"
@@ -557,11 +557,11 @@ msgstr ""
#: documents/models.py:753
msgid "URL"
msgstr ""
msgstr "כתובת URL"
#: documents/models.py:754
msgid "Date"
msgstr ""
msgstr "תאריך"
#: documents/models.py:755
msgid "Boolean"
@@ -589,11 +589,11 @@ msgstr ""
#: documents/models.py:779
msgid "custom field"
msgstr ""
msgstr "שדה מותאם אישית"
#: documents/models.py:780
msgid "custom fields"
msgstr ""
msgstr "שדות מותאמים אישית"
#: documents/models.py:842
msgid "custom field instance"
@@ -771,7 +771,7 @@ msgstr "התחבר"
#: documents/templates/registration/login.html:70
msgid "Forgot your password?"
msgstr ""
msgstr "שכחת את הסיסמה?"
#: documents/templates/registration/password_reset_complete.html:14
msgid "Paperless-ngx reset password complete"
@@ -779,7 +779,7 @@ msgstr ""
#: documents/templates/registration/password_reset_complete.html:40
msgid "Password reset complete."
msgstr ""
msgstr "איפוס סיסמה הושלם."
#: documents/templates/registration/password_reset_complete.html:42
#, python-format
@@ -820,11 +820,11 @@ msgstr ""
#: documents/templates/registration/password_reset_done.html:40
msgid "Check your inbox."
msgstr ""
msgstr "בדוק את תיבת הדואר שלך."
#: documents/templates/registration/password_reset_done.html:41
msgid "We've emailed you instructions for setting your password. You should receive the email shortly!"
msgstr ""
msgstr "נשלח מייל עם הוראות להגדרת הסיסמה שלך. אתה צריך לקבל מייל בקרוב!"
#: documents/templates/registration/password_reset_form.html:14
msgid "Paperless-ngx reset password request"
@@ -836,7 +836,7 @@ msgstr ""
#: documents/templates/registration/password_reset_form.html:44
msgid "An error occurred. Please try again."
msgstr ""
msgstr "אירעה שגיאה. נא לנסות שוב."
#: documents/templates/registration/password_reset_form.html:47
msgid "Email"
@@ -844,7 +844,7 @@ msgstr "דוא\"ל"
#: documents/templates/registration/password_reset_form.html:54
msgid "Send me instructions!"
msgstr ""
msgstr "שלח לי את ההוראות!"
#: paperless/apps.py:10
msgid "Paperless"
@@ -868,7 +868,7 @@ msgstr "בלרוסית"
#: paperless/settings.py:590
msgid "Bulgarian"
msgstr ""
msgstr "בולגרית"
#: paperless/settings.py:591
msgid "Catalan"
@@ -908,7 +908,7 @@ msgstr "צרפתית"
#: paperless/settings.py:600
msgid "Hungarian"
msgstr ""
msgstr "הונגרית"
#: paperless/settings.py:601
msgid "Italian"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-13 12:09\n"
"Last-Translator: \n"
"Language-Team: Croatian\n"
"Language: hr_HR\n"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-11 00:25\n"
"Last-Translator: \n"
"Language-Team: Polish\n"
"Language: pl_PL\n"
@@ -665,7 +665,7 @@ msgstr ""
#: documents/models.py:984
msgid "assign this owner"
msgstr ""
msgstr "przypisz tego właściciela"
#: documents/models.py:991
msgid "grant view permissions to these users"
@@ -751,11 +751,11 @@ msgstr "Twoja nazwa użytkownika i hasło nie są zgodne. Spróbuj ponownie."
#: documents/templates/registration/login.html:48
msgid "Share link was not found."
msgstr ""
msgstr "Link nie został odnaleziony."
#: documents/templates/registration/login.html:52
msgid "Share link has expired."
msgstr ""
msgstr "Link utracił ważność."
#: documents/templates/registration/login.html:55
msgid "Username"
@@ -779,7 +779,7 @@ msgstr ""
#: documents/templates/registration/password_reset_complete.html:40
msgid "Password reset complete."
msgstr ""
msgstr "Resetowanie hasła zakończone."
#: documents/templates/registration/password_reset_complete.html:42
#, python-format

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-06 12:09\n"
"PO-Revision-Date: 2023-12-10 00:26\n"
"Last-Translator: \n"
"Language-Team: Portuguese, Brazilian\n"
"Language: pt_BR\n"
@@ -385,27 +385,27 @@ msgstr "caminho de armazenamento é"
#: documents/models.py:448
msgid "has correspondent in"
msgstr ""
msgstr "tem correspondente em"
#: documents/models.py:449
msgid "does not have correspondent in"
msgstr ""
msgstr "não tem correspondente em"
#: documents/models.py:450
msgid "has document type in"
msgstr ""
msgstr "possui categoria de documento em"
#: documents/models.py:451
msgid "does not have document type in"
msgstr ""
msgstr "não possui categoria de documento em"
#: documents/models.py:452
msgid "has storage path in"
msgstr ""
msgstr "possui caminho de armazenamento em"
#: documents/models.py:453
msgid "does not have storage path in"
msgstr ""
msgstr "não possui caminho de armazenamento em"
#: documents/models.py:454
msgid "owner is"
@@ -513,7 +513,7 @@ msgstr "Os dados retornados pela tarefa"
#: documents/models.py:650
msgid "Note for the document"
msgstr ""
msgstr "Nota para o documento"
#: documents/models.py:674
msgid "user"
@@ -521,7 +521,7 @@ msgstr "usuário"
#: documents/models.py:679
msgid "note"
msgstr ""
msgstr "nota"
#: documents/models.py:680
msgid "notes"
@@ -529,15 +529,15 @@ msgstr "notas"
#: documents/models.py:688
msgid "Archive"
msgstr ""
msgstr "Arquivo"
#: documents/models.py:689
msgid "Original"
msgstr ""
msgstr "Original"
#: documents/models.py:700
msgid "expiration"
msgstr ""
msgstr "validade"
#: documents/models.py:707
msgid "slug"
@@ -553,15 +553,15 @@ msgstr "compartilhar link"
#: documents/models.py:752
msgid "String"
msgstr ""
msgstr "Texto"
#: documents/models.py:753
msgid "URL"
msgstr ""
msgstr "URL"
#: documents/models.py:754
msgid "Date"
msgstr ""
msgstr "Data"
#: documents/models.py:755
msgid "Boolean"
@@ -573,7 +573,7 @@ msgstr "Número inteiro"
#: documents/models.py:757
msgid "Float"
msgstr ""
msgstr "Ponto flutuante"
#: documents/models.py:758
msgid "Monetary"
@@ -581,15 +581,15 @@ msgstr "Unidade monetária"
#: documents/models.py:759
msgid "Document Link"
msgstr ""
msgstr "Link do documento"
#: documents/models.py:771
msgid "data type"
msgstr ""
msgstr "tipo de dados"
#: documents/models.py:779
msgid "custom field"
msgstr ""
msgstr "campo personalizado"
#: documents/models.py:780
msgid "custom fields"
@@ -597,11 +597,11 @@ msgstr "campos personalizados"
#: documents/models.py:842
msgid "custom field instance"
msgstr ""
msgstr "instância de campo personalizado"
#: documents/models.py:843
msgid "custom field instances"
msgstr ""
msgstr "instâncias de campo personalizadas"
#: documents/models.py:891
msgid "Consume Folder"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-10 00:26\n"
"Last-Translator: \n"
"Language-Team: Portuguese\n"
"Language: pt_PT\n"
@@ -385,23 +385,23 @@ msgstr "local de armazenamento é"
#: documents/models.py:448
msgid "has correspondent in"
msgstr ""
msgstr "tem correspondente em"
#: documents/models.py:449
msgid "does not have correspondent in"
msgstr ""
msgstr "não tem correspondente em"
#: documents/models.py:450
msgid "has document type in"
msgstr ""
msgstr "tem categoria de documento em"
#: documents/models.py:451
msgid "does not have document type in"
msgstr ""
msgstr "não tem categoria de documento em"
#: documents/models.py:452
msgid "has storage path in"
msgstr ""
msgstr "tem caminho de armazenamento em"
#: documents/models.py:453
msgid "does not have storage path in"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-16 00:23\n"
"Last-Translator: \n"
"Language-Team: Romanian\n"
"Language: ro_RO\n"
@@ -23,11 +23,11 @@ msgstr "Documente"
#: documents/models.py:36 documents/models.py:734
msgid "owner"
msgstr ""
msgstr "proprietar"
#: documents/models.py:53
msgid "None"
msgstr ""
msgstr "Nimic"
#: documents/models.py:54
msgid "Any word"
@@ -108,15 +108,15 @@ msgstr "tipuri de document"
#: documents/models.py:124
msgid "path"
msgstr ""
msgstr "cale"
#: documents/models.py:129 documents/models.py:156
msgid "storage path"
msgstr ""
msgstr "cale de stocare"
#: documents/models.py:130
msgid "storage paths"
msgstr ""
msgstr "căi de stocare"
#: documents/models.py:137
msgid "Unencrypted"
@@ -193,11 +193,11 @@ msgstr "Numele curent al arhivei stocate"
#: documents/models.py:250
msgid "original filename"
msgstr ""
msgstr "numele original al fișierului"
#: documents/models.py:256
msgid "The original name of the file when it was uploaded"
msgstr ""
msgstr "Numele original al fișierului când a fost încărcat"
#: documents/models.py:263
msgid "archive serial number"
@@ -381,47 +381,47 @@ msgstr ""
#: documents/models.py:447
msgid "storage path is"
msgstr ""
msgstr "calea de stocare este"
#: documents/models.py:448
msgid "has correspondent in"
msgstr ""
msgstr "are corespondent în"
#: documents/models.py:449
msgid "does not have correspondent in"
msgstr ""
msgstr "nu are corespondent în"
#: documents/models.py:450
msgid "has document type in"
msgstr ""
msgstr "are tip de document în"
#: documents/models.py:451
msgid "does not have document type in"
msgstr ""
msgstr "nu are tip document în"
#: documents/models.py:452
msgid "has storage path in"
msgstr ""
msgstr "are cale de stocare în"
#: documents/models.py:453
msgid "does not have storage path in"
msgstr ""
msgstr "nu are cale de stocare în"
#: documents/models.py:454
msgid "owner is"
msgstr ""
msgstr "proprietarul este"
#: documents/models.py:455
msgid "has owner in"
msgstr ""
msgstr "are proprietar în"
#: documents/models.py:456
msgid "does not have owner"
msgstr ""
msgstr "nu are proprietar"
#: documents/models.py:457
msgid "does not have owner in"
msgstr ""
msgstr "nu are proprietar în"
#: documents/models.py:467
msgid "rule type"
@@ -441,47 +441,47 @@ msgstr "reguli de filtrare"
#: documents/models.py:584
msgid "Task ID"
msgstr ""
msgstr "ID Sarcină"
#: documents/models.py:585
msgid "Celery ID for the Task that was run"
msgstr ""
msgstr "ID-ul sarcinii Celery care a fost rulată"
#: documents/models.py:590
msgid "Acknowledged"
msgstr ""
msgstr "Confirmat"
#: documents/models.py:591
msgid "If the task is acknowledged via the frontend or API"
msgstr ""
msgstr "Dacă sarcina este confirmată prin frontend sau API"
#: documents/models.py:597
msgid "Task Filename"
msgstr ""
msgstr "Numele fișierului sarcină"
#: documents/models.py:598
msgid "Name of the file which the Task was run for"
msgstr ""
msgstr "Numele fișierului pentru care sarcina a fost executată"
#: documents/models.py:604
msgid "Task Name"
msgstr ""
msgstr "Nume sarcină"
#: documents/models.py:605
msgid "Name of the Task which was run"
msgstr ""
msgstr "Numele sarcinii care a fost executată"
#: documents/models.py:612
msgid "Task State"
msgstr ""
msgstr "Stare sarcină"
#: documents/models.py:613
msgid "Current state of the task being run"
msgstr ""
msgstr "Stadiul actual al sarcinii în curs de desfășurare"
#: documents/models.py:618
msgid "Created DateTime"
msgstr ""
msgstr "Data creării"
#: documents/models.py:619
msgid "Datetime field when the task result was created in UTC"
@@ -489,7 +489,7 @@ msgstr ""
#: documents/models.py:624
msgid "Started DateTime"
msgstr ""
msgstr "Data începerii"
#: documents/models.py:625
msgid "Datetime field when the task was started in UTC"
@@ -497,7 +497,7 @@ msgstr ""
#: documents/models.py:630
msgid "Completed DateTime"
msgstr ""
msgstr "Data finalizării"
#: documents/models.py:631
msgid "Datetime field when the task was completed in UTC"
@@ -505,15 +505,15 @@ msgstr ""
#: documents/models.py:636
msgid "Result Data"
msgstr ""
msgstr "Datele rezultatului"
#: documents/models.py:638
msgid "The data returned by the task"
msgstr ""
msgstr "Datele returnate de sarcină"
#: documents/models.py:650
msgid "Note for the document"
msgstr ""
msgstr "Notă pentru document"
#: documents/models.py:674
msgid "user"
@@ -521,23 +521,23 @@ msgstr "utilizator"
#: documents/models.py:679
msgid "note"
msgstr ""
msgstr "notă"
#: documents/models.py:680
msgid "notes"
msgstr ""
msgstr "note"
#: documents/models.py:688
msgid "Archive"
msgstr ""
msgstr "Arhivă"
#: documents/models.py:689
msgid "Original"
msgstr ""
msgstr "Original"
#: documents/models.py:700
msgid "expiration"
msgstr ""
msgstr "expirare"
#: documents/models.py:707
msgid "slug"
@@ -545,35 +545,35 @@ msgstr ""
#: documents/models.py:739
msgid "share link"
msgstr ""
msgstr "link de partajare"
#: documents/models.py:740
msgid "share links"
msgstr ""
msgstr "link-uri de partajare"
#: documents/models.py:752
msgid "String"
msgstr ""
msgstr "Şir de caractere"
#: documents/models.py:753
msgid "URL"
msgstr ""
msgstr "Adresă URL"
#: documents/models.py:754
msgid "Date"
msgstr ""
msgstr "Dată"
#: documents/models.py:755
msgid "Boolean"
msgstr ""
msgstr "Boolean"
#: documents/models.py:756
msgid "Integer"
msgstr ""
msgstr "Număr întreg"
#: documents/models.py:757
msgid "Float"
msgstr ""
msgstr "Număr zecimal"
#: documents/models.py:758
msgid "Monetary"
@@ -581,11 +581,11 @@ msgstr ""
#: documents/models.py:759
msgid "Document Link"
msgstr ""
msgstr "Link document"
#: documents/models.py:771
msgid "data type"
msgstr ""
msgstr "tip date"
#: documents/models.py:779
msgid "custom field"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-10 00:26\n"
"Last-Translator: \n"
"Language-Team: Serbian (Latin)\n"
"Language: sr_CS\n"

View File

@@ -3,7 +3,7 @@ msgstr ""
"Project-Id-Version: paperless-ngx\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2023-12-05 08:26-0800\n"
"PO-Revision-Date: 2023-12-05 16:27\n"
"PO-Revision-Date: 2023-12-14 00:23\n"
"Last-Translator: \n"
"Language-Team: Chinese Traditional\n"
"Language: zh_TW\n"
@@ -47,7 +47,7 @@ msgstr ""
#: documents/models.py:58
msgid "Fuzzy word"
msgstr ""
msgstr "模糊詞"
#: documents/models.py:59
msgid "Automatic"
@@ -68,15 +68,15 @@ msgstr "比對演算法"
#: documents/models.py:72
msgid "is insensitive"
msgstr ""
msgstr "不區分大小寫"
#: documents/models.py:95 documents/models.py:147
msgid "correspondent"
msgstr ""
msgstr "聯繫者"
#: documents/models.py:96
msgid "correspondents"
msgstr ""
msgstr "聯繫者"
#: documents/models.py:100
msgid "color"
@@ -84,47 +84,47 @@ msgstr "顏色"
#: documents/models.py:103
msgid "is inbox tag"
msgstr ""
msgstr "收件匣標籤"
#: documents/models.py:106
msgid "Marks this tag as an inbox tag: All newly consumed documents will be tagged with inbox tags."
msgstr ""
msgstr "標記此標籤為收件匣標籤:所有新處理的文件將會以此收件匣標籤作標記。"
#: documents/models.py:112
msgid "tag"
msgstr ""
msgstr "標籤"
#: documents/models.py:113 documents/models.py:185
msgid "tags"
msgstr ""
msgstr "標籤"
#: documents/models.py:118 documents/models.py:167
msgid "document type"
msgstr ""
msgstr "文件類型"
#: documents/models.py:119
msgid "document types"
msgstr ""
msgstr "文件類型"
#: documents/models.py:124
msgid "path"
msgstr ""
msgstr "位址"
#: documents/models.py:129 documents/models.py:156
msgid "storage path"
msgstr ""
msgstr "儲存位址"
#: documents/models.py:130
msgid "storage paths"
msgstr ""
msgstr "儲存位址"
#: documents/models.py:137
msgid "Unencrypted"
msgstr ""
msgstr "未加密"
#: documents/models.py:138
msgid "Encrypted with GNU Privacy Guard"
msgstr ""
msgstr "已使用 GNU Privacy Guard 進行加密"
#: documents/models.py:159
msgid "title"
@@ -189,27 +189,27 @@ msgstr "存檔檔案名稱"
#: documents/models.py:246
msgid "Current archive filename in storage"
msgstr ""
msgstr "現時儲存空間封存的檔案名稱"
#: documents/models.py:250
msgid "original filename"
msgstr ""
msgstr "原先檔案名稱"
#: documents/models.py:256
msgid "The original name of the file when it was uploaded"
msgstr ""
msgstr "檔案上傳時的檔案名稱"
#: documents/models.py:263
msgid "archive serial number"
msgstr ""
msgstr "封存編號"
#: documents/models.py:273
msgid "The position of this document in your physical document archive."
msgstr ""
msgstr "此檔案在你實體儲存空間的位置。"
#: documents/models.py:279 documents/models.py:665 documents/models.py:719
msgid "document"
msgstr ""
msgstr "文件"
#: documents/models.py:280
msgid "documents"
@@ -217,47 +217,47 @@ msgstr "文件"
#: documents/models.py:368
msgid "debug"
msgstr ""
msgstr "偵錯"
#: documents/models.py:369
msgid "information"
msgstr ""
msgstr "資訊"
#: documents/models.py:370
msgid "warning"
msgstr ""
msgstr "警告"
#: documents/models.py:371 paperless_mail/models.py:305
msgid "error"
msgstr ""
msgstr "錯誤"
#: documents/models.py:372
msgid "critical"
msgstr ""
msgstr "嚴重"
#: documents/models.py:375
msgid "group"
msgstr ""
msgstr "群組"
#: documents/models.py:377
msgid "message"
msgstr ""
msgstr "訊息"
#: documents/models.py:380
msgid "level"
msgstr ""
msgstr "程度"
#: documents/models.py:389
msgid "log"
msgstr ""
msgstr "記錄"
#: documents/models.py:390
msgid "logs"
msgstr ""
msgstr "記錄"
#: documents/models.py:399 documents/models.py:464
msgid "saved view"
msgstr ""
msgstr "已儲存的檢視表"
#: documents/models.py:400
msgid "saved views"
@@ -265,207 +265,207 @@ msgstr "保存視圖"
#: documents/models.py:405
msgid "show on dashboard"
msgstr ""
msgstr "顯示在概覽"
#: documents/models.py:408
msgid "show in sidebar"
msgstr ""
msgstr "顯示在側邊欄"
#: documents/models.py:412
msgid "sort field"
msgstr ""
msgstr "排序欄位"
#: documents/models.py:417
msgid "sort reverse"
msgstr ""
msgstr "倒轉排序"
#: documents/models.py:422
msgid "title contains"
msgstr ""
msgstr "標題包含"
#: documents/models.py:423
msgid "content contains"
msgstr ""
msgstr "內容包含"
#: documents/models.py:424
msgid "ASN is"
msgstr ""
msgstr "ASN 為"
#: documents/models.py:425
msgid "correspondent is"
msgstr ""
msgstr "聯繫者為"
#: documents/models.py:426
msgid "document type is"
msgstr ""
msgstr "文件類型為"
#: documents/models.py:427
msgid "is in inbox"
msgstr ""
msgstr "在收件匣內"
#: documents/models.py:428
msgid "has tag"
msgstr ""
msgstr "包含標籤"
#: documents/models.py:429
msgid "has any tag"
msgstr ""
msgstr "包含任何標籤"
#: documents/models.py:430
msgid "created before"
msgstr ""
msgstr "建立時間之前"
#: documents/models.py:431
msgid "created after"
msgstr ""
msgstr "建立時間之後"
#: documents/models.py:432
msgid "created year is"
msgstr ""
msgstr "建立年份為"
#: documents/models.py:433
msgid "created month is"
msgstr ""
msgstr "建立月份為"
#: documents/models.py:434
msgid "created day is"
msgstr ""
msgstr "建立日期為"
#: documents/models.py:435
msgid "added before"
msgstr ""
msgstr "加入時間之前"
#: documents/models.py:436
msgid "added after"
msgstr ""
msgstr "加入時間之後"
#: documents/models.py:437
msgid "modified before"
msgstr ""
msgstr "修改之前"
#: documents/models.py:438
msgid "modified after"
msgstr ""
msgstr "修改之後"
#: documents/models.py:439
msgid "does not have tag"
msgstr ""
msgstr "沒有包含標籤"
#: documents/models.py:440
msgid "does not have ASN"
msgstr ""
msgstr "沒有包含 ASN"
#: documents/models.py:441
msgid "title or content contains"
msgstr ""
msgstr "標題或內容包含"
#: documents/models.py:442
msgid "fulltext query"
msgstr ""
msgstr "全文搜索"
#: documents/models.py:443
msgid "more like this"
msgstr ""
msgstr "其他類似內容"
#: documents/models.py:444
msgid "has tags in"
msgstr ""
msgstr "含有這個標籤"
#: documents/models.py:445
msgid "ASN greater than"
msgstr ""
msgstr "ASN 大於"
#: documents/models.py:446
msgid "ASN less than"
msgstr ""
msgstr "ASN 小於"
#: documents/models.py:447
msgid "storage path is"
msgstr ""
msgstr "儲存位址為"
#: documents/models.py:448
msgid "has correspondent in"
msgstr ""
msgstr "包含聯繫者"
#: documents/models.py:449
msgid "does not have correspondent in"
msgstr ""
msgstr "沒有包含聯繫者"
#: documents/models.py:450
msgid "has document type in"
msgstr ""
msgstr "文件類型包含"
#: documents/models.py:451
msgid "does not have document type in"
msgstr ""
msgstr "沒有包含的文件類型"
#: documents/models.py:452
msgid "has storage path in"
msgstr ""
msgstr "儲存位址包含"
#: documents/models.py:453
msgid "does not have storage path in"
msgstr ""
msgstr "沒有包含的儲存位址"
#: documents/models.py:454
msgid "owner is"
msgstr ""
msgstr "擁有者為"
#: documents/models.py:455
msgid "has owner in"
msgstr ""
msgstr "擁有者包含"
#: documents/models.py:456
msgid "does not have owner"
msgstr ""
msgstr "沒有包含的擁有者"
#: documents/models.py:457
msgid "does not have owner in"
msgstr ""
msgstr "沒有包含的擁有者"
#: documents/models.py:467
msgid "rule type"
msgstr ""
msgstr "規則類型"
#: documents/models.py:469
msgid "value"
msgstr ""
msgstr "數值"
#: documents/models.py:472
msgid "filter rule"
msgstr ""
msgstr "過濾規則"
#: documents/models.py:473
msgid "filter rules"
msgstr ""
msgstr "過濾規則"
#: documents/models.py:584
msgid "Task ID"
msgstr ""
msgstr "任務 ID"
#: documents/models.py:585
msgid "Celery ID for the Task that was run"
msgstr ""
msgstr "已執行任務的 Celery ID"
#: documents/models.py:590
msgid "Acknowledged"
msgstr ""
msgstr "已確認"
#: documents/models.py:591
msgid "If the task is acknowledged via the frontend or API"
msgstr ""
msgstr "如果任務已由前端 / API 確認"
#: documents/models.py:597
msgid "Task Filename"
msgstr ""
msgstr "任務檔案名稱"
#: documents/models.py:598
msgid "Name of the file which the Task was run for"
msgstr ""
msgstr "執行任務的目標檔案名稱"
#: documents/models.py:604
msgid "Task Name"
msgstr ""
msgstr "任務名稱"
#: documents/models.py:605
msgid "Name of the Task which was run"
@@ -473,7 +473,7 @@ msgstr ""
#: documents/models.py:612
msgid "Task State"
msgstr ""
msgstr "任務狀態"
#: documents/models.py:613
msgid "Current state of the task being run"
@@ -657,7 +657,7 @@ msgstr ""
#: documents/models.py:967 paperless_mail/models.py:238
msgid "assign this correspondent"
msgstr ""
msgstr "指派這個聯繫者"
#: documents/models.py:975
msgid "assign this storage path"
@@ -1128,7 +1128,7 @@ msgstr ""
#: paperless_mail/models.py:88
msgid "Do not assign a correspondent"
msgstr ""
msgstr "不要指派聯繫者"
#: paperless_mail/models.py:89
msgid "Use mail address"
@@ -1140,7 +1140,7 @@ msgstr ""
#: paperless_mail/models.py:91
msgid "Use correspondent selected below"
msgstr ""
msgstr "使用以下已選擇的聯繫者"
#: paperless_mail/models.py:101
msgid "account"
@@ -1220,7 +1220,7 @@ msgstr ""
#: paperless_mail/models.py:228
msgid "assign correspondent from"
msgstr ""
msgstr "指派聯繫者從"
#: paperless_mail/models.py:242
msgid "Assign the rule owner to documents"

View File

@@ -1,6 +1,6 @@
 from typing import Final
-__version__: Final[tuple[int, int, int]] = (2, 1, 1)
+__version__: Final[tuple[int, int, int]] = (2, 1, 3)
 # Version string like X.Y.Z
 __full_version_str__: Final[str] = ".".join(map(str, __version__))
 # Version string like X.Y
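
For reference, the bumped tuple above feeds the joined version strings directly. A minimal sketch of what they evaluate to; the X.Y variable's real name is cut off in this view, so short_version_str below is only an illustrative stand-in:

from typing import Final

__version__: Final[tuple[int, int, int]] = (2, 1, 3)

# Version string like X.Y.Z
__full_version_str__: Final[str] = ".".join(map(str, __version__))

# Version string like X.Y (illustrative name; the real identifier is truncated above)
short_version_str: Final[str] = ".".join(map(str, __version__[:2]))

assert __full_version_str__ == "2.1.3"
assert short_version_str == "2.1"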

View File

@@ -8,6 +8,7 @@ import traceback
 from datetime import date
 from datetime import timedelta
 from fnmatch import fnmatch
+from pathlib import Path
 from typing import Optional
 from typing import Union
@@ -703,12 +704,15 @@ class MailAccountHandler(LoggingMixin):
 if is_mime_type_supported(mime_type):
     os.makedirs(settings.SCRATCH_DIR, exist_ok=True)
-    _, temp_filename = tempfile.mkstemp(
-        prefix="paperless-mail-",
-        dir=settings.SCRATCH_DIR,
+    temp_dir = Path(
+        tempfile.mkdtemp(
+            prefix="paperless-mail-",
+            dir=settings.SCRATCH_DIR,
+        ),
+    )
-    with open(temp_filename, "wb") as f:
-        f.write(att.payload)
+    temp_filename = temp_dir / pathvalidate.sanitize_filename(att.filename)
+    temp_filename.write_bytes(att.payload)
     self.log.info(
         f"Rule {rule}: "

View File

@@ -1271,7 +1271,10 @@ class TestMail(
         self.assertEqual(len(self.bogus_mailbox.fetch("UNSEEN", False)), 0)
         self.assertEqual(len(self.bogus_mailbox.messages), 3)
-    def assert_queue_consumption_tasks_call_args(self, expected_call_args: list):
+    def assert_queue_consumption_tasks_call_args(
+        self,
+        expected_call_args: list[list[dict[str, str]]],
+    ):
         """
         Verifies that queue_consumption_tasks has been called with the expected arguments.
@@ -1283,7 +1286,7 @@ class TestMail(
         """
-        # assert number of calls to queue_consumption_tasks mathc
+        # assert number of calls to queue_consumption_tasks match
         self.assertEqual(
             len(self._queue_consumption_tasks_mock.call_args_list),
             len(expected_call_args),
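
The tightened annotation above documents the shape the helper expects: one inner list per expected queue_consumption_tasks call, each holding one dict of overrides per document in that call. A hypothetical value of that shape; the keys and paths are illustrative only, not taken from the real fixtures:

# Two expected calls: the first queues one document, the second queues two.
expected_call_args: list[list[dict[str, str]]] = [
    [
        {"original_file": "/tmp/paperless-mail-abc123/invoice.pdf"},
    ],
    [
        {"original_file": "/tmp/paperless-mail-def456/receipt.pdf"},
        {"original_file": "/tmp/paperless-mail-def456/terms.pdf"},
    ],
]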

View File

@@ -254,7 +254,7 @@ class RasterisedDocumentParser(DocumentParser):
                 f"Image DPI of {ocrmypdf_args['image_dpi']} is low, OCR may fail",
             )
-        if settings.OCR_USER_ARGS and not safe_fallback:
+        if settings.OCR_USER_ARGS:
             try:
                 user_args = json.loads(settings.OCR_USER_ARGS)
                 ocrmypdf_args = {**ocrmypdf_args, **user_args}
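
The last hunk drops "and not safe_fallback" from the guard, so user-supplied OCRmyPDF arguments are merged in even when the parser retries with its safe fallback settings. A minimal sketch of that merge, with the settings access and logging stripped out; the error handling here is illustrative, not copied from the real parser:

import json
from typing import Optional


def build_ocr_args(base_args: dict, ocr_user_args: Optional[str]) -> dict:
    """Overlay user-provided OCRmyPDF arguments (a JSON string) on the defaults."""
    merged = dict(base_args)
    if ocr_user_args:
        try:
            merged.update(json.loads(ocr_user_args))
        except json.JSONDecodeError:
            # A real implementation would likely log a warning and keep the defaults.
            pass
    return merged


# Example: user forces a different optimisation level regardless of fallback mode.
print(build_ocr_args({"deskew": True, "optimize": 1}, '{"optimize": 0}'))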