Mirror of https://github.com/paperless-ngx/paperless-ngx.git, last synced 2026-04-28 02:49:26 +00:00.
Fix(dev): catch llm configuration error on get suggestions (#12647)
This commit is contained in:
@@ -356,6 +356,31 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
|
||||
},
|
||||
)
|
||||
|
||||
@patch("documents.views.get_ai_document_classification")
@override_settings(
    AI_ENABLED=True,
    LLM_BACKEND="openai",
)
def test_suggestions_with_invalid_ai_configuration(
    self,
    mock_get_ai_classification,
) -> None:
    """A misconfigured LLM backend yields HTTP 400 and leaves no cached suggestion."""
    # Simulate the classifier rejecting the configured model name.
    mock_get_ai_classification.side_effect = ValueError(
        "Unknown model 'gpt-5.4-mini-2026-03-17'.",
    )
    self.client.force_login(user=self.user)

    url = f"/api/documents/{self.document.pk}/suggestions/"
    response = self.client.get(url)

    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    expected_body = {"ai": ["Invalid AI configuration."]}
    self.assertEqual(response.json(), expected_body)
    # The failed request must not have populated the suggestion cache.
    self.assertIsNone(get_llm_suggestion_cache(self.document.pk, backend="openai"))
|
||||
|
||||
def test_invalidate_suggestions_cache(self) -> None:
|
||||
self.client.force_login(user=self.user)
|
||||
suggestions = {
|
||||
|
||||
@@ -1320,7 +1320,17 @@ class DocumentViewSet(
|
||||
refresh_suggestions_cache(doc.pk)
|
||||
return Response(cached_llm_suggestions.suggestions)
|
||||
|
||||
llm_suggestions = get_ai_document_classification(doc, request.user)
|
||||
try:
|
||||
llm_suggestions = get_ai_document_classification(doc, request.user)
|
||||
except ValueError as exc:
|
||||
logger.exception(
|
||||
"Invalid AI configuration while generating suggestions for "
|
||||
"document %s: %s",
|
||||
doc.pk,
|
||||
exc,
|
||||
exc_info=True,
|
||||
)
|
||||
raise ValidationError({"ai": [_("Invalid AI configuration.")]}) from exc
|
||||
|
||||
matched_tags = match_tags_by_name(
|
||||
llm_suggestions.get("tags", []),
|
||||
|
||||
Reference in New Issue
Block a user