Fix(dev): catch LLM configuration error on get suggestions (#12647)

This commit is contained in:
shamoon
2026-04-25 22:43:04 -07:00
committed by GitHub
parent 8cab1d0c13
commit 778d6b9fe3
2 changed files with 36 additions and 1 deletion

View File

@@ -356,6 +356,31 @@ class TestAISuggestions(DirectoriesMixin, TestCase):
},
)
@patch("documents.views.get_ai_document_classification")
@override_settings(
    AI_ENABLED=True,
    LLM_BACKEND="openai",
)
def test_suggestions_with_invalid_ai_configuration(
    self,
    mock_get_ai_classification,
) -> None:
    """A misconfigured LLM backend must yield HTTP 400 and cache nothing.

    The patched classification helper raises ValueError (as the real one
    does for an unknown model name); the suggestions endpoint should
    translate that into a validation error response and leave the LLM
    suggestion cache empty for this document.
    """
    mock_get_ai_classification.side_effect = ValueError(
        "Unknown model 'gpt-5.4-mini-2026-03-17'.",
    )
    self.client.force_login(user=self.user)

    url = f"/api/documents/{self.document.pk}/suggestions/"
    response = self.client.get(url)

    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    expected_body = {
        "ai": ["Invalid AI configuration."],
    }
    self.assertEqual(response.json(), expected_body)
    # No suggestion entry should have been written for the failed call.
    self.assertIsNone(
        get_llm_suggestion_cache(self.document.pk, backend="openai"),
    )
def test_invalidate_suggestions_cache(self) -> None:
self.client.force_login(user=self.user)
suggestions = {

View File

@@ -1320,7 +1320,17 @@ class DocumentViewSet(
refresh_suggestions_cache(doc.pk)
return Response(cached_llm_suggestions.suggestions)
llm_suggestions = get_ai_document_classification(doc, request.user)
try:
llm_suggestions = get_ai_document_classification(doc, request.user)
except ValueError as exc:
logger.exception(
"Invalid AI configuration while generating suggestions for "
"document %s: %s",
doc.pk,
exc,
exc_info=True,
)
raise ValidationError({"ai": [_("Invalid AI configuration.")]}) from exc
matched_tags = match_tags_by_name(
llm_suggestions.get("tags", []),