Allow LLM model deletion without replacement if unused

Updated backend logic and API schema to permit deleting an LLM model without specifying a replacement if no workflow nodes are using it. Adjusted tests to cover both cases (with and without usage), made replacement_model_slug optional in the response model, and updated OpenAPI spec accordingly.
This commit is contained in:
Bentlybro
2026-01-21 23:26:52 +00:00
parent 8d021fe76c
commit 42f8a26ee1
4 changed files with 53 additions and 8 deletions

View File

@@ -439,9 +439,53 @@ def test_delete_llm_model_validation_error(
assert "Replacement model 'invalid' not found" in response.json()["detail"]
def test_delete_llm_model_missing_replacement() -> None:
"""Test deletion fails when replacement_model_slug is not provided"""
def test_delete_llm_model_no_replacement_with_usage(
    mocker: pytest_mock.MockFixture,
) -> None:
    """Test deletion fails when nodes exist but no replacement is provided"""
    # delete_model raises ValueError when workflow nodes still reference the
    # model and no replacement_model_slug was supplied; the route maps this
    # to an HTTP 400 with the error text in "detail".
    mocker.patch(
        "backend.api.features.admin.llm_routes.llm_db.delete_model",
        new=AsyncMock(
            side_effect=ValueError(
                "Cannot delete model 'test-model': 5 workflow node(s) are using it. "
                "Please provide a replacement_model_slug to migrate them."
            )
        ),
    )
    response = client.delete("/admin/llm/models/model-1")
    # replacement_model_slug is now an optional query param, so the request
    # itself is valid (no 422); the failure surfaces as a 400 from the route.
    assert response.status_code == 400
    assert "workflow node(s) are using it" in response.json()["detail"]
def test_delete_llm_model_no_replacement_no_usage(
    mocker: pytest_mock.MockFixture,
) -> None:
    """Test deletion succeeds when no nodes use the model and no replacement is provided"""
    # Stub the DB layer to report a clean delete of an unused model.
    expected = llm_model.DeleteLlmModelResponse(
        deleted_model_slug="unused-model",
        deleted_model_display_name="Unused Model",
        replacement_model_slug=None,
        nodes_migrated=0,
        message="Successfully deleted model 'Unused Model' (unused-model). No workflows were using this model.",
    )
    mocker.patch(
        "backend.api.features.admin.llm_routes.llm_db.delete_model",
        new=AsyncMock(return_value=expected),
    )
    refresh_spy = mocker.patch(
        "backend.api.features.admin.llm_routes._refresh_runtime_state",
        new=AsyncMock(),
    )

    resp = client.delete("/admin/llm/models/model-1")

    assert resp.status_code == 200
    payload = resp.json()
    assert payload["deleted_model_slug"] == "unused-model"
    assert payload["replacement_model_slug"] is None
    assert payload["nodes_migrated"] == 0
    # Runtime model registry must be refreshed exactly once after deletion.
    refresh_spy.assert_called_once()

View File

@@ -493,7 +493,9 @@ async def delete_model(
deleted_display_name = model.displayName
# 2. Count affected nodes first to determine if replacement is needed
count_result = await prisma.models.prisma().query_raw(
import prisma as prisma_module
count_result = await prisma_module.get_client().query_raw(
"""
SELECT COUNT(*) as count
FROM "AgentNode"

View File

@@ -173,7 +173,7 @@ class ToggleLlmModelResponse(pydantic.BaseModel):
class DeleteLlmModelResponse(pydantic.BaseModel):
    """Response payload returned after an LLM model is deleted."""

    deleted_model_slug: str
    deleted_model_display_name: str
    # None when the model was unused and deleted without migrating any nodes.
    # (The merged diff had left a duplicate non-optional declaration of this
    # field above the optional one; only the optional form is kept.)
    replacement_model_slug: Optional[str] = None
    nodes_migrated: int
    message: str

View File

@@ -7930,7 +7930,7 @@
"title": "Deleted Model Display Name"
},
"replacement_model_slug": {
"type": "string",
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Replacement Model Slug"
},
"nodes_migrated": { "type": "integer", "title": "Nodes Migrated" },
@@ -7940,7 +7940,6 @@
"required": [
"deleted_model_slug",
"deleted_model_display_name",
"replacement_model_slug",
"nodes_migrated",
"message"
],