mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-02-01 18:35:00 -05:00)

Compare commits: ntindle/fi...docker/opt (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 1ed748a356 | |
| | 9c28639c32 | |
| | 4f37a12743 | |
@@ -37,13 +37,15 @@ ENV POETRY_VIRTUALENVS_CREATE=true
ENV POETRY_VIRTUALENVS_IN_PROJECT=true
ENV PATH=/opt/poetry/bin:$PATH

RUN pip3 install poetry --break-system-packages
RUN pip3 install --no-cache-dir poetry --break-system-packages

# Copy and install dependencies
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
WORKDIR /app/autogpt_platform/backend
RUN poetry install --no-ansi --no-root
# Production image only needs runtime deps; dev deps (pytest, black, ruff, etc.)
# are installed locally via `poetry install --with dev` per the development docs
RUN poetry install --no-ansi --no-root --only main

# Generate Prisma client
COPY autogpt_platform/backend/schema.prisma ./
@@ -51,6 +53,15 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
RUN poetry run prisma generate && poetry run gen-prisma-stub

# Clean up build artifacts and caches to reduce layer size
# Note: setuptools is kept as it's a direct dependency (used by aioclamd via pkg_resources)
RUN find /app -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true; \
    find /app -type d -name tests -exec rm -rf {} + 2>/dev/null || true; \
    find /app -type d -name test -exec rm -rf {} + 2>/dev/null || true; \
    rm -rf /app/autogpt_platform/backend/.venv/lib/python*/site-packages/pip* \
        /root/.cache/pip \
        /root/.cache/pypoetry

FROM debian:13-slim AS server_dependencies

WORKDIR /app
@@ -68,7 +79,7 @@ RUN apt-get update && apt-get install -y \
    python3-pip \
    && rm -rf /var/lib/apt/lists/*

# Copy only necessary files from builder
# Copy built artifacts from builder (cleaned of caches, __pycache__, and test dirs)
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
@@ -81,9 +92,7 @@ COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-pyth

ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"

RUN mkdir -p /app/autogpt_platform/autogpt_libs
RUN mkdir -p /app/autogpt_platform/backend

# Copy fresh source from context (overwrites builder's copy with latest source)
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs

COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
@@ -14,7 +14,6 @@ from backend.data.graph import (
    create_graph,
    get_graph,
    get_graph_all_versions,
    get_store_listed_graphs,
)
from backend.util.exceptions import DatabaseError, NotFoundError

@@ -267,18 +266,18 @@ async def search_marketplace_agents_for_generation(
async def search_marketplace_agents_for_generation(
    search_query: str,
    max_results: int = 10,
) -> list[LibraryAgentSummary]:
) -> list[MarketplaceAgentSummary]:
    """Search marketplace agents formatted for Agent Generator.

    Fetches marketplace agents and their full schemas so they can be used
    as sub-agents in generated workflows.
    Note: This returns basic agent info. Full input/output schemas would require
    additional graph fetches and is a potential future enhancement.

    Args:
        search_query: Search term to find relevant public agents
        max_results: Maximum number of agents to return (default 10)

    Returns:
        List of LibraryAgentSummary with full input/output schemas
        List of MarketplaceAgentSummary (without detailed schemas for now)
    """
    try:
        response = await store_db.get_store_agents(
@@ -287,31 +286,17 @@ async def search_marketplace_agents_for_generation(
            page_size=max_results,
        )

        agents_with_graphs = [
            agent for agent in response.agents if agent.agent_graph_id
        ]

        if not agents_with_graphs:
            return []

        graph_ids = [agent.agent_graph_id for agent in agents_with_graphs]
        graphs = await get_store_listed_graphs(*graph_ids)

        results: list[LibraryAgentSummary] = []
        for agent in agents_with_graphs:
            graph_id = agent.agent_graph_id
            if graph_id and graph_id in graphs:
                graph = graphs[graph_id]
                results.append(
                    LibraryAgentSummary(
                        graph_id=graph.id,
                        graph_version=graph.version,
                        name=agent.agent_name,
                        description=agent.description,
                        input_schema=graph.input_schema,
                        output_schema=graph.output_schema,
                    )
        results: list[MarketplaceAgentSummary] = []
        for agent in response.agents:
            results.append(
                MarketplaceAgentSummary(
                    name=agent.agent_name,
                    description=agent.description,
                    sub_heading=agent.sub_heading,
                    creator=agent.creator,
                    is_marketplace_agent=True,
                )
            )
        return results
    except Exception as e:
        logger.warning(f"Failed to search marketplace agents: {e}")
@@ -342,7 +327,8 @@ async def get_all_relevant_agents_for_generation(
        max_marketplace_results: Max marketplace agents to return (default 10)

    Returns:
        List of AgentSummary with full schemas (both library and marketplace agents)
        List of AgentSummary, library agents first (with full schemas),
        then marketplace agents (basic info only)
    """
    agents: list[AgentSummary] = []
    seen_graph_ids: set[str] = set()
@@ -379,11 +365,16 @@ async def get_all_relevant_agents_for_generation(
        search_query=search_query,
        max_results=max_marketplace_results,
    )
    library_names: set[str] = set()
    for a in agents:
        name = a.get("name")
        if name and isinstance(name, str):
            library_names.add(name.lower())
    for agent in marketplace_agents:
        graph_id = agent.get("graph_id")
        if graph_id and graph_id not in seen_graph_ids:
            agents.append(agent)
            seen_graph_ids.add(graph_id)
        agent_name = agent.get("name")
        if agent_name and isinstance(agent_name, str):
            if agent_name.lower() not in library_names:
                agents.append(agent)

    return agents
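The new merge logic in get_all_relevant_agents_for_generation dedupes marketplace results against library agents by lowercased name, since marketplace summaries no longer carry schemas or a reliable graph_id to match on. A minimal, dependency-free sketch of that merge (plain dicts stand in for the summary models; the names here are illustrative, not repository code):

```python
def merge_agents(library: list[dict], marketplace: list[dict]) -> list[dict]:
    """Library agents first, then marketplace agents whose name is not already used."""
    merged = list(library)
    library_names = {
        a["name"].lower() for a in library if isinstance(a.get("name"), str)
    }
    for agent in marketplace:
        name = agent.get("name")
        if isinstance(name, str) and name.lower() not in library_names:
            merged.append(agent)
    return merged


library = [{"name": "Shared Agent", "graph_id": "lib-123"}]
marketplace = [
    {"name": "Shared Agent", "is_marketplace_agent": True},  # dropped: same name
    {"name": "Unique Agent", "is_marketplace_agent": True},  # kept
]
assert [a["name"] for a in merge_agents(library, marketplace)] == [
    "Shared Agent",
    "Unique Agent",
]
```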
@@ -104,60 +104,18 @@ async def list_library_agents(
    order_by = {"updatedAt": "desc"}

    try:
        # For LAST_EXECUTED sorting, we need to fetch execution data and sort in Python
        # since Prisma doesn't support sorting by nested relations
        if sort_by == library_model.LibraryAgentSort.LAST_EXECUTED:
            # TODO: This fetches all agents into memory for sorting, which may cause
            # performance issues for users with many agents. Prisma doesn't support
            # sorting by nested relations, so a dedicated lastExecutedAt column or
            # raw SQL query would be needed for database-level pagination.
            library_agents = await prisma.models.LibraryAgent.prisma().find_many(
                where=where_clause,
                include=library_agent_include(
                    user_id,
                    include_nodes=False,
                    include_executions=True,
                    execution_limit=1,
                ),
            )

            def get_sort_key(
                agent: prisma.models.LibraryAgent,
            ) -> tuple[int, float]:
                """
                Returns a tuple for sorting: (has_no_executions, -timestamp).

                Agents WITH executions come first (sorted by most recent execution),
                agents WITHOUT executions come last (sorted by creation date).
                """
                graph = agent.AgentGraph
                if graph and graph.Executions and len(graph.Executions) > 0:
                    execution = graph.Executions[0]
                    timestamp = execution.updatedAt or execution.createdAt
                    return (0, -timestamp.timestamp())
                return (1, -agent.createdAt.timestamp())

            library_agents.sort(key=get_sort_key)

            # Apply pagination after sorting
            agent_count = len(library_agents)
            start_idx = (page - 1) * page_size
            end_idx = start_idx + page_size
            library_agents = library_agents[start_idx:end_idx]
        else:
            # Standard sorting via database
            library_agents = await prisma.models.LibraryAgent.prisma().find_many(
                where=where_clause,
                include=library_agent_include(
                    user_id, include_nodes=False, include_executions=False
                ),
                order=order_by,
                skip=(page - 1) * page_size,
                take=page_size,
            )
            agent_count = await prisma.models.LibraryAgent.prisma().count(
                where=where_clause
            )
        library_agents = await prisma.models.LibraryAgent.prisma().find_many(
            where=where_clause,
            include=library_agent_include(
                user_id, include_nodes=False, include_executions=include_executions
            ),
            order=order_by,
            skip=(page - 1) * page_size,
            take=page_size,
        )
        agent_count = await prisma.models.LibraryAgent.prisma().count(
            where=where_clause
        )

        logger.debug(
            f"Retrieved {len(library_agents)} library agents for user #{user_id}"
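For reference, the removed LAST_EXECUTED branch relies on a standard Python idiom: sort by a tuple key whose first element partitions the list (executed vs. never executed) and whose second element, a negated timestamp, orders each partition newest-first; pagination is then a slice over the sorted list. A minimal, self-contained sketch of that idiom (illustrative names, not the repository's models):

```python
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone


@dataclass
class Record:
    created_at: datetime
    last_executed_at: datetime | None = None  # None -> never executed


def sort_key(r: Record) -> tuple[int, float]:
    # (0, -ts): executed records first, most recent execution first.
    # (1, -ts): never-executed records last, newest creation first.
    if r.last_executed_at is not None:
        return (0, -r.last_executed_at.timestamp())
    return (1, -r.created_at.timestamp())


now = datetime.now(timezone.utc)
records = [
    Record(created_at=now - timedelta(days=1)),  # never executed, newer
    Record(created_at=now - timedelta(days=5), last_executed_at=now - timedelta(hours=1)),
    Record(created_at=now - timedelta(days=2)),  # never executed, older
    Record(created_at=now - timedelta(days=3), last_executed_at=now - timedelta(hours=3)),
]

records.sort(key=sort_key)

# In-memory pagination after sorting, mirroring the removed branch above.
page, page_size = 1, 2
page_items = records[(page - 1) * page_size : page * page_size]
assert all(r.last_executed_at is not None for r in page_items)  # executed agents first
```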
@@ -387,20 +345,6 @@ async def get_library_agent_by_graph_id(
    graph_id: str,
    graph_version: Optional[int] = None,
) -> library_model.LibraryAgent | None:
    """
    Retrieves a library agent by its graph ID for a given user.

    Args:
        user_id: The ID of the user who owns the library agent.
        graph_id: The ID of the agent graph to look up.
        graph_version: Optional specific version of the graph to retrieve.

    Returns:
        The LibraryAgent if found, otherwise None.

    Raises:
        DatabaseError: If there's an error during retrieval.
    """
    try:
        filter: prisma.types.LibraryAgentWhereInput = {
            "agentGraphId": graph_id,
@@ -684,17 +628,6 @@ async def update_library_agent(
async def delete_library_agent(
    library_agent_id: str, user_id: str, soft_delete: bool = True
) -> None:
    """
    Deletes a library agent and cleans up associated schedules and webhooks.

    Args:
        library_agent_id: The ID of the library agent to delete.
        user_id: The ID of the user who owns the library agent.
        soft_delete: If True, marks the agent as deleted; if False, permanently removes it.

    Raises:
        NotFoundError: If the library agent is not found or doesn't belong to the user.
    """
    # First get the agent to find the graph_id for cleanup
    library_agent = await prisma.models.LibraryAgent.prisma().find_unique(
        where={"id": library_agent_id}, include={"AgentGraph": True}
@@ -1188,20 +1121,6 @@ async def update_preset(
async def set_preset_webhook(
    user_id: str, preset_id: str, webhook_id: str | None
) -> library_model.LibraryAgentPreset:
    """
    Sets or removes a webhook connection for a preset.

    Args:
        user_id: The ID of the user who owns the preset.
        preset_id: The ID of the preset to update.
        webhook_id: The ID of the webhook to connect, or None to disconnect.

    Returns:
        The updated LibraryAgentPreset.

    Raises:
        NotFoundError: If the preset is not found or doesn't belong to the user.
    """
    current = await prisma.models.AgentPreset.prisma().find_unique(
        where={"id": preset_id},
        include=AGENT_PRESET_INCLUDE,
@@ -1,4 +1,4 @@
from datetime import datetime, timedelta, timezone
from datetime import datetime

import prisma.enums
import prisma.models
@@ -9,7 +9,6 @@ from backend.data.db import connect
from backend.data.includes import library_agent_include

from . import db
from . import model as library_model


@pytest.mark.asyncio
@@ -226,183 +225,3 @@ async def test_add_agent_to_library_not_found(mocker):
    mock_store_listing_version.return_value.find_unique.assert_called_once_with(
        where={"id": "version123"}, include={"AgentGraph": True}
    )


@pytest.mark.asyncio
async def test_list_library_agents_sort_by_last_executed(mocker):
    """
    Test LAST_EXECUTED sorting behavior:
    - Agents WITH executions come first, sorted by most recent execution (updatedAt)
    - Agents WITHOUT executions come last, sorted by creation date
    """
    now = datetime.now(timezone.utc)

    # Agent 1: Has execution that finished 1 hour ago
    agent1_execution = prisma.models.AgentGraphExecution(
        id="exec1",
        agentGraphId="agent1",
        agentGraphVersion=1,
        userId="test-user",
        createdAt=now - timedelta(hours=2),
        updatedAt=now - timedelta(hours=1),  # Finished 1 hour ago
        executionStatus=prisma.enums.AgentExecutionStatus.COMPLETED,
        isDeleted=False,
        isShared=False,
    )
    agent1_graph = prisma.models.AgentGraph(
        id="agent1",
        version=1,
        name="Agent With Recent Execution",
        description="Has execution finished 1 hour ago",
        userId="test-user",
        isActive=True,
        createdAt=now - timedelta(days=5),
        Executions=[agent1_execution],
    )
    library_agent1 = prisma.models.LibraryAgent(
        id="lib1",
        userId="test-user",
        agentGraphId="agent1",
        agentGraphVersion=1,
        settings="{}",  # type: ignore
        isCreatedByUser=True,
        isDeleted=False,
        isArchived=False,
        createdAt=now - timedelta(days=5),
        updatedAt=now - timedelta(days=5),
        isFavorite=False,
        useGraphIsActiveVersion=True,
        AgentGraph=agent1_graph,
    )

    # Agent 2: Has execution that finished 3 hours ago
    agent2_execution = prisma.models.AgentGraphExecution(
        id="exec2",
        agentGraphId="agent2",
        agentGraphVersion=1,
        userId="test-user",
        createdAt=now - timedelta(hours=5),
        updatedAt=now - timedelta(hours=3),  # Finished 3 hours ago
        executionStatus=prisma.enums.AgentExecutionStatus.COMPLETED,
        isDeleted=False,
        isShared=False,
    )
    agent2_graph = prisma.models.AgentGraph(
        id="agent2",
        version=1,
        name="Agent With Older Execution",
        description="Has execution finished 3 hours ago",
        userId="test-user",
        isActive=True,
        createdAt=now - timedelta(days=3),
        Executions=[agent2_execution],
    )
    library_agent2 = prisma.models.LibraryAgent(
        id="lib2",
        userId="test-user",
        agentGraphId="agent2",
        agentGraphVersion=1,
        settings="{}",  # type: ignore
        isCreatedByUser=True,
        isDeleted=False,
        isArchived=False,
        createdAt=now - timedelta(days=3),
        updatedAt=now - timedelta(days=3),
        isFavorite=False,
        useGraphIsActiveVersion=True,
        AgentGraph=agent2_graph,
    )

    # Agent 3: No executions, created 1 day ago (should come after agents with executions)
    agent3_graph = prisma.models.AgentGraph(
        id="agent3",
        version=1,
        name="Agent Without Executions (Newer)",
        description="No executions, created 1 day ago",
        userId="test-user",
        isActive=True,
        createdAt=now - timedelta(days=1),
        Executions=[],
    )
    library_agent3 = prisma.models.LibraryAgent(
        id="lib3",
        userId="test-user",
        agentGraphId="agent3",
        agentGraphVersion=1,
        settings="{}",  # type: ignore
        isCreatedByUser=True,
        isDeleted=False,
        isArchived=False,
        createdAt=now - timedelta(days=1),
        updatedAt=now - timedelta(days=1),
        isFavorite=False,
        useGraphIsActiveVersion=True,
        AgentGraph=agent3_graph,
    )

    # Agent 4: No executions, created 2 days ago
    agent4_graph = prisma.models.AgentGraph(
        id="agent4",
        version=1,
        name="Agent Without Executions (Older)",
        description="No executions, created 2 days ago",
        userId="test-user",
        isActive=True,
        createdAt=now - timedelta(days=2),
        Executions=[],
    )
    library_agent4 = prisma.models.LibraryAgent(
        id="lib4",
        userId="test-user",
        agentGraphId="agent4",
        agentGraphVersion=1,
        settings="{}",  # type: ignore
        isCreatedByUser=True,
        isDeleted=False,
        isArchived=False,
        createdAt=now - timedelta(days=2),
        updatedAt=now - timedelta(days=2),
        isFavorite=False,
        useGraphIsActiveVersion=True,
        AgentGraph=agent4_graph,
    )

    # Return agents in random order to verify sorting works
    mock_library_agents = [
        library_agent3,
        library_agent1,
        library_agent4,
        library_agent2,
    ]

    # Mock prisma calls
    mock_agent_graph = mocker.patch("prisma.models.AgentGraph.prisma")
    mock_agent_graph.return_value.find_many = mocker.AsyncMock(return_value=[])

    mock_library_agent = mocker.patch("prisma.models.LibraryAgent.prisma")
    mock_library_agent.return_value.find_many = mocker.AsyncMock(
        return_value=mock_library_agents
    )

    # Call function with LAST_EXECUTED sort
    result = await db.list_library_agents(
        "test-user",
        sort_by=library_model.LibraryAgentSort.LAST_EXECUTED,
    )

    # Verify sorting order:
    # 1. Agent 1 (execution finished 1 hour ago) - most recent execution
    # 2. Agent 2 (execution finished 3 hours ago) - older execution
    # 3. Agent 3 (no executions, created 1 day ago) - newer creation
    # 4. Agent 4 (no executions, created 2 days ago) - older creation
    assert len(result.agents) == 4
    assert (
        result.agents[0].id == "lib1"
    ), "Agent with most recent execution should be first"
    assert result.agents[1].id == "lib2", "Agent with older execution should be second"
    assert (
        result.agents[2].id == "lib3"
    ), "Agent without executions (newer) should be third"
    assert (
        result.agents[3].id == "lib4"
    ), "Agent without executions (older) should be last"
@@ -442,7 +442,6 @@ class LibraryAgentSort(str, Enum):

    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"
    LAST_EXECUTED = "lastExecuted"


class LibraryAgentUpdateRequest(pydantic.BaseModel):
@@ -28,7 +28,7 @@ async def list_library_agents(
        None, description="Search term to filter agents"
    ),
    sort_by: library_model.LibraryAgentSort = Query(
        library_model.LibraryAgentSort.LAST_EXECUTED,
        library_model.LibraryAgentSort.UPDATED_AT,
        description="Criteria to sort results by",
    ),
    page: int = Query(
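The first positional argument to FastAPI's Query() is the parameter default, which is also what the generated OpenAPI schema advertises (compare the "default": "updatedAt" change in the openapi.json hunk further down). A minimal sketch of the pattern, assuming a hypothetical SortOption enum rather than the repository's model:

```python
from enum import Enum

from fastapi import FastAPI, Query


class SortOption(str, Enum):
    CREATED_AT = "createdAt"
    UPDATED_AT = "updatedAt"


app = FastAPI()


@app.get("/agents")
async def list_agents(
    # The first argument to Query() is the default value used when the client
    # omits ?sort_by=...; it also becomes the "default" in the OpenAPI schema.
    sort_by: SortOption = Query(
        SortOption.UPDATED_AT, description="Criteria to sort results by"
    ),
) -> dict:
    return {"sort_by": sort_by}
```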
@@ -112,7 +112,7 @@ async def test_get_library_agents_success(
    mock_db_call.assert_called_once_with(
        user_id=test_user_id,
        search_term="test",
        sort_by=library_model.LibraryAgentSort.LAST_EXECUTED,
        sort_by=library_model.LibraryAgentSort.UPDATED_AT,
        page=1,
        page_size=15,
    )
@@ -112,7 +112,6 @@ async def get_store_agents(
                description=agent["description"],
                runs=agent["runs"],
                rating=agent["rating"],
                agent_graph_id=agent.get("agentGraphId", ""),
            )
            store_agents.append(store_agent)
    except Exception as e:
@@ -171,7 +170,6 @@ async def get_store_agents(
                description=agent.description,
                runs=agent.runs,
                rating=agent.rating,
                agent_graph_id=agent.agentGraphId,
            )
            # Add to the list only if creation was successful
            store_agents.append(store_agent)

@@ -600,7 +600,6 @@ async def hybrid_search(
            sa.featured,
            sa.is_available,
            sa.updated_at,
            sa."agentGraphId",
            -- Searchable text for BM25 reranking
            COALESCE(sa.agent_name, '') || ' ' || COALESCE(sa.sub_heading, '') || ' ' || COALESCE(sa.description, '') as searchable_text,
            -- Semantic score
@@ -660,7 +659,6 @@ async def hybrid_search(
            featured,
            is_available,
            updated_at,
            "agentGraphId",
            searchable_text,
            semantic_score,
            lexical_score,
@@ -38,7 +38,6 @@ class StoreAgent(pydantic.BaseModel):
    description: str
    runs: int
    rating: float
    agent_graph_id: str


class StoreAgentsResponse(pydantic.BaseModel):
@@ -26,13 +26,11 @@ def test_store_agent():
        description="Test description",
        runs=50,
        rating=4.5,
        agent_graph_id="test-graph-id",
    )
    assert agent.slug == "test-agent"
    assert agent.agent_name == "Test Agent"
    assert agent.runs == 50
    assert agent.rating == 4.5
    assert agent.agent_graph_id == "test-graph-id"


def test_store_agents_response():
@@ -48,7 +46,6 @@ def test_store_agents_response():
                description="Test description",
                runs=50,
                rating=4.5,
                agent_graph_id="test-graph-id",
            )
        ],
        pagination=store_model.Pagination(
@@ -82,7 +82,6 @@ def test_get_agents_featured(
                description="Featured agent description",
                runs=100,
                rating=4.5,
                agent_graph_id="test-graph-1",
            )
        ],
        pagination=store_model.Pagination(
@@ -128,7 +127,6 @@ def test_get_agents_by_creator(
                description="Creator agent description",
                runs=50,
                rating=4.0,
                agent_graph_id="test-graph-2",
            )
        ],
        pagination=store_model.Pagination(
@@ -174,7 +172,6 @@ def test_get_agents_sorted(
                description="Top agent description",
                runs=1000,
                rating=5.0,
                agent_graph_id="test-graph-3",
            )
        ],
        pagination=store_model.Pagination(
@@ -220,7 +217,6 @@ def test_get_agents_search(
                description="Specific search term description",
                runs=75,
                rating=4.2,
                agent_graph_id="test-graph-search",
            )
        ],
        pagination=store_model.Pagination(
@@ -266,7 +262,6 @@ def test_get_agents_category(
                description="Category agent description",
                runs=60,
                rating=4.1,
                agent_graph_id="test-graph-category",
            )
        ],
        pagination=store_model.Pagination(
@@ -311,7 +306,6 @@ def test_get_agents_pagination(
                description=f"Agent {i} description",
                runs=i * 10,
                rating=4.0,
                agent_graph_id="test-graph-2",
            )
            for i in range(5)
        ],
@@ -33,7 +33,6 @@ class TestCacheDeletion:
                description="Test description",
                runs=100,
                rating=4.5,
                agent_graph_id="test-graph-id",
            )
        ],
        pagination=Pagination(
@@ -1028,39 +1028,6 @@ async def get_graph(
    return GraphModel.from_db(graph, for_export)


async def get_store_listed_graphs(*graph_ids: str) -> dict[str, GraphModel]:
    """Batch-fetch multiple store-listed graphs by their IDs.

    Only returns graphs that have approved store listings (publicly available).
    Does not require permission checks since store-listed graphs are public.

    Args:
        *graph_ids: Variable number of graph IDs to fetch

    Returns:
        Dict mapping graph_id to GraphModel for graphs with approved store listings
    """
    if not graph_ids:
        return {}

    store_listings = await StoreListingVersion.prisma().find_many(
        where={
            "agentGraphId": {"in": list(graph_ids)},
            "submissionStatus": SubmissionStatus.APPROVED,
            "isDeleted": False,
        },
        include={"AgentGraph": {"include": AGENT_GRAPH_INCLUDE}},
        distinct=["agentGraphId"],
        order={"agentGraphVersion": "desc"},
    )

    return {
        listing.agentGraphId: GraphModel.from_db(listing.AgentGraph)
        for listing in store_listings
        if listing.AgentGraph
    }


async def get_graph_as_admin(
    graph_id: str,
    version: int | None = None,
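The removed get_store_listed_graphs helper returned a dict keyed by graph ID, so callers could issue one batched query and then do O(1) lookups while IDs without an approved listing (or without a loaded graph relation) simply dropped out. A minimal, database-free sketch of that lookup pattern (illustrative types, not repository code):

```python
from dataclasses import dataclass


@dataclass
class Listing:
    agent_graph_id: str
    graph: dict | None  # stand-in for the included AgentGraph relation


listings = [
    Listing("graph-123", {"id": "graph-123", "version": 3}),
    Listing("graph-456", None),  # listing without a loaded graph is skipped
]

# Same shape as the removed helper's return value: {graph_id: graph}.
graphs = {
    listing.agent_graph_id: listing.graph
    for listing in listings
    if listing.graph
}

requested = ["graph-123", "graph-456", "graph-789"]
found = {gid: graphs[gid] for gid in requested if gid in graphs}
assert list(found) == ["graph-123"]  # unknown / unloaded IDs simply drop out
```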
@@ -119,9 +119,7 @@ def library_agent_include(
    if include_executions:
        agent_graph_include["Executions"] = {
            "where": {"userId": user_id},
            "order_by": {
                "updatedAt": "desc"
            },  # Uses updatedAt because it reflects when the execution completed or last progressed
            "order_by": {"createdAt": "desc"},
            "take": execution_limit,
        }
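For context, library_agent_include assembles a nested Prisma-style include dict: only the requesting user's executions, ordered newest-first (by createdAt after this change), capped at execution_limit. A rough sketch of the shape it produces (hypothetical helper, not the repository's exact function):

```python
def build_executions_include(user_id: str, execution_limit: int = 1) -> dict:
    # Mirrors the shape assembled above: this user's executions only,
    # newest first, limited to execution_limit rows.
    return {
        "Executions": {
            "where": {"userId": user_id},
            "order_by": {"createdAt": "desc"},
            "take": execution_limit,
        }
    }


include = build_executions_include("test-user")
assert include["Executions"]["take"] == 1
```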
@@ -9,8 +9,7 @@
      "sub_heading": "Creator agent subheading",
      "description": "Creator agent description",
      "runs": 50,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    }
  ],
  "pagination": {

@@ -9,8 +9,7 @@
      "sub_heading": "Category agent subheading",
      "description": "Category agent description",
      "runs": 60,
      "rating": 4.1,
      "agent_graph_id": "test-graph-category"
      "rating": 4.1
    }
  ],
  "pagination": {

@@ -9,8 +9,7 @@
      "sub_heading": "Agent 0 subheading",
      "description": "Agent 0 description",
      "runs": 0,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    },
    {
      "slug": "agent-1",
@@ -21,8 +20,7 @@
      "sub_heading": "Agent 1 subheading",
      "description": "Agent 1 description",
      "runs": 10,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    },
    {
      "slug": "agent-2",
@@ -33,8 +31,7 @@
      "sub_heading": "Agent 2 subheading",
      "description": "Agent 2 description",
      "runs": 20,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    },
    {
      "slug": "agent-3",
@@ -45,8 +42,7 @@
      "sub_heading": "Agent 3 subheading",
      "description": "Agent 3 description",
      "runs": 30,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    },
    {
      "slug": "agent-4",
@@ -57,8 +53,7 @@
      "sub_heading": "Agent 4 subheading",
      "description": "Agent 4 description",
      "runs": 40,
      "rating": 4.0,
      "agent_graph_id": "test-graph-2"
      "rating": 4.0
    }
  ],
  "pagination": {

@@ -9,8 +9,7 @@
      "sub_heading": "Search agent subheading",
      "description": "Specific search term description",
      "runs": 75,
      "rating": 4.2,
      "agent_graph_id": "test-graph-search"
      "rating": 4.2
    }
  ],
  "pagination": {

@@ -9,8 +9,7 @@
      "sub_heading": "Top agent subheading",
      "description": "Top agent description",
      "runs": 1000,
      "rating": 5.0,
      "agent_graph_id": "test-graph-3"
      "rating": 5.0
    }
  ],
  "pagination": {

@@ -9,8 +9,7 @@
      "sub_heading": "Featured agent subheading",
      "description": "Featured agent description",
      "runs": 100,
      "rating": 4.5,
      "agent_graph_id": "test-graph-1"
      "rating": 4.5
    }
  ],
  "pagination": {
@@ -134,28 +134,15 @@ class TestSearchMarketplaceAgentsForGeneration:
                description="A public agent",
                sub_heading="Does something useful",
                creator="creator-1",
                agent_graph_id="graph-123",
            )
        ]

        mock_graph = MagicMock()
        mock_graph.id = "graph-123"
        mock_graph.version = 1
        mock_graph.input_schema = {"type": "object"}
        mock_graph.output_schema = {"type": "object"}

        with (
            patch(
                "backend.api.features.store.db.get_store_agents",
                new_callable=AsyncMock,
                return_value=mock_response,
            ) as mock_search,
            patch(
                "backend.api.features.chat.tools.agent_generator.core.get_store_listed_graphs",
                new_callable=AsyncMock,
                return_value={"graph-123": mock_graph},
            ),
        ):
        # The store_db is dynamically imported, so patch the import path
        with patch(
            "backend.api.features.store.db.get_store_agents",
            new_callable=AsyncMock,
            return_value=mock_response,
        ) as mock_search:
            result = await core.search_marketplace_agents_for_generation(
                search_query="automation",
                max_results=10,
@@ -169,7 +156,7 @@ class TestSearchMarketplaceAgentsForGeneration:

        assert len(result) == 1
        assert result[0]["name"] == "Public Agent"
        assert result[0]["graph_id"] == "graph-123"
        assert result[0]["is_marketplace_agent"] is True

    @pytest.mark.asyncio
    async def test_handles_marketplace_error_gracefully(self):
@@ -206,12 +193,11 @@ class TestGetAllRelevantAgentsForGeneration:

        marketplace_agents = [
            {
                "graph_id": "market-456",
                "graph_version": 1,
                "name": "Market Agent",
                "description": "From marketplace",
                "input_schema": {},
                "output_schema": {},
                "sub_heading": "Sub heading",
                "creator": "creator-1",
                "is_marketplace_agent": True,
            }
        ]

@@ -239,11 +225,11 @@ class TestGetAllRelevantAgentsForGeneration:
        assert result[1]["name"] == "Market Agent"

    @pytest.mark.asyncio
    async def test_deduplicates_by_graph_id(self):
        """Test that marketplace agents with same graph_id as library are excluded."""
    async def test_deduplicates_by_name(self):
        """Test that marketplace agents with same name as library are excluded."""
        library_agents = [
            {
                "graph_id": "shared-123",
                "graph_id": "lib-123",
                "graph_version": 1,
                "name": "Shared Agent",
                "description": "From library",
@@ -254,20 +240,18 @@ class TestGetAllRelevantAgentsForGeneration:

        marketplace_agents = [
            {
                "graph_id": "shared-123",  # Same graph_id, should be deduplicated
                "graph_version": 1,
                "name": "Shared Agent",
                "name": "Shared Agent",  # Same name, should be deduplicated
                "description": "From marketplace",
                "input_schema": {},
                "output_schema": {},
                "sub_heading": "Sub heading",
                "creator": "creator-1",
                "is_marketplace_agent": True,
            },
            {
                "graph_id": "unique-456",
                "graph_version": 1,
                "name": "Unique Agent",
                "description": "Only in marketplace",
                "input_schema": {},
                "output_schema": {},
                "sub_heading": "Sub heading",
                "creator": "creator-2",
                "is_marketplace_agent": True,
            },
        ]

@@ -289,7 +273,7 @@ class TestGetAllRelevantAgentsForGeneration:
            include_marketplace=True,
        )

        # Shared Agent from marketplace should be excluded by graph_id
        # Shared Agent from marketplace should be excluded
        assert len(result) == 2
        names = [a["name"] for a in result]
        assert "Shared Agent" in names
@@ -23,13 +23,10 @@ export function LibrarySortMenu({ setLibrarySort }: Props) {
    <Select onValueChange={handleSortChange}>
      <SelectTrigger className="ml-1 w-fit space-x-1 border-none px-0 text-base underline underline-offset-4 shadow-none">
        <ArrowDownNarrowWideIcon className="h-4 w-4 sm:hidden" />
        <SelectValue placeholder="Last Executed" />
        <SelectValue placeholder="Last Modified" />
      </SelectTrigger>
      <SelectContent>
        <SelectGroup>
          <SelectItem value={LibraryAgentSort.lastExecuted}>
            Last Executed
          </SelectItem>
          <SelectItem value={LibraryAgentSort.createdAt}>
            Creation Date
          </SelectItem>
@@ -11,14 +11,12 @@ export function useLibrarySortMenu({ setLibrarySort }: Props) {

  const getSortLabel = (sort: LibraryAgentSort) => {
    switch (sort) {
      case LibraryAgentSort.lastExecuted:
        return "Last Executed";
      case LibraryAgentSort.createdAt:
        return "Creation Date";
      case LibraryAgentSort.updatedAt:
        return "Last Modified";
      default:
        return "Last Executed";
        return "Last Modified";
    }
  };
@@ -2,7 +2,7 @@

import { LibraryAgentSort } from "@/app/api/__generated__/models/libraryAgentSort";
import { parseAsStringEnum, useQueryState } from "nuqs";
import { useCallback, useMemo, useState } from "react";
import { useCallback, useEffect, useMemo, useState } from "react";

const sortParser = parseAsStringEnum(Object.values(LibraryAgentSort));

@@ -11,7 +11,14 @@ export function useLibraryListPage() {
  const [uploadedFile, setUploadedFile] = useState<File | null>(null);
  const [librarySortRaw, setLibrarySortRaw] = useQueryState("sort", sortParser);

  const librarySort = librarySortRaw || LibraryAgentSort.lastExecuted;
  // Ensure sort param is always present in URL (even if default)
  useEffect(() => {
    if (!librarySortRaw) {
      setLibrarySortRaw(LibraryAgentSort.updatedAt, { shallow: false });
    }
  }, [librarySortRaw, setLibrarySortRaw]);

  const librarySort = librarySortRaw || LibraryAgentSort.updatedAt;

  const setLibrarySort = useCallback(
    (value: LibraryAgentSort) => {
@@ -3361,7 +3361,7 @@
          "schema": {
            "$ref": "#/components/schemas/LibraryAgentSort",
            "description": "Criteria to sort results by",
            "default": "lastExecuted"
            "default": "updatedAt"
          },
          "description": "Criteria to sort results by"
        },
@@ -8239,7 +8239,7 @@
      },
      "LibraryAgentSort": {
        "type": "string",
        "enum": ["createdAt", "updatedAt", "lastExecuted"],
        "enum": ["createdAt", "updatedAt"],
        "title": "LibraryAgentSort",
        "description": "Possible sort options for sorting library agents."
      },
@@ -9833,8 +9833,7 @@
        "sub_heading": { "type": "string", "title": "Sub Heading" },
        "description": { "type": "string", "title": "Description" },
        "runs": { "type": "integer", "title": "Runs" },
        "rating": { "type": "number", "title": "Rating" },
        "agent_graph_id": { "type": "string", "title": "Agent Graph Id" }
        "rating": { "type": "number", "title": "Rating" }
      },
      "type": "object",
      "required": [
@@ -9846,8 +9845,7 @@
        "sub_heading",
        "description",
        "runs",
        "rating",
        "agent_graph_id"
        "rating"
      ],
      "title": "StoreAgent"
    },
@@ -612,7 +612,6 @@ export type LibraryAgentPresetUpdatable = Partial<
export enum LibraryAgentSortEnum {
  CREATED_AT = "createdAt",
  UPDATED_AT = "updatedAt",
  LAST_EXECUTED = "lastExecuted",
}

/* *** CREDENTIALS *** */
@@ -85,7 +85,7 @@ export class LibraryPage extends BasePage {

  async selectSortOption(
    page: Page,
    sortOption: "Last Executed" | "Creation Date" | "Last Modified",
    sortOption: "Creation Date" | "Last Modified",
  ): Promise<void> {
    const { getRole } = getSelectors(page);
    await getRole("combobox").click();
@@ -182,7 +182,7 @@ test("logged in user is redirected from /login to /library", async ({
  await hasUrl(page, "/marketplace");

  await page.goto("/login");
  await hasUrl(page, "/library");
  await hasUrl(page, "/library?sort=updatedAt");
});

test("logged in user is redirected from /signup to /library", async ({
@@ -195,5 +195,5 @@ test("logged in user is redirected from /signup to /library", async ({
  await hasUrl(page, "/marketplace");

  await page.goto("/signup");
  await hasUrl(page, "/library");
  await hasUrl(page, "/library?sort=updatedAt");
});