mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-02-02 10:55:14 -05:00)

Compare commits: docker/opt ... dev (2 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 1081590384 | |
| | 7e37de8e30 | |
@@ -37,15 +37,13 @@ ENV POETRY_VIRTUALENVS_CREATE=true
 ENV POETRY_VIRTUALENVS_IN_PROJECT=true
 ENV PATH=/opt/poetry/bin:$PATH
 
-RUN pip3 install --no-cache-dir poetry --break-system-packages
+RUN pip3 install poetry --break-system-packages
 
 # Copy and install dependencies
 COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
 COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
 WORKDIR /app/autogpt_platform/backend
-# Production image only needs runtime deps; dev deps (pytest, black, ruff, etc.)
-# are installed locally via `poetry install --with dev` per the development docs
-RUN poetry install --no-ansi --no-root --only main
+RUN poetry install --no-ansi --no-root
 
 # Generate Prisma client
 COPY autogpt_platform/backend/schema.prisma ./
@@ -53,15 +51,6 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
 COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
 RUN poetry run prisma generate && poetry run gen-prisma-stub
 
-# Clean up build artifacts and caches to reduce layer size
-# Note: setuptools is kept as it's a direct dependency (used by aioclamd via pkg_resources)
-RUN find /app -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true; \
-    find /app -type d -name tests -exec rm -rf {} + 2>/dev/null || true; \
-    find /app -type d -name test -exec rm -rf {} + 2>/dev/null || true; \
-    rm -rf /app/autogpt_platform/backend/.venv/lib/python*/site-packages/pip* \
-        /root/.cache/pip \
-        /root/.cache/pypoetry
-
 FROM debian:13-slim AS server_dependencies
 
 WORKDIR /app
@@ -79,7 +68,7 @@ RUN apt-get update && apt-get install -y \
     python3-pip \
     && rm -rf /var/lib/apt/lists/*
 
-# Copy built artifacts from builder (cleaned of caches, __pycache__, and test dirs)
+# Copy only necessary files from builder
 COPY --from=builder /app /app
 COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
 COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
@@ -92,7 +81,9 @@ COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-pyth
 
 ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"
 
-# Copy fresh source from context (overwrites builder's copy with latest source)
+RUN mkdir -p /app/autogpt_platform/autogpt_libs
+RUN mkdir -p /app/autogpt_platform/backend
+
 COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
 
 COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
@@ -14,6 +14,7 @@ from backend.data.graph import (
     create_graph,
     get_graph,
     get_graph_all_versions,
+    get_store_listed_graphs,
 )
 from backend.util.exceptions import DatabaseError, NotFoundError
 
@@ -266,18 +267,18 @@ async def get_library_agents_for_generation(
 async def search_marketplace_agents_for_generation(
     search_query: str,
     max_results: int = 10,
-) -> list[MarketplaceAgentSummary]:
+) -> list[LibraryAgentSummary]:
     """Search marketplace agents formatted for Agent Generator.
 
-    Note: This returns basic agent info. Full input/output schemas would require
-    additional graph fetches and is a potential future enhancement.
+    Fetches marketplace agents and their full schemas so they can be used
+    as sub-agents in generated workflows.
 
     Args:
         search_query: Search term to find relevant public agents
        max_results: Maximum number of agents to return (default 10)
 
     Returns:
-        List of MarketplaceAgentSummary (without detailed schemas for now)
+        List of LibraryAgentSummary with full input/output schemas
     """
     try:
         response = await store_db.get_store_agents(
@@ -286,15 +287,29 @@ async def search_marketplace_agents_for_generation(
             page_size=max_results,
         )
 
-        results: list[MarketplaceAgentSummary] = []
-        for agent in response.agents:
-            results.append(
-                MarketplaceAgentSummary(
-                    name=agent.agent_name,
-                    description=agent.description,
-                    sub_heading=agent.sub_heading,
-                    creator=agent.creator,
-                    is_marketplace_agent=True,
-                )
-            )
+        agents_with_graphs = [
+            agent for agent in response.agents if agent.agent_graph_id
+        ]
+
+        if not agents_with_graphs:
+            return []
+
+        graph_ids = [agent.agent_graph_id for agent in agents_with_graphs]
+        graphs = await get_store_listed_graphs(*graph_ids)
+
+        results: list[LibraryAgentSummary] = []
+        for agent in agents_with_graphs:
+            graph_id = agent.agent_graph_id
+            if graph_id and graph_id in graphs:
+                graph = graphs[graph_id]
+                results.append(
+                    LibraryAgentSummary(
+                        graph_id=graph.id,
+                        graph_version=graph.version,
+                        name=agent.agent_name,
+                        description=agent.description,
+                        input_schema=graph.input_schema,
+                        output_schema=graph.output_schema,
+                    )
+                )
         return results
@@ -327,8 +342,7 @@ async def get_all_relevant_agents_for_generation(
         max_marketplace_results: Max marketplace agents to return (default 10)
 
     Returns:
-        List of AgentSummary, library agents first (with full schemas),
-        then marketplace agents (basic info only)
+        List of AgentSummary with full schemas (both library and marketplace agents)
     """
     agents: list[AgentSummary] = []
     seen_graph_ids: set[str] = set()
@@ -365,16 +379,11 @@ async def get_all_relevant_agents_for_generation(
             search_query=search_query,
             max_results=max_marketplace_results,
         )
-        library_names: set[str] = set()
-        for a in agents:
-            name = a.get("name")
-            if name and isinstance(name, str):
-                library_names.add(name.lower())
         for agent in marketplace_agents:
-            agent_name = agent.get("name")
-            if agent_name and isinstance(agent_name, str):
-                if agent_name.lower() not in library_names:
-                    agents.append(agent)
+            graph_id = agent.get("graph_id")
+            if graph_id and graph_id not in seen_graph_ids:
+                agents.append(agent)
+                seen_graph_ids.add(graph_id)
 
     return agents
 
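Note on the core.py hunks above: the merge step in get_all_relevant_agents_for_generation now keys on graph_id instead of the agent name. Below is a minimal, self-contained sketch of that behaviour, a reviewer illustration rather than project code; it uses plain dicts in place of the AgentSummary types and seeds seen_graph_ids by hand, since the real seeding happens earlier in the function, outside this diff.

library_agents = [
    {"graph_id": "shared-123", "graph_version": 1, "name": "Shared Agent"},
]
marketplace_agents = [
    # Same graph_id as a library agent, so it should be dropped.
    {"graph_id": "shared-123", "graph_version": 1, "name": "Shared Agent"},
    # New graph_id, so it should be kept.
    {"graph_id": "unique-456", "graph_version": 1, "name": "Unique Agent"},
]

agents = list(library_agents)
seen_graph_ids = {a["graph_id"] for a in agents}

for agent in marketplace_agents:
    graph_id = agent.get("graph_id")
    if graph_id and graph_id not in seen_graph_ids:
        agents.append(agent)
        seen_graph_ids.add(graph_id)

assert [a["name"] for a in agents] == ["Shared Agent", "Unique Agent"]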
@@ -112,6 +112,7 @@ async def get_store_agents(
                     description=agent["description"],
                     runs=agent["runs"],
                     rating=agent["rating"],
+                    agent_graph_id=agent.get("agentGraphId", ""),
                 )
                 store_agents.append(store_agent)
             except Exception as e:
@@ -170,6 +171,7 @@ async def get_store_agents(
                 description=agent.description,
                 runs=agent.runs,
                 rating=agent.rating,
+                agent_graph_id=agent.agentGraphId,
             )
             # Add to the list only if creation was successful
             store_agents.append(store_agent)
@@ -600,6 +600,7 @@ async def hybrid_search(
                 sa.featured,
                 sa.is_available,
                 sa.updated_at,
+                sa."agentGraphId",
                 -- Searchable text for BM25 reranking
                 COALESCE(sa.agent_name, '') || ' ' || COALESCE(sa.sub_heading, '') || ' ' || COALESCE(sa.description, '') as searchable_text,
                 -- Semantic score
@@ -659,6 +660,7 @@ async def hybrid_search(
                 featured,
                 is_available,
                 updated_at,
+                "agentGraphId",
                 searchable_text,
                 semantic_score,
                 lexical_score,
@@ -38,6 +38,7 @@ class StoreAgent(pydantic.BaseModel):
     description: str
     runs: int
     rating: float
+    agent_graph_id: str
 
 
 class StoreAgentsResponse(pydantic.BaseModel):
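Note on the StoreAgent model change above: agent_graph_id is annotated as a plain str, so pydantic treats it as required, which is also why the OpenAPI schema at the end of this compare adds it to the required list. A minimal sketch with a hypothetical stub model, not the full StoreAgent:

from pydantic import BaseModel, ValidationError


class StoreAgentStub(BaseModel):
    # Subset of the real model's fields, enough to show the required-field behaviour.
    agent_name: str
    runs: int
    rating: float
    agent_graph_id: str


# Succeeds: all required fields present.
StoreAgentStub(agent_name="Test Agent", runs=50, rating=4.5, agent_graph_id="test-graph-id")

# Fails: agent_graph_id is missing, so validation raises.
try:
    StoreAgentStub(agent_name="Test Agent", runs=50, rating=4.5)
except ValidationError as exc:
    print(exc)  # reports that agent_graph_id is required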
@@ -26,11 +26,13 @@ def test_store_agent():
         description="Test description",
         runs=50,
         rating=4.5,
+        agent_graph_id="test-graph-id",
     )
     assert agent.slug == "test-agent"
     assert agent.agent_name == "Test Agent"
     assert agent.runs == 50
     assert agent.rating == 4.5
+    assert agent.agent_graph_id == "test-graph-id"
 
 
 def test_store_agents_response():
@@ -46,6 +48,7 @@ def test_store_agents_response():
                 description="Test description",
                 runs=50,
                 rating=4.5,
+                agent_graph_id="test-graph-id",
             )
         ],
         pagination=store_model.Pagination(
@@ -82,6 +82,7 @@ def test_get_agents_featured(
                 description="Featured agent description",
                 runs=100,
                 rating=4.5,
+                agent_graph_id="test-graph-1",
             )
         ],
         pagination=store_model.Pagination(
@@ -127,6 +128,7 @@ def test_get_agents_by_creator(
                 description="Creator agent description",
                 runs=50,
                 rating=4.0,
+                agent_graph_id="test-graph-2",
             )
         ],
         pagination=store_model.Pagination(
@@ -172,6 +174,7 @@ def test_get_agents_sorted(
                 description="Top agent description",
                 runs=1000,
                 rating=5.0,
+                agent_graph_id="test-graph-3",
             )
         ],
         pagination=store_model.Pagination(
@@ -217,6 +220,7 @@ def test_get_agents_search(
                 description="Specific search term description",
                 runs=75,
                 rating=4.2,
+                agent_graph_id="test-graph-search",
             )
         ],
         pagination=store_model.Pagination(
@@ -262,6 +266,7 @@ def test_get_agents_category(
                 description="Category agent description",
                 runs=60,
                 rating=4.1,
+                agent_graph_id="test-graph-category",
             )
         ],
         pagination=store_model.Pagination(
@@ -306,6 +311,7 @@ def test_get_agents_pagination(
                 description=f"Agent {i} description",
                 runs=i * 10,
                 rating=4.0,
+                agent_graph_id="test-graph-2",
             )
             for i in range(5)
         ],
@@ -33,6 +33,7 @@ class TestCacheDeletion:
                 description="Test description",
                 runs=100,
                 rating=4.5,
+                agent_graph_id="test-graph-id",
             )
         ],
         pagination=Pagination(
@@ -1028,6 +1028,39 @@ async def get_graph(
     return GraphModel.from_db(graph, for_export)
 
 
+async def get_store_listed_graphs(*graph_ids: str) -> dict[str, GraphModel]:
+    """Batch-fetch multiple store-listed graphs by their IDs.
+
+    Only returns graphs that have approved store listings (publicly available).
+    Does not require permission checks since store-listed graphs are public.
+
+    Args:
+        *graph_ids: Variable number of graph IDs to fetch
+
+    Returns:
+        Dict mapping graph_id to GraphModel for graphs with approved store listings
+    """
+    if not graph_ids:
+        return {}
+
+    store_listings = await StoreListingVersion.prisma().find_many(
+        where={
+            "agentGraphId": {"in": list(graph_ids)},
+            "submissionStatus": SubmissionStatus.APPROVED,
+            "isDeleted": False,
+        },
+        include={"AgentGraph": {"include": AGENT_GRAPH_INCLUDE}},
+        distinct=["agentGraphId"],
+        order={"agentGraphVersion": "desc"},
+    )
+
+    return {
+        listing.agentGraphId: GraphModel.from_db(listing.AgentGraph)
+        for listing in store_listings
+        if listing.AgentGraph
+    }
+
+
 async def get_graph_as_admin(
     graph_id: str,
     version: int | None = None,
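Note on get_store_listed_graphs above: the return value is a dict keyed by graph_id, and IDs without an approved store listing are simply absent, so callers check membership rather than expecting one entry per requested ID (the new core.py code above does exactly that). A self-contained sketch of this contract, with fake_fetch standing in for the real Prisma-backed function:

import asyncio


async def fake_fetch(*graph_ids: str) -> dict[str, str]:
    # Pretend only graph-123 has an approved, non-deleted store listing.
    listed = {"graph-123": "GraphModel(graph-123)"}
    return {gid: listed[gid] for gid in graph_ids if gid in listed}


async def main() -> None:
    graphs = await fake_fetch("graph-123", "not-listed-456")
    assert "graph-123" in graphs
    assert "not-listed-456" not in graphs  # unlisted IDs are just missing


asyncio.run(main())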
@@ -0,0 +1,39 @@
+from urllib.parse import urlparse
+
+import fastapi
+from fastapi.routing import APIRoute
+
+from backend.api.features.integrations.router import router as integrations_router
+from backend.integrations.providers import ProviderName
+from backend.integrations.webhooks import utils as webhooks_utils
+
+
+def test_webhook_ingress_url_matches_route(monkeypatch) -> None:
+    app = fastapi.FastAPI()
+    app.include_router(integrations_router, prefix="/api/integrations")
+
+    provider = ProviderName.GITHUB
+    webhook_id = "webhook_123"
+    base_url = "https://example.com"
+
+    monkeypatch.setattr(webhooks_utils.app_config, "platform_base_url", base_url)
+
+    route = next(
+        route
+        for route in integrations_router.routes
+        if isinstance(route, APIRoute)
+        and route.path == "/{provider}/webhooks/{webhook_id}/ingress"
+        and "POST" in route.methods
+    )
+    expected_path = f"/api/integrations{route.path}".format(
+        provider=provider.value,
+        webhook_id=webhook_id,
+    )
+    actual_url = urlparse(webhooks_utils.webhook_ingress_url(provider, webhook_id))
+    expected_base = urlparse(base_url)
+
+    assert (actual_url.scheme, actual_url.netloc) == (
+        expected_base.scheme,
+        expected_base.netloc,
+    )
+    assert actual_url.path == expected_path
@@ -9,7 +9,8 @@
       "sub_heading": "Creator agent subheading",
       "description": "Creator agent description",
       "runs": 50,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     }
   ],
   "pagination": {
@@ -9,7 +9,8 @@
       "sub_heading": "Category agent subheading",
       "description": "Category agent description",
       "runs": 60,
-      "rating": 4.1
+      "rating": 4.1,
+      "agent_graph_id": "test-graph-category"
     }
   ],
   "pagination": {
@@ -9,7 +9,8 @@
       "sub_heading": "Agent 0 subheading",
       "description": "Agent 0 description",
       "runs": 0,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     },
     {
       "slug": "agent-1",
@@ -20,7 +21,8 @@
       "sub_heading": "Agent 1 subheading",
       "description": "Agent 1 description",
       "runs": 10,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     },
     {
       "slug": "agent-2",
@@ -31,7 +33,8 @@
       "sub_heading": "Agent 2 subheading",
       "description": "Agent 2 description",
       "runs": 20,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     },
     {
       "slug": "agent-3",
@@ -42,7 +45,8 @@
       "sub_heading": "Agent 3 subheading",
       "description": "Agent 3 description",
       "runs": 30,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     },
     {
       "slug": "agent-4",
@@ -53,7 +57,8 @@
       "sub_heading": "Agent 4 subheading",
       "description": "Agent 4 description",
       "runs": 40,
-      "rating": 4.0
+      "rating": 4.0,
+      "agent_graph_id": "test-graph-2"
     }
   ],
   "pagination": {
@@ -9,7 +9,8 @@
       "sub_heading": "Search agent subheading",
       "description": "Specific search term description",
       "runs": 75,
-      "rating": 4.2
+      "rating": 4.2,
+      "agent_graph_id": "test-graph-search"
     }
   ],
   "pagination": {
@@ -9,7 +9,8 @@
       "sub_heading": "Top agent subheading",
       "description": "Top agent description",
       "runs": 1000,
-      "rating": 5.0
+      "rating": 5.0,
+      "agent_graph_id": "test-graph-3"
     }
   ],
   "pagination": {
@@ -9,7 +9,8 @@
       "sub_heading": "Featured agent subheading",
       "description": "Featured agent description",
       "runs": 100,
-      "rating": 4.5
+      "rating": 4.5,
+      "agent_graph_id": "test-graph-1"
     }
   ],
   "pagination": {
@@ -134,15 +134,28 @@ class TestSearchMarketplaceAgentsForGeneration:
                 description="A public agent",
                 sub_heading="Does something useful",
                 creator="creator-1",
+                agent_graph_id="graph-123",
             )
         ]
 
-        # The store_db is dynamically imported, so patch the import path
-        with patch(
-            "backend.api.features.store.db.get_store_agents",
-            new_callable=AsyncMock,
-            return_value=mock_response,
-        ) as mock_search:
+        mock_graph = MagicMock()
+        mock_graph.id = "graph-123"
+        mock_graph.version = 1
+        mock_graph.input_schema = {"type": "object"}
+        mock_graph.output_schema = {"type": "object"}
+
+        with (
+            patch(
+                "backend.api.features.store.db.get_store_agents",
+                new_callable=AsyncMock,
+                return_value=mock_response,
+            ) as mock_search,
+            patch(
+                "backend.api.features.chat.tools.agent_generator.core.get_store_listed_graphs",
+                new_callable=AsyncMock,
+                return_value={"graph-123": mock_graph},
+            ),
+        ):
             result = await core.search_marketplace_agents_for_generation(
                 search_query="automation",
                 max_results=10,
@@ -156,7 +169,7 @@ class TestSearchMarketplaceAgentsForGeneration:
 
         assert len(result) == 1
         assert result[0]["name"] == "Public Agent"
-        assert result[0]["is_marketplace_agent"] is True
+        assert result[0]["graph_id"] == "graph-123"
 
     @pytest.mark.asyncio
     async def test_handles_marketplace_error_gracefully(self):
@@ -193,11 +206,12 @@ class TestGetAllRelevantAgentsForGeneration:
 
         marketplace_agents = [
             {
+                "graph_id": "market-456",
+                "graph_version": 1,
                 "name": "Market Agent",
                 "description": "From marketplace",
-                "sub_heading": "Sub heading",
-                "creator": "creator-1",
-                "is_marketplace_agent": True,
+                "input_schema": {},
+                "output_schema": {},
             }
         ]
 
@@ -225,11 +239,11 @@ class TestGetAllRelevantAgentsForGeneration:
         assert result[1]["name"] == "Market Agent"
 
     @pytest.mark.asyncio
-    async def test_deduplicates_by_name(self):
-        """Test that marketplace agents with same name as library are excluded."""
+    async def test_deduplicates_by_graph_id(self):
+        """Test that marketplace agents with same graph_id as library are excluded."""
         library_agents = [
             {
-                "graph_id": "lib-123",
+                "graph_id": "shared-123",
                 "graph_version": 1,
                 "name": "Shared Agent",
                 "description": "From library",
@@ -240,18 +254,20 @@ class TestGetAllRelevantAgentsForGeneration:
 
         marketplace_agents = [
             {
-                "name": "Shared Agent",  # Same name, should be deduplicated
+                "graph_id": "shared-123",  # Same graph_id, should be deduplicated
+                "graph_version": 1,
+                "name": "Shared Agent",
                 "description": "From marketplace",
-                "sub_heading": "Sub heading",
-                "creator": "creator-1",
-                "is_marketplace_agent": True,
+                "input_schema": {},
+                "output_schema": {},
             },
             {
+                "graph_id": "unique-456",
+                "graph_version": 1,
                 "name": "Unique Agent",
                 "description": "Only in marketplace",
-                "sub_heading": "Sub heading",
-                "creator": "creator-2",
-                "is_marketplace_agent": True,
+                "input_schema": {},
+                "output_schema": {},
             },
         ]
 
@@ -273,7 +289,7 @@ class TestGetAllRelevantAgentsForGeneration:
             include_marketplace=True,
         )
 
-        # Shared Agent from marketplace should be excluded
+        # Shared Agent from marketplace should be excluded by graph_id
         assert len(result) == 2
         names = [a["name"] for a in result]
         assert "Shared Agent" in names
@@ -9833,7 +9833,8 @@
         "sub_heading": { "type": "string", "title": "Sub Heading" },
         "description": { "type": "string", "title": "Description" },
         "runs": { "type": "integer", "title": "Runs" },
-        "rating": { "type": "number", "title": "Rating" }
+        "rating": { "type": "number", "title": "Rating" },
+        "agent_graph_id": { "type": "string", "title": "Agent Graph Id" }
       },
       "type": "object",
       "required": [
@@ -9845,7 +9846,8 @@
         "sub_heading",
         "description",
         "runs",
-        "rating"
+        "rating",
+        "agent_graph_id"
       ],
       "title": "StoreAgent"
     },