Compare commits


1 Commit

Author  SHA1        Message                                                          Date
Swifty  0cbf7db58c  extracted frontend changes out of the hackathon/copilot branch   2026-01-07 09:28:39 +01:00
220 changed files with 12463 additions and 7176 deletions

View File

@@ -16,7 +16,6 @@
!autogpt_platform/backend/poetry.lock
!autogpt_platform/backend/README.md
!autogpt_platform/backend/.env
!autogpt_platform/backend/gen_prisma_types_stub.py
# Platform - Market
!autogpt_platform/market/market/

View File

@@ -74,7 +74,7 @@ jobs:
- name: Generate Prisma Client
working-directory: autogpt_platform/backend
run: poetry run prisma generate && poetry run gen-prisma-stub
run: poetry run prisma generate
# Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
- name: Set up Node.js

View File

@@ -90,7 +90,7 @@ jobs:
- name: Generate Prisma Client
working-directory: autogpt_platform/backend
run: poetry run prisma generate && poetry run gen-prisma-stub
run: poetry run prisma generate
# Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
- name: Set up Node.js

View File

@@ -72,7 +72,7 @@ jobs:
- name: Generate Prisma Client
working-directory: autogpt_platform/backend
run: poetry run prisma generate && poetry run gen-prisma-stub
run: poetry run prisma generate
# Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
- name: Set up Node.js
@@ -108,16 +108,6 @@ jobs:
# run: pnpm playwright install --with-deps chromium
# Docker setup for development environment
- name: Free up disk space
run: |
# Remove large unused tools to free disk space for Docker builds
sudo rm -rf /usr/share/dotnet
sudo rm -rf /usr/local/lib/android
sudo rm -rf /opt/ghc
sudo rm -rf /opt/hostedtoolcache/CodeQL
sudo docker system prune -af
df -h
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

View File

@@ -134,7 +134,7 @@ jobs:
run: poetry install
- name: Generate Prisma Client
run: poetry run prisma generate && poetry run gen-prisma-stub
run: poetry run prisma generate
- id: supabase
name: Start Supabase

View File

@@ -12,7 +12,6 @@ reset-db:
rm -rf db/docker/volumes/db/data
cd backend && poetry run prisma migrate deploy
cd backend && poetry run prisma generate
cd backend && poetry run gen-prisma-stub
# View logs for core services
logs-core:
@@ -34,7 +33,6 @@ init-env:
migrate:
cd backend && poetry run prisma migrate deploy
cd backend && poetry run prisma generate
cd backend && poetry run gen-prisma-stub
run-backend:
cd backend && poetry run app

View File

@@ -48,8 +48,7 @@ RUN poetry install --no-ansi --no-root
# Generate Prisma client
COPY autogpt_platform/backend/schema.prisma ./
COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
RUN poetry run prisma generate && poetry run gen-prisma-stub
RUN poetry run prisma generate
FROM debian:13-slim AS server_dependencies

View File

@@ -48,7 +48,6 @@ class LibraryAgent(pydantic.BaseModel):
id: str
graph_id: str
graph_version: int
owner_user_id: str # ID of user who owns/created this agent graph
image_url: str | None
@@ -164,7 +163,6 @@ class LibraryAgent(pydantic.BaseModel):
id=agent.id,
graph_id=agent.agentGraphId,
graph_version=agent.agentGraphVersion,
owner_user_id=agent.userId,
image_url=agent.imageUrl,
creator_name=creator_name,
creator_image_url=creator_image_url,

View File

@@ -42,7 +42,6 @@ async def test_get_library_agents_success(
id="test-agent-1",
graph_id="test-agent-1",
graph_version=1,
owner_user_id=test_user_id,
name="Test Agent 1",
description="Test Description 1",
image_url=None,
@@ -65,7 +64,6 @@ async def test_get_library_agents_success(
id="test-agent-2",
graph_id="test-agent-2",
graph_version=1,
owner_user_id=test_user_id,
name="Test Agent 2",
description="Test Description 2",
image_url=None,
@@ -140,7 +138,6 @@ async def test_get_favorite_library_agents_success(
id="test-agent-1",
graph_id="test-agent-1",
graph_version=1,
owner_user_id=test_user_id,
name="Favorite Agent 1",
description="Test Favorite Description 1",
image_url=None,
@@ -208,7 +205,6 @@ def test_add_agent_to_library_success(
id="test-library-agent-id",
graph_id="test-agent-1",
graph_version=1,
owner_user_id=test_user_id,
name="Test Agent 1",
description="Test Description 1",
image_url=None,

View File

@@ -1,72 +0,0 @@
#!/usr/bin/env python3
"""
CLI script to backfill embeddings for store agents.
Usage:
poetry run python -m backend.server.v2.store.backfill_embeddings [--batch-size N]
"""
import argparse
import asyncio
import sys
import prisma
from backend.api.features.store.embeddings import (
backfill_missing_embeddings,
get_embedding_stats,
)
async def main(batch_size: int = 100) -> int:
"""Run the backfill process."""
# Initialize Prisma client
client = prisma.Prisma()
await client.connect()
prisma.register(client)
try:
# Get current stats
print("Current embedding stats:")
stats = await get_embedding_stats()
print(f" Total approved: {stats['total_approved']}")
print(f" With embeddings: {stats['with_embeddings']}")
print(f" Without embeddings: {stats['without_embeddings']}")
print(f" Coverage: {stats['coverage_percent']}%")
if stats["without_embeddings"] == 0:
print("\nAll agents already have embeddings. Nothing to do.")
return 0
# Run backfill
print(f"\nBackfilling up to {batch_size} embeddings...")
result = await backfill_missing_embeddings(batch_size=batch_size)
print(f" Processed: {result['processed']}")
print(f" Success: {result['success']}")
print(f" Failed: {result['failed']}")
# Get final stats
print("\nFinal embedding stats:")
stats = await get_embedding_stats()
print(f" Total approved: {stats['total_approved']}")
print(f" With embeddings: {stats['with_embeddings']}")
print(f" Without embeddings: {stats['without_embeddings']}")
print(f" Coverage: {stats['coverage_percent']}%")
return 0 if result["failed"] == 0 else 1
finally:
await client.disconnect()
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Backfill embeddings for store agents")
parser.add_argument(
"--batch-size",
type=int,
default=100,
help="Number of embeddings to generate (default: 100)",
)
args = parser.parse_args()
sys.exit(asyncio.run(main(batch_size=args.batch_size)))

View File

@@ -1,5 +1,6 @@
import asyncio
import logging
import typing
from datetime import datetime, timezone
from typing import Literal
@@ -9,7 +10,7 @@ import prisma.errors
import prisma.models
import prisma.types
from backend.data.db import transaction
from backend.data.db import query_raw_with_schema, transaction
from backend.data.graph import (
GraphMeta,
GraphModel,
@@ -29,8 +30,6 @@ from backend.util.settings import Settings
from . import exceptions as store_exceptions
from . import model as store_model
from .embeddings import ensure_embedding
from .hybrid_search import hybrid_search
logger = logging.getLogger(__name__)
settings = Settings()
@@ -57,62 +56,122 @@ async def get_store_agents(
f"Getting store agents. featured={featured}, creators={creators}, sorted_by={sorted_by}, search={search_query}, category={category}, page={page}"
)
search_used_hybrid = False
store_agents: list[store_model.StoreAgent] = []
total = 0
total_pages = 0
try:
# If search_query is provided, try hybrid search (embeddings + tsvector)
# If search_query is provided, use full-text search
if search_query:
try:
# Use hybrid search combining semantic and lexical signals
agents, total = await hybrid_search(
query=search_query,
featured=featured,
creators=creators,
category=category,
sorted_by="relevance", # Use hybrid scoring for relevance
page=page,
page_size=page_size,
)
search_used_hybrid = True
offset = (page - 1) * page_size
# Convert hybrid search results (dict format)
total_pages = (total + page_size - 1) // page_size
store_agents: list[store_model.StoreAgent] = []
for agent in agents:
try:
store_agent = store_model.StoreAgent(
slug=agent["slug"],
agent_name=agent["agent_name"],
agent_image=(
agent["agent_image"][0] if agent["agent_image"] else ""
),
creator=agent["creator_username"] or "Needs Profile",
creator_avatar=agent["creator_avatar"] or "",
sub_heading=agent["sub_heading"],
description=agent["description"],
runs=agent["runs"],
rating=agent["rating"],
)
store_agents.append(store_agent)
except Exception as e:
logger.error(
f"Error parsing Store agent from hybrid search results: {e}"
)
continue
# Whitelist allowed order_by columns
ALLOWED_ORDER_BY = {
"rating": "rating DESC, rank DESC",
"runs": "runs DESC, rank DESC",
"name": "agent_name ASC, rank ASC",
"updated_at": "updated_at DESC, rank DESC",
}
except Exception as hybrid_error:
# If hybrid search fails (e.g., missing embeddings table),
# fallback to basic search logic below
logger.warning(
f"Hybrid search failed, falling back to basic search: {hybrid_error}"
)
search_used_hybrid = False
# Validate and get order clause
if sorted_by and sorted_by in ALLOWED_ORDER_BY:
order_by_clause = ALLOWED_ORDER_BY[sorted_by]
else:
order_by_clause = "updated_at DESC, rank DESC"
if not search_used_hybrid:
# Fallback path - use basic search or no search
# Build WHERE conditions and parameters list
where_parts: list[str] = []
params: list[typing.Any] = [search_query] # $1 - search term
param_index = 2 # Start at $2 for next parameter
# Always filter for available agents
where_parts.append("is_available = true")
if featured:
where_parts.append("featured = true")
if creators and creators:
# Use ANY with array parameter
where_parts.append(f"creator_username = ANY(${param_index})")
params.append(creators)
param_index += 1
if category and category:
where_parts.append(f"${param_index} = ANY(categories)")
params.append(category)
param_index += 1
sql_where_clause: str = " AND ".join(where_parts) if where_parts else "1=1"
# Add pagination params
params.extend([page_size, offset])
limit_param = f"${param_index}"
offset_param = f"${param_index + 1}"
# Execute full-text search query with parameterized values
sql_query = f"""
SELECT
slug,
agent_name,
agent_image,
creator_username,
creator_avatar,
sub_heading,
description,
runs,
rating,
categories,
featured,
is_available,
updated_at,
ts_rank_cd(search, query) AS rank
FROM {{schema_prefix}}"StoreAgent",
plainto_tsquery('english', $1) AS query
WHERE {sql_where_clause}
AND search @@ query
ORDER BY {order_by_clause}
LIMIT {limit_param} OFFSET {offset_param}
"""
# Count query for pagination - only uses search term parameter
count_query = f"""
SELECT COUNT(*) as count
FROM {{schema_prefix}}"StoreAgent",
plainto_tsquery('english', $1) AS query
WHERE {sql_where_clause}
AND search @@ query
"""
# Execute both queries with parameters
agents = await query_raw_with_schema(sql_query, *params)
# For count, use params without pagination (last 2 params)
count_params = params[:-2]
count_result = await query_raw_with_schema(count_query, *count_params)
total = count_result[0]["count"] if count_result else 0
total_pages = (total + page_size - 1) // page_size
# Convert raw results to StoreAgent models
store_agents: list[store_model.StoreAgent] = []
for agent in agents:
try:
store_agent = store_model.StoreAgent(
slug=agent["slug"],
agent_name=agent["agent_name"],
agent_image=(
agent["agent_image"][0] if agent["agent_image"] else ""
),
creator=agent["creator_username"] or "Needs Profile",
creator_avatar=agent["creator_avatar"] or "",
sub_heading=agent["sub_heading"],
description=agent["description"],
runs=agent["runs"],
rating=agent["rating"],
)
store_agents.append(store_agent)
except Exception as e:
logger.error(f"Error parsing Store agent from search results: {e}")
continue
else:
# Non-search query path (original logic)
where_clause: prisma.types.StoreAgentWhereInput = {"is_available": True}
if featured:
where_clause["featured"] = featured
@@ -121,14 +180,6 @@ async def get_store_agents(
if category:
where_clause["categories"] = {"has": category}
# Add basic text search if search_query provided but hybrid failed
if search_query:
where_clause["OR"] = [
{"agent_name": {"contains": search_query, "mode": "insensitive"}},
{"sub_heading": {"contains": search_query, "mode": "insensitive"}},
{"description": {"contains": search_query, "mode": "insensitive"}},
]
order_by = []
if sorted_by == "rating":
order_by.append({"rating": "desc"})
@@ -563,7 +614,6 @@ async def get_store_submissions(
submission_models = []
for sub in submissions:
submission_model = store_model.StoreSubmission(
listing_id=sub.listing_id,
agent_id=sub.agent_id,
agent_version=sub.agent_version,
name=sub.name,
@@ -617,48 +667,35 @@ async def delete_store_submission(
submission_id: str,
) -> bool:
"""
Delete a store submission version as the submitting user.
Delete a store listing submission as the submitting user.
Args:
user_id: ID of the authenticated user
submission_id: StoreListingVersion ID to delete
submission_id: ID of the submission to be deleted
Returns:
bool: True if successfully deleted
bool: True if the submission was successfully deleted, False otherwise
"""
logger.debug(f"Deleting store submission {submission_id} for user {user_id}")
try:
# Find the submission version with ownership check
version = await prisma.models.StoreListingVersion.prisma().find_first(
where={"id": submission_id}, include={"StoreListing": True}
# Verify the submission belongs to this user
submission = await prisma.models.StoreListing.prisma().find_first(
where={"agentGraphId": submission_id, "owningUserId": user_id}
)
if (
not version
or not version.StoreListing
or version.StoreListing.owningUserId != user_id
):
raise store_exceptions.SubmissionNotFoundError("Submission not found")
# Prevent deletion of approved submissions
if version.submissionStatus == prisma.enums.SubmissionStatus.APPROVED:
raise store_exceptions.InvalidOperationError(
"Cannot delete approved submissions"
if not submission:
logger.warning(f"Submission not found for user {user_id}: {submission_id}")
raise store_exceptions.SubmissionNotFoundError(
f"Submission not found for this user. User ID: {user_id}, Submission ID: {submission_id}"
)
# Delete the version
await prisma.models.StoreListingVersion.prisma().delete(
where={"id": version.id}
)
# Delete the submission
await prisma.models.StoreListing.prisma().delete(where={"id": submission.id})
# Clean up empty listing if this was the last version
remaining = await prisma.models.StoreListingVersion.prisma().count(
where={"storeListingId": version.storeListingId}
logger.debug(
f"Successfully deleted submission {submission_id} for user {user_id}"
)
if remaining == 0:
await prisma.models.StoreListing.prisma().delete(
where={"id": version.storeListingId}
)
return True
except Exception as e:
@@ -722,15 +759,9 @@ async def create_store_submission(
logger.warning(
f"Agent not found for user {user_id}: {agent_id} v{agent_version}"
)
# Provide more user-friendly error message when agent_id is empty
if not agent_id or agent_id.strip() == "":
raise store_exceptions.AgentNotFoundError(
"No agent selected. Please select an agent before submitting to the store."
)
else:
raise store_exceptions.AgentNotFoundError(
f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}"
)
raise store_exceptions.AgentNotFoundError(
f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}"
)
# Check if listing already exists for this agent
existing_listing = await prisma.models.StoreListing.prisma().find_first(
@@ -802,7 +833,6 @@ async def create_store_submission(
logger.debug(f"Created store listing for agent {agent_id}")
# Return submission details
return store_model.StoreSubmission(
listing_id=listing.id,
agent_id=agent_id,
agent_version=agent_version,
name=name,
@@ -914,56 +944,81 @@ async def edit_store_submission(
# Currently we are not allowing user to update the agent associated with a submission
# If we allow it in future, then we need a check here to verify the agent belongs to this user.
# Only allow editing of PENDING submissions
if current_version.submissionStatus != prisma.enums.SubmissionStatus.PENDING:
# Check if we can edit this submission
if current_version.submissionStatus == prisma.enums.SubmissionStatus.REJECTED:
raise store_exceptions.InvalidOperationError(
f"Cannot edit a {current_version.submissionStatus.value.lower()} submission. Only pending submissions can be edited."
"Cannot edit a rejected submission"
)
# For APPROVED submissions, we need to create a new version
if current_version.submissionStatus == prisma.enums.SubmissionStatus.APPROVED:
# Create a new version for the existing listing
return await create_store_version(
user_id=user_id,
agent_id=current_version.agentGraphId,
agent_version=current_version.agentGraphVersion,
store_listing_id=current_version.storeListingId,
name=name,
video_url=video_url,
agent_output_demo_url=agent_output_demo_url,
image_urls=image_urls,
description=description,
sub_heading=sub_heading,
categories=categories,
changes_summary=changes_summary,
recommended_schedule_cron=recommended_schedule_cron,
instructions=instructions,
)
# For PENDING submissions, we can update the existing version
# Update the existing version
updated_version = await prisma.models.StoreListingVersion.prisma().update(
where={"id": store_listing_version_id},
data=prisma.types.StoreListingVersionUpdateInput(
elif current_version.submissionStatus == prisma.enums.SubmissionStatus.PENDING:
# Update the existing version
updated_version = await prisma.models.StoreListingVersion.prisma().update(
where={"id": store_listing_version_id},
data=prisma.types.StoreListingVersionUpdateInput(
name=name,
videoUrl=video_url,
agentOutputDemoUrl=agent_output_demo_url,
imageUrls=image_urls,
description=description,
categories=categories,
subHeading=sub_heading,
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
instructions=instructions,
),
)
logger.debug(
f"Updated existing version {store_listing_version_id} for agent {current_version.agentGraphId}"
)
if not updated_version:
raise DatabaseError("Failed to update store listing version")
return store_model.StoreSubmission(
agent_id=current_version.agentGraphId,
agent_version=current_version.agentGraphVersion,
name=name,
videoUrl=video_url,
agentOutputDemoUrl=agent_output_demo_url,
imageUrls=image_urls,
sub_heading=sub_heading,
slug=current_version.StoreListing.slug,
description=description,
categories=categories,
subHeading=sub_heading,
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
instructions=instructions,
),
)
image_urls=image_urls,
date_submitted=updated_version.submittedAt or updated_version.createdAt,
status=updated_version.submissionStatus,
runs=0,
rating=0.0,
store_listing_version_id=updated_version.id,
changes_summary=changes_summary,
video_url=video_url,
categories=categories,
version=updated_version.version,
)
logger.debug(
f"Updated existing version {store_listing_version_id} for agent {current_version.agentGraphId}"
)
if not updated_version:
raise DatabaseError("Failed to update store listing version")
return store_model.StoreSubmission(
listing_id=current_version.StoreListing.id,
agent_id=current_version.agentGraphId,
agent_version=current_version.agentGraphVersion,
name=name,
sub_heading=sub_heading,
slug=current_version.StoreListing.slug,
description=description,
instructions=instructions,
image_urls=image_urls,
date_submitted=updated_version.submittedAt or updated_version.createdAt,
status=updated_version.submissionStatus,
runs=0,
rating=0.0,
store_listing_version_id=updated_version.id,
changes_summary=changes_summary,
video_url=video_url,
categories=categories,
version=updated_version.version,
)
else:
raise store_exceptions.InvalidOperationError(
f"Cannot edit submission with status: {current_version.submissionStatus}"
)
except (
store_exceptions.SubmissionNotFoundError,
@@ -1042,78 +1097,38 @@ async def create_store_version(
f"Agent not found for this user. User ID: {user_id}, Agent ID: {agent_id}, Version: {agent_version}"
)
# Check if there's already a PENDING submission for this agent (any version)
existing_pending_submission = (
await prisma.models.StoreListingVersion.prisma().find_first(
where=prisma.types.StoreListingVersionWhereInput(
storeListingId=store_listing_id,
agentGraphId=agent_id,
submissionStatus=prisma.enums.SubmissionStatus.PENDING,
isDeleted=False,
)
# Get the latest version number
latest_version = listing.Versions[0] if listing.Versions else None
next_version = (latest_version.version + 1) if latest_version else 1
# Create a new version for the existing listing
new_version = await prisma.models.StoreListingVersion.prisma().create(
data=prisma.types.StoreListingVersionCreateInput(
version=next_version,
agentGraphId=agent_id,
agentGraphVersion=agent_version,
name=name,
videoUrl=video_url,
agentOutputDemoUrl=agent_output_demo_url,
imageUrls=image_urls,
description=description,
instructions=instructions,
categories=categories,
subHeading=sub_heading,
submissionStatus=prisma.enums.SubmissionStatus.PENDING,
submittedAt=datetime.now(),
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
storeListingId=store_listing_id,
)
)
# Handle existing pending submission and create new one atomically
async with transaction() as tx:
# Get the latest version number first
latest_listing = await prisma.models.StoreListing.prisma(tx).find_first(
where=prisma.types.StoreListingWhereInput(
id=store_listing_id, owningUserId=user_id
),
include={"Versions": {"order_by": {"version": "desc"}, "take": 1}},
)
if not latest_listing:
raise store_exceptions.ListingNotFoundError(
f"Store listing not found. User ID: {user_id}, Listing ID: {store_listing_id}"
)
latest_version = (
latest_listing.Versions[0] if latest_listing.Versions else None
)
next_version = (latest_version.version + 1) if latest_version else 1
# If there's an existing pending submission, delete it atomically before creating new one
if existing_pending_submission:
logger.info(
f"Found existing PENDING submission for agent {agent_id} (was v{existing_pending_submission.agentGraphVersion}, now v{agent_version}), replacing existing submission instead of creating duplicate"
)
await prisma.models.StoreListingVersion.prisma(tx).delete(
where={"id": existing_pending_submission.id}
)
logger.debug(
f"Deleted existing pending submission {existing_pending_submission.id}"
)
# Create a new version for the existing listing
new_version = await prisma.models.StoreListingVersion.prisma(tx).create(
data=prisma.types.StoreListingVersionCreateInput(
version=next_version,
agentGraphId=agent_id,
agentGraphVersion=agent_version,
name=name,
videoUrl=video_url,
agentOutputDemoUrl=agent_output_demo_url,
imageUrls=image_urls,
description=description,
instructions=instructions,
categories=categories,
subHeading=sub_heading,
submissionStatus=prisma.enums.SubmissionStatus.PENDING,
submittedAt=datetime.now(),
changesSummary=changes_summary,
recommendedScheduleCron=recommended_schedule_cron,
storeListingId=store_listing_id,
)
)
logger.debug(
f"Created new version for listing {store_listing_id} of agent {agent_id}"
)
# Return submission details
return store_model.StoreSubmission(
listing_id=listing.id,
agent_id=agent_id,
agent_version=agent_version,
name=name,
@@ -1549,22 +1564,6 @@ async def review_store_submission(
},
)
# Generate embedding for approved listing (non-blocking)
try:
await ensure_embedding(
version_id=store_listing_version_id,
name=store_listing_version.name,
description=store_listing_version.description,
sub_heading=store_listing_version.subHeading,
categories=store_listing_version.categories or [],
)
except Exception as e:
# Don't fail approval if embedding generation fails
logger.warning(
f"Failed to generate embedding for approved listing "
f"{store_listing_version_id}: {e}"
)
# If rejecting an approved agent, update the StoreListing accordingly
if is_rejecting_approved:
# Check if there are other approved versions
@@ -1709,12 +1708,15 @@ async def review_store_submission(
# Convert to Pydantic model for consistency
return store_model.StoreSubmission(
listing_id=(submission.StoreListing.id if submission.StoreListing else ""),
agent_id=submission.agentGraphId,
agent_version=submission.agentGraphVersion,
name=submission.name,
sub_heading=submission.subHeading,
slug=(submission.StoreListing.slug if submission.StoreListing else ""),
slug=(
submission.StoreListing.slug
if hasattr(submission, "storeListing") and submission.StoreListing
else ""
),
description=submission.description,
instructions=submission.instructions,
image_urls=submission.imageUrls or [],
@@ -1816,7 +1818,9 @@ async def get_admin_listings_with_versions(
where = prisma.types.StoreListingWhereInput(**where_dict)
include = prisma.types.StoreListingInclude(
Versions=prisma.types.FindManyStoreListingVersionArgsFromStoreListing(
order_by={"version": "desc"}
order_by=prisma.types._StoreListingVersion_version_OrderByInput(
version="desc"
)
),
OwningUser=True,
)
@@ -1841,7 +1845,6 @@ async def get_admin_listings_with_versions(
# If we have versions, turn them into StoreSubmission models
for version in listing.Versions or []:
version_model = store_model.StoreSubmission(
listing_id=listing.id,
agent_id=version.agentGraphId,
agent_version=version.agentGraphVersion,
name=version.name,

View File

@@ -1,533 +0,0 @@
"""
Unified Content Embeddings Service
Handles generation and storage of OpenAI embeddings for all content types
(store listings, blocks, documentation, library agents) to enable semantic/hybrid search.
"""
import asyncio
import logging
from typing import Any
import prisma
from openai import OpenAI
from prisma.enums import ContentType
from backend.util.json import dumps
from backend.util.settings import Settings
logger = logging.getLogger(__name__)
# OpenAI embedding model configuration
EMBEDDING_MODEL = "text-embedding-3-small"
EMBEDDING_DIM = 1536
def build_searchable_text(
name: str,
description: str,
sub_heading: str,
categories: list[str],
) -> str:
"""
Build searchable text from listing version fields.
Combines relevant fields into a single string for embedding.
"""
parts = []
# Name is important - include it
if name:
parts.append(name)
# Sub-heading provides context
if sub_heading:
parts.append(sub_heading)
# Description is the main content
if description:
parts.append(description)
# Categories help with semantic matching
if categories:
parts.append(" ".join(categories))
return " ".join(parts)
async def generate_embedding(text: str) -> list[float] | None:
"""
Generate embedding for text using OpenAI API.
Returns None if embedding generation fails.
"""
try:
settings = Settings()
api_key = settings.secrets.openai_internal_api_key
if not api_key:
logger.warning("openai_internal_api_key not set, cannot generate embedding")
return None
client = OpenAI(api_key=api_key)
# Truncate text to avoid token limits (~32k chars for safety)
truncated_text = text[:32000]
response = client.embeddings.create(
model=EMBEDDING_MODEL,
input=truncated_text,
)
embedding = response.data[0].embedding
logger.debug(f"Generated embedding with {len(embedding)} dimensions")
return embedding
except Exception as e:
logger.error(f"Failed to generate embedding: {e}")
return None
async def store_embedding(
version_id: str,
embedding: list[float],
tx: prisma.Prisma | None = None,
) -> bool:
"""
Store embedding in the database.
BACKWARD COMPATIBILITY: Maintained for existing store listing usage.
Uses raw SQL since Prisma doesn't natively support pgvector.
"""
return await store_content_embedding(
content_type=ContentType.STORE_AGENT,
content_id=version_id,
embedding=embedding,
searchable_text="", # Will be populated from existing data
metadata=None,
user_id=None, # Store agents are public
tx=tx,
)
async def store_content_embedding(
content_type: ContentType,
content_id: str,
embedding: list[float],
searchable_text: str,
metadata: dict | None = None,
user_id: str | None = None,
tx: prisma.Prisma | None = None,
) -> bool:
"""
Store embedding in the unified content embeddings table.
New function for unified content embedding storage.
Uses raw SQL since Prisma doesn't natively support pgvector.
"""
try:
client = tx if tx else prisma.get_client()
# Convert embedding to PostgreSQL vector format
embedding_str = "[" + ",".join(str(x) for x in embedding) + "]"
metadata_json = dumps(metadata or {})
# Upsert the embedding
await client.execute_raw(
"""
INSERT INTO platform."UnifiedContentEmbedding" (
"contentType", "contentId", "userId", "embedding", "searchableText", "metadata", "createdAt", "updatedAt"
)
VALUES ($1, $2, $3, $4::vector, $5, $6::jsonb, NOW(), NOW())
ON CONFLICT ("contentType", "contentId", "userId")
DO UPDATE SET
"embedding" = $4::vector,
"searchableText" = $5,
"metadata" = $6::jsonb,
"updatedAt" = NOW()
""",
content_type,
content_id,
user_id,
embedding_str,
searchable_text,
metadata_json,
)
logger.info(f"Stored embedding for {content_type}:{content_id}")
return True
except Exception as e:
logger.error(f"Failed to store embedding for {content_type}:{content_id}: {e}")
return False
async def get_embedding(version_id: str) -> dict[str, Any] | None:
"""
Retrieve embedding record for a listing version.
BACKWARD COMPATIBILITY: Maintained for existing store listing usage.
Returns dict with storeListingVersionId, embedding, timestamps or None if not found.
"""
result = await get_content_embedding(
ContentType.STORE_AGENT, version_id, user_id=None
)
if result:
# Transform to old format for backward compatibility
return {
"storeListingVersionId": result["contentId"],
"embedding": result["embedding"],
"createdAt": result["createdAt"],
"updatedAt": result["updatedAt"],
}
return None
async def get_content_embedding(
content_type: ContentType, content_id: str, user_id: str | None = None
) -> dict[str, Any] | None:
"""
Retrieve embedding record for any content type.
New function for unified content embedding retrieval.
Returns dict with contentType, contentId, embedding, timestamps or None if not found.
"""
try:
client = prisma.get_client()
result = await client.query_raw(
"""
SELECT
"contentType",
"contentId",
"userId",
"embedding"::text as "embedding",
"searchableText",
"metadata",
"createdAt",
"updatedAt"
FROM platform."UnifiedContentEmbedding"
WHERE "contentType" = $1 AND "contentId" = $2 AND ("userId" = $3 OR ($3 IS NULL AND "userId" IS NULL))
""",
content_type,
content_id,
user_id,
)
if result and len(result) > 0:
return result[0]
return None
except Exception as e:
logger.error(f"Failed to get embedding for {content_type}:{content_id}: {e}")
return None
async def ensure_embedding(
version_id: str,
name: str,
description: str,
sub_heading: str,
categories: list[str],
force: bool = False,
tx: prisma.Prisma | None = None,
) -> bool:
"""
Ensure an embedding exists for the listing version.
Creates embedding if missing. Use force=True to regenerate.
Backward-compatible wrapper for store listings.
Args:
version_id: The StoreListingVersion ID
name: Agent name
description: Agent description
sub_heading: Agent sub-heading
categories: Agent categories
force: Force regeneration even if embedding exists
tx: Optional transaction client
Returns:
True if embedding exists/was created, False on failure
"""
try:
# Check if embedding already exists
if not force:
existing = await get_embedding(version_id)
if existing and existing.get("embedding"):
logger.debug(f"Embedding for version {version_id} already exists")
return True
# Build searchable text for embedding
searchable_text = build_searchable_text(
name, description, sub_heading, categories
)
# Generate new embedding
embedding = await generate_embedding(searchable_text)
if embedding is None:
logger.warning(f"Could not generate embedding for version {version_id}")
return False
# Store the embedding with metadata using new function
metadata = {
"name": name,
"subHeading": sub_heading,
"categories": categories,
}
return await store_content_embedding(
content_type=ContentType.STORE_AGENT,
content_id=version_id,
embedding=embedding,
searchable_text=searchable_text,
metadata=metadata,
user_id=None, # Store agents are public
tx=tx,
)
except Exception as e:
logger.error(f"Failed to ensure embedding for version {version_id}: {e}")
return False
async def delete_embedding(version_id: str) -> bool:
"""
Delete embedding for a listing version.
BACKWARD COMPATIBILITY: Maintained for existing store listing usage.
Note: This is usually handled automatically by CASCADE delete,
but provided for manual cleanup if needed.
"""
return await delete_content_embedding(ContentType.STORE_AGENT, version_id)
async def delete_content_embedding(content_type: ContentType, content_id: str) -> bool:
"""
Delete embedding for any content type.
New function for unified content embedding deletion.
Note: This is usually handled automatically by CASCADE delete,
but provided for manual cleanup if needed.
"""
try:
client = prisma.get_client()
await client.execute_raw(
"""
DELETE FROM platform."UnifiedContentEmbedding"
WHERE "contentType" = $1 AND "contentId" = $2
""",
content_type,
content_id,
)
logger.info(f"Deleted embedding for {content_type}:{content_id}")
return True
except Exception as e:
logger.error(f"Failed to delete embedding for {content_type}:{content_id}: {e}")
return False
async def get_embedding_stats() -> dict[str, Any]:
"""
Get statistics about embedding coverage.
Returns counts of:
- Total approved listing versions
- Versions with embeddings
- Versions without embeddings
"""
try:
client = prisma.get_client()
# Count approved versions
approved_result = await client.query_raw(
"""
SELECT COUNT(*) as count
FROM platform."StoreListingVersion"
WHERE "submissionStatus" = 'APPROVED'
AND "isDeleted" = false
"""
)
total_approved = approved_result[0]["count"] if approved_result else 0
# Count versions with embeddings
embedded_result = await client.query_raw(
"""
SELECT COUNT(*) as count
FROM platform."StoreListingVersion" slv
JOIN platform."UnifiedContentEmbedding" uce ON slv.id = uce."contentId" AND uce."contentType" = 'STORE_AGENT'
WHERE slv."submissionStatus" = 'APPROVED'
AND slv."isDeleted" = false
"""
)
with_embeddings = embedded_result[0]["count"] if embedded_result else 0
return {
"total_approved": total_approved,
"with_embeddings": with_embeddings,
"without_embeddings": total_approved - with_embeddings,
"coverage_percent": (
round(with_embeddings / total_approved * 100, 1)
if total_approved > 0
else 0
),
}
except Exception as e:
logger.error(f"Failed to get embedding stats: {e}")
return {
"total_approved": 0,
"with_embeddings": 0,
"without_embeddings": 0,
"coverage_percent": 0,
"error": str(e),
}
async def backfill_missing_embeddings(batch_size: int = 10) -> dict[str, Any]:
"""
Generate embeddings for approved listings that don't have them.
Args:
batch_size: Number of embeddings to generate in one call
Returns:
Dict with success/failure counts
"""
try:
client = prisma.get_client()
# Find approved versions without embeddings
missing = await client.query_raw(
"""
SELECT
slv.id,
slv.name,
slv.description,
slv."subHeading",
slv.categories
FROM platform."StoreListingVersion" slv
LEFT JOIN platform."UnifiedContentEmbedding" uce
ON slv.id = uce."contentId" AND uce."contentType" = 'STORE_AGENT'
WHERE slv."submissionStatus" = 'APPROVED'
AND slv."isDeleted" = false
AND uce."contentId" IS NULL
LIMIT $1
""",
batch_size,
)
if not missing:
return {
"processed": 0,
"success": 0,
"failed": 0,
"message": "No missing embeddings",
}
# Process embeddings concurrently for better performance
embedding_tasks = [
ensure_embedding(
version_id=row["id"],
name=row["name"],
description=row["description"],
sub_heading=row["subHeading"],
categories=row["categories"] or [],
)
for row in missing
]
results = await asyncio.gather(*embedding_tasks, return_exceptions=True)
success = sum(1 for result in results if result is True)
failed = len(results) - success
return {
"processed": len(missing),
"success": success,
"failed": failed,
"message": f"Backfilled {success} embeddings, {failed} failed",
}
except Exception as e:
logger.error(f"Failed to backfill embeddings: {e}")
return {
"processed": 0,
"success": 0,
"failed": 0,
"error": str(e),
}
async def embed_query(query: str) -> list[float] | None:
"""
Generate embedding for a search query.
Same as generate_embedding but with clearer intent.
"""
return await generate_embedding(query)
def embedding_to_vector_string(embedding: list[float]) -> str:
"""Convert embedding list to PostgreSQL vector string format."""
return "[" + ",".join(str(x) for x in embedding) + "]"
async def ensure_content_embedding(
content_type: ContentType,
content_id: str,
searchable_text: str,
metadata: dict | None = None,
user_id: str | None = None,
force: bool = False,
tx: prisma.Prisma | None = None,
) -> bool:
"""
Ensure an embedding exists for any content type.
Generic function for creating embeddings for store agents, blocks, docs, etc.
Args:
content_type: ContentType enum value (STORE_AGENT, BLOCK, etc.)
content_id: Unique identifier for the content
searchable_text: Combined text for embedding generation
metadata: Optional metadata to store with embedding
force: Force regeneration even if embedding exists
tx: Optional transaction client
Returns:
True if embedding exists/was created, False on failure
"""
try:
# Check if embedding already exists
if not force:
existing = await get_content_embedding(content_type, content_id, user_id)
if existing and existing.get("embedding"):
logger.debug(
f"Embedding for {content_type}:{content_id} already exists"
)
return True
# Generate new embedding
embedding = await generate_embedding(searchable_text)
if embedding is None:
logger.warning(
f"Could not generate embedding for {content_type}:{content_id}"
)
return False
# Store the embedding
return await store_content_embedding(
content_type=content_type,
content_id=content_id,
embedding=embedding,
searchable_text=searchable_text,
metadata=metadata or {},
user_id=user_id,
tx=tx,
)
except Exception as e:
logger.error(f"Failed to ensure embedding for {content_type}:{content_id}: {e}")
return False
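As a small usage note on how these stored vectors are consumed: the module above serializes an embedding as a "[x,y,z]" literal that is cast with ::vector, and the hybrid search module below scores it as 1 - (embedding <=> query), which its own comments describe as cosine similarity. Below is a minimal pure-Python sketch of that equivalence, with the helper re-declared inline so it runs standalone and made-up sample vectors:

import math

def embedding_to_vector_string(embedding: list[float]) -> str:
    # Same literal format the module above builds before casting with ::vector in SQL.
    return "[" + ",".join(str(x) for x in embedding) + "]"

def cosine_similarity(a: list[float], b: list[float]) -> float:
    # Pure-Python counterpart of `1 - (a <=> b)` in the hybrid-search SQL below.
    dot = sum(x * y for x, y in zip(a, b))
    norm = math.sqrt(sum(x * x for x in a)) * math.sqrt(sum(x * x for x in b))
    return dot / norm if norm else 0.0

query_vec = [0.1, 0.2, 0.3]    # made-up query embedding (real ones have 1536 dimensions)
agent_vec = [0.1, 0.25, 0.28]  # made-up stored embedding
print(embedding_to_vector_string(query_vec))              # [0.1,0.2,0.3]
print(round(cosine_similarity(query_vec, agent_vec), 3))  # ~0.99, a strong semantic match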

View File

@@ -1,359 +0,0 @@
from unittest.mock import MagicMock, patch
import prisma
import pytest
from prisma import Prisma
from prisma.enums import ContentType
from backend.api.features.store import embeddings
@pytest.fixture(autouse=True)
async def setup_prisma():
"""Setup Prisma client for tests."""
try:
Prisma()
except prisma.errors.ClientAlreadyRegisteredError:
pass
yield
@pytest.mark.asyncio(loop_scope="session")
async def test_build_searchable_text():
"""Test searchable text building from listing fields."""
result = embeddings.build_searchable_text(
name="AI Assistant",
description="A helpful AI assistant for productivity",
sub_heading="Boost your productivity",
categories=["AI", "Productivity"],
)
expected = "AI Assistant Boost your productivity A helpful AI assistant for productivity AI Productivity"
assert result == expected
@pytest.mark.asyncio(loop_scope="session")
async def test_build_searchable_text_empty_fields():
"""Test searchable text building with empty fields."""
result = embeddings.build_searchable_text(
name="", description="Test description", sub_heading="", categories=[]
)
assert result == "Test description"
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.OpenAI")
async def test_generate_embedding_success(mock_openai_class):
"""Test successful embedding generation."""
# Mock OpenAI response
mock_client = MagicMock()
mock_response = MagicMock()
mock_response.data = [MagicMock()]
mock_response.data[0].embedding = [0.1, 0.2, 0.3] * 512 # 1536 dimensions
mock_client.embeddings.create.return_value = mock_response
mock_openai_class.return_value = mock_client
with patch("backend.api.features.store.embeddings.Settings") as mock_settings:
mock_settings.return_value.secrets.openai_internal_api_key = "test-key"
result = await embeddings.generate_embedding("test text")
assert result is not None
assert len(result) == 1536
assert result[0] == 0.1
mock_client.embeddings.create.assert_called_once_with(
model="text-embedding-3-small", input="test text"
)
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.OpenAI")
async def test_generate_embedding_no_api_key(mock_openai_class):
"""Test embedding generation without API key."""
with patch("backend.api.features.store.embeddings.Settings") as mock_settings:
mock_settings.return_value.secrets.openai_internal_api_key = ""
result = await embeddings.generate_embedding("test text")
assert result is None
mock_openai_class.assert_not_called()
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.OpenAI")
async def test_generate_embedding_api_error(mock_openai_class):
"""Test embedding generation with API error."""
mock_client = MagicMock()
mock_client.embeddings.create.side_effect = Exception("API Error")
mock_openai_class.return_value = mock_client
with patch("backend.api.features.store.embeddings.Settings") as mock_settings:
mock_settings.return_value.secrets.openai_internal_api_key = "test-key"
result = await embeddings.generate_embedding("test text")
assert result is None
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.OpenAI")
async def test_generate_embedding_text_truncation(mock_openai_class):
"""Test that long text is properly truncated."""
mock_client = MagicMock()
mock_response = MagicMock()
mock_response.data = [MagicMock()]
mock_response.data[0].embedding = [0.1] * 1536
mock_client.embeddings.create.return_value = mock_response
mock_openai_class.return_value = mock_client
# Create text longer than 32k chars
long_text = "a" * 35000
with patch("backend.api.features.store.embeddings.Settings") as mock_settings:
mock_settings.return_value.secrets.openai_internal_api_key = "test-key"
await embeddings.generate_embedding(long_text)
# Verify truncated text was sent to API
call_args = mock_client.embeddings.create.call_args
assert len(call_args.kwargs["input"]) == 32000
@pytest.mark.asyncio(loop_scope="session")
async def test_store_embedding_success(mocker):
"""Test successful embedding storage."""
mock_client = mocker.AsyncMock()
mock_client.execute_raw = mocker.AsyncMock()
embedding = [0.1, 0.2, 0.3]
result = await embeddings.store_embedding(
version_id="test-version-id", embedding=embedding, tx=mock_client
)
assert result is True
mock_client.execute_raw.assert_called_once()
call_args = mock_client.execute_raw.call_args[0]
assert "test-version-id" in call_args
assert "[0.1,0.2,0.3]" in call_args
assert None in call_args # userId should be None for store agents
@pytest.mark.asyncio(loop_scope="session")
async def test_store_embedding_database_error(mocker):
"""Test embedding storage with database error."""
mock_client = mocker.AsyncMock()
mock_client.execute_raw.side_effect = Exception("Database error")
embedding = [0.1, 0.2, 0.3]
result = await embeddings.store_embedding(
version_id="test-version-id", embedding=embedding, tx=mock_client
)
assert result is False
@pytest.mark.asyncio(loop_scope="session")
async def test_get_embedding_success(mocker):
"""Test successful embedding retrieval."""
mock_client = mocker.AsyncMock()
mock_result = [
{
"contentType": "STORE_AGENT",
"contentId": "test-version-id",
"embedding": "[0.1,0.2,0.3]",
"searchableText": "Test text",
"metadata": {},
"createdAt": "2024-01-01T00:00:00Z",
"updatedAt": "2024-01-01T00:00:00Z",
}
]
mock_client.query_raw.return_value = mock_result
with patch("prisma.get_client", return_value=mock_client):
result = await embeddings.get_embedding("test-version-id")
assert result is not None
assert result["storeListingVersionId"] == "test-version-id"
assert result["embedding"] == "[0.1,0.2,0.3]"
@pytest.mark.asyncio(loop_scope="session")
async def test_get_embedding_not_found(mocker):
"""Test embedding retrieval when not found."""
mock_client = mocker.AsyncMock()
mock_client.query_raw.return_value = []
with patch("prisma.get_client", return_value=mock_client):
result = await embeddings.get_embedding("test-version-id")
assert result is None
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.generate_embedding")
@patch("backend.api.features.store.embeddings.store_embedding")
@patch("backend.api.features.store.embeddings.get_embedding")
async def test_ensure_embedding_already_exists(mock_get, mock_store, mock_generate):
"""Test ensure_embedding when embedding already exists."""
mock_get.return_value = {"embedding": "[0.1,0.2,0.3]"}
result = await embeddings.ensure_embedding(
version_id="test-id",
name="Test",
description="Test description",
sub_heading="Test heading",
categories=["test"],
)
assert result is True
mock_generate.assert_not_called()
mock_store.assert_not_called()
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.generate_embedding")
@patch("backend.api.features.store.embeddings.store_content_embedding")
@patch("backend.api.features.store.embeddings.get_embedding")
async def test_ensure_embedding_create_new(mock_get, mock_store, mock_generate):
"""Test ensure_embedding creating new embedding."""
mock_get.return_value = None
mock_generate.return_value = [0.1, 0.2, 0.3]
mock_store.return_value = True
result = await embeddings.ensure_embedding(
version_id="test-id",
name="Test",
description="Test description",
sub_heading="Test heading",
categories=["test"],
)
assert result is True
mock_generate.assert_called_once_with("Test Test heading Test description test")
mock_store.assert_called_once_with(
content_type=ContentType.STORE_AGENT,
content_id="test-id",
embedding=[0.1, 0.2, 0.3],
searchable_text="Test Test heading Test description test",
metadata={"name": "Test", "subHeading": "Test heading", "categories": ["test"]},
user_id=None,
tx=None,
)
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.generate_embedding")
@patch("backend.api.features.store.embeddings.get_embedding")
async def test_ensure_embedding_generation_fails(mock_get, mock_generate):
"""Test ensure_embedding when generation fails."""
mock_get.return_value = None
mock_generate.return_value = None
result = await embeddings.ensure_embedding(
version_id="test-id",
name="Test",
description="Test description",
sub_heading="Test heading",
categories=["test"],
)
assert result is False
@pytest.mark.asyncio(loop_scope="session")
async def test_get_embedding_stats(mocker):
"""Test embedding statistics retrieval."""
mock_client = mocker.AsyncMock()
# Mock approved count query
mock_approved_result = [{"count": 100}]
# Mock embedded count query
mock_embedded_result = [{"count": 75}]
mock_client.query_raw.side_effect = [mock_approved_result, mock_embedded_result]
with patch("prisma.get_client", return_value=mock_client):
result = await embeddings.get_embedding_stats()
assert result["total_approved"] == 100
assert result["with_embeddings"] == 75
assert result["without_embeddings"] == 25
assert result["coverage_percent"] == 75.0
@pytest.mark.asyncio(loop_scope="session")
@patch("backend.api.features.store.embeddings.ensure_embedding")
async def test_backfill_missing_embeddings_success(mock_ensure, mocker):
"""Test backfill with successful embedding generation."""
mock_client = mocker.AsyncMock()
# Mock missing embeddings query
mock_missing = [
{
"id": "version-1",
"name": "Agent 1",
"description": "Description 1",
"subHeading": "Heading 1",
"categories": ["AI"],
},
{
"id": "version-2",
"name": "Agent 2",
"description": "Description 2",
"subHeading": "Heading 2",
"categories": ["Productivity"],
},
]
mock_client.query_raw.return_value = mock_missing
# Mock ensure_embedding to succeed for first, fail for second
mock_ensure.side_effect = [True, False]
with patch("prisma.get_client", return_value=mock_client):
result = await embeddings.backfill_missing_embeddings(batch_size=5)
assert result["processed"] == 2
assert result["success"] == 1
assert result["failed"] == 1
assert mock_ensure.call_count == 2
@pytest.mark.asyncio(loop_scope="session")
async def test_backfill_missing_embeddings_no_missing(mocker):
"""Test backfill when no embeddings are missing."""
mock_client = mocker.AsyncMock()
mock_client.query_raw.return_value = []
with patch("prisma.get_client", return_value=mock_client):
result = await embeddings.backfill_missing_embeddings(batch_size=5)
assert result["processed"] == 0
assert result["success"] == 0
assert result["failed"] == 0
assert result["message"] == "No missing embeddings"
@pytest.mark.asyncio(loop_scope="session")
async def test_embedding_to_vector_string():
"""Test embedding to PostgreSQL vector string conversion."""
embedding = [0.1, 0.2, 0.3, -0.4]
result = embeddings.embedding_to_vector_string(embedding)
assert result == "[0.1,0.2,0.3,-0.4]"
@pytest.mark.asyncio(loop_scope="session")
async def test_embed_query():
"""Test embed_query function (alias for generate_embedding)."""
with patch(
"backend.api.features.store.embeddings.generate_embedding"
) as mock_generate:
mock_generate.return_value = [0.1, 0.2, 0.3]
result = await embeddings.embed_query("test query")
assert result == [0.1, 0.2, 0.3]
mock_generate.assert_called_once_with("test query")

View File

@@ -1,377 +0,0 @@
"""
Hybrid Search for Store Agents
Combines semantic (embedding) search with lexical (tsvector) search
for improved relevance in marketplace agent discovery.
"""
import logging
from dataclasses import dataclass
from datetime import datetime
from typing import Any, Literal
from backend.api.features.store.embeddings import (
embed_query,
embedding_to_vector_string,
)
from backend.data.db import query_raw_with_schema
logger = logging.getLogger(__name__)
@dataclass
class HybridSearchWeights:
"""Weights for combining search signals."""
semantic: float = 0.35 # Embedding cosine similarity
lexical: float = 0.35 # tsvector ts_rank_cd score
category: float = 0.20 # Category match boost
recency: float = 0.10 # Newer agents ranked higher
DEFAULT_WEIGHTS = HybridSearchWeights()
# Minimum relevance score threshold - agents below this are filtered out
# With weights (0.35 semantic + 0.35 lexical + 0.20 category + 0.10 recency):
# - 0.20 means at least ~50% semantic match OR strong lexical match required
# - Ensures only genuinely relevant results are returned
# - Recency alone (0.10 max) won't pass the threshold
DEFAULT_MIN_SCORE = 0.20
@dataclass
class HybridSearchResult:
"""A single search result with score breakdown."""
slug: str
agent_name: str
agent_image: str
creator_username: str
creator_avatar: str
sub_heading: str
description: str
runs: int
rating: float
categories: list[str]
featured: bool
is_available: bool
updated_at: datetime
# Score breakdown (for debugging/tuning)
combined_score: float
semantic_score: float = 0.0
lexical_score: float = 0.0
category_score: float = 0.0
recency_score: float = 0.0
async def hybrid_search(
query: str,
featured: bool = False,
creators: list[str] | None = None,
category: str | None = None,
sorted_by: (
Literal["relevance", "rating", "runs", "name", "updated_at"] | None
) = None,
page: int = 1,
page_size: int = 20,
weights: HybridSearchWeights | None = None,
min_score: float | None = None,
) -> tuple[list[dict[str, Any]], int]:
"""
Perform hybrid search combining semantic and lexical signals.
Args:
query: Search query string
featured: Filter for featured agents only
creators: Filter by creator usernames
category: Filter by category
sorted_by: Sort order (relevance uses hybrid scoring)
page: Page number (1-indexed)
page_size: Results per page
weights: Custom weights for search signals
min_score: Minimum relevance score threshold (0-1). Results below
this score are filtered out. Defaults to DEFAULT_MIN_SCORE.
Returns:
Tuple of (results list, total count). Returns empty list if no
results meet the minimum relevance threshold.
"""
if weights is None:
weights = DEFAULT_WEIGHTS
if min_score is None:
min_score = DEFAULT_MIN_SCORE
offset = (page - 1) * page_size
# Generate query embedding
query_embedding = await embed_query(query)
# Build WHERE clause conditions
where_parts: list[str] = ["sa.is_available = true"]
params: list[Any] = []
param_index = 1
# Add search query for lexical matching
params.append(query)
query_param = f"${param_index}"
param_index += 1
# Add lowercased query for category matching
params.append(query.lower())
query_lower_param = f"${param_index}"
param_index += 1
if featured:
where_parts.append("sa.featured = true")
if creators:
where_parts.append(f"sa.creator_username = ANY(${param_index})")
params.append(creators)
param_index += 1
if category:
where_parts.append(f"${param_index} = ANY(sa.categories)")
params.append(category)
param_index += 1
where_clause = " AND ".join(where_parts)
# Determine if we can use hybrid search (have query embedding)
use_hybrid = query_embedding is not None
if use_hybrid:
# Add embedding parameter
embedding_str = embedding_to_vector_string(query_embedding)
params.append(embedding_str)
embedding_param = f"${param_index}"
param_index += 1
# Optimized hybrid search query:
# 1. Direct join to UnifiedContentEmbedding via contentId=storeListingVersionId (no redundant JOINs)
# 2. UNION ALL approach to enable index usage for both lexical and semantic branches
# 3. COUNT(*) OVER() to get total count in single query
# 4. Simplified category matching with array_to_string
sql_query = f"""
WITH candidates AS (
-- Lexical matches (uses GIN index on search column)
SELECT DISTINCT sa."storeListingVersionId"
FROM {{schema_prefix}}"StoreAgent" sa
WHERE {where_clause}
AND sa.search @@ plainto_tsquery('english', {query_param})
UNION
-- Semantic matches (uses HNSW index on embedding)
SELECT DISTINCT sa."storeListingVersionId"
FROM {{schema_prefix}}"StoreAgent" sa
INNER JOIN {{schema_prefix}}"UnifiedContentEmbedding" uce
ON sa."storeListingVersionId" = uce."contentId" AND uce."contentType" = 'STORE_AGENT'
WHERE {where_clause}
),
search_scores AS (
SELECT
sa.slug,
sa.agent_name,
sa.agent_image,
sa.creator_username,
sa.creator_avatar,
sa.sub_heading,
sa.description,
sa.runs,
sa.rating,
sa.categories,
sa.featured,
sa.is_available,
sa.updated_at,
-- Semantic score: cosine similarity (1 - distance)
COALESCE(1 - (uce.embedding <=> {embedding_param}::vector), 0) as semantic_score,
-- Lexical score: ts_rank_cd (will be normalized later)
COALESCE(ts_rank_cd(sa.search, plainto_tsquery('english', {query_param})), 0) as lexical_raw,
-- Category match: check if query appears in any category
CASE
WHEN LOWER(array_to_string(sa.categories, ' ')) LIKE '%' || {query_lower_param} || '%'
THEN 1.0
ELSE 0.0
END as category_score,
-- Recency score: exponential decay over 90 days
EXP(-EXTRACT(EPOCH FROM (NOW() - sa.updated_at)) / (90 * 24 * 3600)) as recency_score
FROM candidates c
INNER JOIN {{schema_prefix}}"StoreAgent" sa
ON c."storeListingVersionId" = sa."storeListingVersionId"
LEFT JOIN {{schema_prefix}}"UnifiedContentEmbedding" uce
ON sa."storeListingVersionId" = uce."contentId" AND uce."contentType" = 'STORE_AGENT'
),
normalized AS (
SELECT
*,
-- Normalize lexical score by max in result set
CASE
WHEN MAX(lexical_raw) OVER () > 0
THEN lexical_raw / MAX(lexical_raw) OVER ()
ELSE 0
END as lexical_score
FROM search_scores
),
scored AS (
SELECT
slug,
agent_name,
agent_image,
creator_username,
creator_avatar,
sub_heading,
description,
runs,
rating,
categories,
featured,
is_available,
updated_at,
semantic_score,
lexical_score,
category_score,
recency_score,
(
{weights.semantic} * semantic_score +
{weights.lexical} * lexical_score +
{weights.category} * category_score +
{weights.recency} * recency_score
) as combined_score
FROM normalized
),
filtered AS (
SELECT
*,
COUNT(*) OVER () as total_count
FROM scored
WHERE combined_score >= {min_score}
)
SELECT * FROM filtered
ORDER BY combined_score DESC
LIMIT ${param_index} OFFSET ${param_index + 1}
"""
# Add pagination params
params.extend([page_size, offset])
else:
# Fallback to lexical-only search (existing behavior)
logger.warning("Falling back to lexical-only search (no query embedding)")
sql_query = f"""
WITH lexical_scores AS (
SELECT
slug,
agent_name,
agent_image,
creator_username,
creator_avatar,
sub_heading,
description,
runs,
rating,
categories,
featured,
is_available,
updated_at,
0.0 as semantic_score,
ts_rank_cd(search, plainto_tsquery('english', {query_param})) as lexical_raw,
CASE
WHEN LOWER(array_to_string(categories, ' ')) LIKE '%' || {query_lower_param} || '%'
THEN 1.0
ELSE 0.0
END as category_score,
EXP(-EXTRACT(EPOCH FROM (NOW() - updated_at)) / (90 * 24 * 3600)) as recency_score
FROM {{schema_prefix}}"StoreAgent" sa
WHERE {where_clause}
AND search @@ plainto_tsquery('english', {query_param})
),
normalized AS (
SELECT
*,
CASE
WHEN MAX(lexical_raw) OVER () > 0
THEN lexical_raw / MAX(lexical_raw) OVER ()
ELSE 0
END as lexical_score
FROM lexical_scores
),
scored AS (
SELECT
slug,
agent_name,
agent_image,
creator_username,
creator_avatar,
sub_heading,
description,
runs,
rating,
categories,
featured,
is_available,
updated_at,
semantic_score,
lexical_score,
category_score,
recency_score,
(
{weights.lexical} * lexical_score +
{weights.category} * category_score +
{weights.recency} * recency_score
) as combined_score
FROM normalized
),
filtered AS (
SELECT
*,
COUNT(*) OVER () as total_count
FROM scored
WHERE combined_score >= {min_score}
)
SELECT * FROM filtered
ORDER BY combined_score DESC
LIMIT ${param_index} OFFSET ${param_index + 1}
"""
params.extend([page_size, offset])
try:
# Execute search query - includes total_count via window function
results = await query_raw_with_schema(sql_query, *params)
# Extract total count from first result (all rows have same count)
total = results[0]["total_count"] if results else 0
# Remove total_count from results before returning
for result in results:
result.pop("total_count", None)
logger.info(
f"Hybrid search for '{query}': {len(results)} results, {total} total "
f"(hybrid={use_hybrid})"
)
return results, total
except Exception as e:
logger.error(f"Hybrid search failed: {e}")
raise
async def hybrid_search_simple(
query: str,
page: int = 1,
page_size: int = 20,
) -> tuple[list[dict[str, Any]], int]:
"""
Simplified hybrid search for common use cases.
Uses default weights and no filters.
"""
return await hybrid_search(
query=query,
page=page,
page_size=page_size,
)
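The combined score above is a weighted sum of four normalized components, with recency decaying exponentially on a 90-day time constant. Below is a minimal sketch of the same arithmetic outside SQL; the weight values are assumed for illustration only (the real defaults live in the weights object and are not shown in this excerpt).

import math
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone

@dataclass
class Weights:
    # Assumed, illustrative values - not the actual defaults.
    semantic: float = 0.5
    lexical: float = 0.3
    category: float = 0.1
    recency: float = 0.1

def combined_score(
    semantic: float,
    lexical: float,
    category: float,
    updated_at: datetime,
    w: Weights = Weights(),
) -> float:
    # Mirrors EXP(-EXTRACT(EPOCH FROM (NOW() - updated_at)) / (90 * 24 * 3600)):
    # an item updated 90 days ago scores e^-1 ~= 0.37 on recency.
    age_seconds = (datetime.now(timezone.utc) - updated_at).total_seconds()
    recency = math.exp(-age_seconds / (90 * 24 * 3600))
    return (
        w.semantic * semantic
        + w.lexical * lexical
        + w.category * category
        + w.recency * recency
    )

# Example: strong semantic match, modest lexical match, category hit, 90 days old.
print(combined_score(0.82, 0.40, 1.0, datetime.now(timezone.utc) - timedelta(days=90)))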

View File

@@ -110,7 +110,6 @@ class Profile(pydantic.BaseModel):
class StoreSubmission(pydantic.BaseModel):
listing_id: str
agent_id: str
agent_version: int
name: str
@@ -165,12 +164,8 @@ class StoreListingsWithVersionsResponse(pydantic.BaseModel):
class StoreSubmissionRequest(pydantic.BaseModel):
agent_id: str = pydantic.Field(
..., min_length=1, description="Agent ID cannot be empty"
)
agent_version: int = pydantic.Field(
..., gt=0, description="Agent version must be greater than 0"
)
agent_id: str
agent_version: int
slug: str
name: str
sub_heading: str

View File

@@ -138,7 +138,6 @@ def test_creator_details():
def test_store_submission():
submission = store_model.StoreSubmission(
listing_id="listing123",
agent_id="agent123",
agent_version=1,
sub_heading="Test subheading",
@@ -160,7 +159,6 @@ def test_store_submissions_response():
response = store_model.StoreSubmissionsResponse(
submissions=[
store_model.StoreSubmission(
listing_id="listing123",
agent_id="agent123",
agent_version=1,
sub_heading="Test subheading",

View File

@@ -521,7 +521,6 @@ def test_get_submissions_success(
mocked_value = store_model.StoreSubmissionsResponse(
submissions=[
store_model.StoreSubmission(
listing_id="test-listing-id",
name="Test Agent",
description="Test agent description",
image_urls=["test.jpg"],

View File

@@ -6,9 +6,6 @@ import hashlib
import hmac
import logging
from enum import Enum
from typing import cast
from prisma.types import Serializable
from backend.sdk import (
BaseWebhooksManager,
@@ -87,9 +84,7 @@ class AirtableWebhookManager(BaseWebhooksManager):
# update webhook config
await update_webhook(
webhook.id,
config=cast(
dict[str, Serializable], {"base_id": base_id, "cursor": response.cursor}
),
config={"base_id": base_id, "cursor": response.cursor},
)
event_type = "notification"

View File

@@ -975,28 +975,10 @@ class SmartDecisionMakerBlock(Block):
graph_version: int,
execution_context: ExecutionContext,
execution_processor: "ExecutionProcessor",
nodes_to_skip: set[str] | None = None,
**kwargs,
) -> BlockOutput:
tool_functions = await self._create_tool_node_signatures(node_id)
original_tool_count = len(tool_functions)
# Filter out tools for nodes that should be skipped (e.g., missing optional credentials)
if nodes_to_skip:
tool_functions = [
tf
for tf in tool_functions
if tf.get("function", {}).get("_sink_node_id") not in nodes_to_skip
]
# Only raise error if we had tools but they were all filtered out
if original_tool_count > 0 and not tool_functions:
raise ValueError(
"No available tools to execute - all downstream nodes are unavailable "
"(possibly due to missing optional credentials)"
)
yield "tool_functions", json.dumps(tool_functions)
conversation_history = input_data.conversation_history or []

View File

@@ -383,7 +383,6 @@ class GraphExecutionWithNodes(GraphExecution):
self,
execution_context: ExecutionContext,
compiled_nodes_input_masks: Optional[NodesInputMasks] = None,
nodes_to_skip: Optional[set[str]] = None,
):
return GraphExecutionEntry(
user_id=self.user_id,
@@ -391,7 +390,6 @@ class GraphExecutionWithNodes(GraphExecution):
graph_version=self.graph_version or 0,
graph_exec_id=self.id,
nodes_input_masks=compiled_nodes_input_masks,
nodes_to_skip=nodes_to_skip or set(),
execution_context=execution_context,
)
@@ -1147,8 +1145,6 @@ class GraphExecutionEntry(BaseModel):
graph_id: str
graph_version: int
nodes_input_masks: Optional[NodesInputMasks] = None
nodes_to_skip: set[str] = Field(default_factory=set)
"""Node IDs that should be skipped due to optional credentials not being configured."""
execution_context: ExecutionContext = Field(default_factory=ExecutionContext)

View File

@@ -94,15 +94,6 @@ class Node(BaseDbModel):
input_links: list[Link] = []
output_links: list[Link] = []
@property
def credentials_optional(self) -> bool:
"""
Whether credentials are optional for this node.
When True and credentials are not configured, the node will be skipped
during execution rather than causing a validation error.
"""
return self.metadata.get("credentials_optional", False)
@property
def block(self) -> AnyBlockSchema | "_UnknownBlockBase":
"""Get the block for this node. Returns UnknownBlock if block is deleted/missing."""
@@ -335,35 +326,7 @@ class Graph(BaseGraph):
@computed_field
@property
def credentials_input_schema(self) -> dict[str, Any]:
schema = self._credentials_input_schema.jsonschema()
# Determine which credential fields are required based on credentials_optional metadata
graph_credentials_inputs = self.aggregate_credentials_inputs()
required_fields = []
# Build a map of node_id -> node for quick lookup
all_nodes = {node.id: node for node in self.nodes}
for sub_graph in self.sub_graphs:
for node in sub_graph.nodes:
all_nodes[node.id] = node
for field_key, (
_field_info,
node_field_pairs,
) in graph_credentials_inputs.items():
# A field is required if ANY node using it has credentials_optional=False
is_required = False
for node_id, _field_name in node_field_pairs:
node = all_nodes.get(node_id)
if node and not node.credentials_optional:
is_required = True
break
if is_required:
required_fields.append(field_key)
schema["required"] = required_fields
return schema
return self._credentials_input_schema.jsonschema()
@property
def _credentials_input_schema(self) -> type[BlockSchema]:

View File

@@ -396,58 +396,3 @@ async def test_access_store_listing_graph(server: SpinTestServer):
created_graph.id, created_graph.version, "3e53486c-cf57-477e-ba2a-cb02dc828e1b"
)
assert got_graph is not None
# ============================================================================
# Tests for Optional Credentials Feature
# ============================================================================
def test_node_credentials_optional_default():
"""Test that credentials_optional defaults to False when not set in metadata."""
node = Node(
id="test_node",
block_id=StoreValueBlock().id,
input_default={},
metadata={},
)
assert node.credentials_optional is False
def test_node_credentials_optional_true():
"""Test that credentials_optional returns True when explicitly set."""
node = Node(
id="test_node",
block_id=StoreValueBlock().id,
input_default={},
metadata={"credentials_optional": True},
)
assert node.credentials_optional is True
def test_node_credentials_optional_false():
"""Test that credentials_optional returns False when explicitly set to False."""
node = Node(
id="test_node",
block_id=StoreValueBlock().id,
input_default={},
metadata={"credentials_optional": False},
)
assert node.credentials_optional is False
def test_node_credentials_optional_with_other_metadata():
"""Test that credentials_optional works correctly with other metadata present."""
node = Node(
id="test_node",
block_id=StoreValueBlock().id,
input_default={},
metadata={
"position": {"x": 100, "y": 200},
"customized_name": "My Custom Node",
"credentials_optional": True,
},
)
assert node.credentials_optional is True
assert node.metadata["position"] == {"x": 100, "y": 200}
assert node.metadata["customized_name"] == "My Custom Node"

View File

@@ -178,7 +178,6 @@ async def execute_node(
execution_processor: "ExecutionProcessor",
execution_stats: NodeExecutionStats | None = None,
nodes_input_masks: Optional[NodesInputMasks] = None,
nodes_to_skip: Optional[set[str]] = None,
) -> BlockOutput:
"""
Execute a node in the graph. This will trigger a block execution on a node,
@@ -246,7 +245,6 @@ async def execute_node(
"user_id": user_id,
"execution_context": execution_context,
"execution_processor": execution_processor,
"nodes_to_skip": nodes_to_skip or set(),
}
# Last-minute fetch credentials + acquire a system-wide read-write lock to prevent
@@ -544,7 +542,6 @@ class ExecutionProcessor:
node_exec_progress: NodeExecutionProgress,
nodes_input_masks: Optional[NodesInputMasks],
graph_stats_pair: tuple[GraphExecutionStats, threading.Lock],
nodes_to_skip: Optional[set[str]] = None,
) -> NodeExecutionStats:
log_metadata = LogMetadata(
logger=_logger,
@@ -567,7 +564,6 @@ class ExecutionProcessor:
db_client=db_client,
log_metadata=log_metadata,
nodes_input_masks=nodes_input_masks,
nodes_to_skip=nodes_to_skip,
)
if isinstance(status, BaseException):
raise status
@@ -613,7 +609,6 @@ class ExecutionProcessor:
db_client: "DatabaseManagerAsyncClient",
log_metadata: LogMetadata,
nodes_input_masks: Optional[NodesInputMasks] = None,
nodes_to_skip: Optional[set[str]] = None,
) -> ExecutionStatus:
status = ExecutionStatus.RUNNING
@@ -650,7 +645,6 @@ class ExecutionProcessor:
execution_processor=self,
execution_stats=stats,
nodes_input_masks=nodes_input_masks,
nodes_to_skip=nodes_to_skip,
):
await persist_output(output_name, output_data)
@@ -962,21 +956,6 @@ class ExecutionProcessor:
queued_node_exec = execution_queue.get()
# Check if this node should be skipped due to optional credentials
if queued_node_exec.node_id in graph_exec.nodes_to_skip:
log_metadata.info(
f"Skipping node execution {queued_node_exec.node_exec_id} "
f"for node {queued_node_exec.node_id} - optional credentials not configured"
)
# Mark the node as completed without executing
# No outputs will be produced, so downstream nodes won't trigger
update_node_execution_status(
db_client=db_client,
exec_id=queued_node_exec.node_exec_id,
status=ExecutionStatus.COMPLETED,
)
continue
log_metadata.debug(
f"Dispatching node execution {queued_node_exec.node_exec_id} "
f"for node {queued_node_exec.node_id}",
@@ -1037,7 +1016,6 @@ class ExecutionProcessor:
execution_stats,
execution_stats_lock,
),
nodes_to_skip=graph_exec.nodes_to_skip,
),
self.node_execution_loop,
)

View File

@@ -239,19 +239,14 @@ async def _validate_node_input_credentials(
graph: GraphModel,
user_id: str,
nodes_input_masks: Optional[NodesInputMasks] = None,
) -> tuple[dict[str, dict[str, str]], set[str]]:
) -> dict[str, dict[str, str]]:
"""
Checks all credentials for all nodes of the graph and returns structured errors
and a set of nodes that should be skipped due to optional missing credentials.
Checks all credentials for all nodes of the graph and returns structured errors.
Returns:
tuple[
dict[node_id, dict[field_name, error_message]]: Credential validation errors per node,
set[node_id]: Nodes that should be skipped (optional credentials not configured)
]
dict[node_id, dict[field_name, error_message]]: Credential validation errors per node
"""
credential_errors: dict[str, dict[str, str]] = defaultdict(dict)
nodes_to_skip: set[str] = set()
for node in graph.nodes:
block = node.block
@@ -261,46 +256,27 @@ async def _validate_node_input_credentials(
if not credentials_fields:
continue
# Track if any credential field is missing for this node
has_missing_credentials = False
for field_name, credentials_meta_type in credentials_fields.items():
try:
# Check nodes_input_masks first, then input_default
field_value = None
if (
nodes_input_masks
and (node_input_mask := nodes_input_masks.get(node.id))
and field_name in node_input_mask
):
field_value = node_input_mask[field_name]
credentials_meta = credentials_meta_type.model_validate(
node_input_mask[field_name]
)
elif field_name in node.input_default:
# For optional credentials, don't use input_default - treat as missing
# This prevents stale credential IDs from failing validation
if node.credentials_optional:
field_value = None
else:
field_value = node.input_default[field_name]
# Check if credentials are missing (None, empty, or not present)
if field_value is None or (
isinstance(field_value, dict) and not field_value.get("id")
):
has_missing_credentials = True
# If node has credentials_optional flag, mark for skipping instead of error
if node.credentials_optional:
continue # Don't add error, will be marked for skip after loop
else:
credential_errors[node.id][
field_name
] = "These credentials are required"
continue
credentials_meta = credentials_meta_type.model_validate(field_value)
credentials_meta = credentials_meta_type.model_validate(
node.input_default[field_name]
)
else:
# Missing credentials
credential_errors[node.id][
field_name
] = "These credentials are required"
continue
except ValidationError as e:
# Validation error means credentials were provided but invalid
# This should always be an error, even if optional
credential_errors[node.id][field_name] = f"Invalid credentials: {e}"
continue
@@ -311,7 +287,6 @@ async def _validate_node_input_credentials(
)
except Exception as e:
# Handle any errors fetching credentials
# If credentials were explicitly configured but unavailable, it's an error
credential_errors[node.id][
field_name
] = f"Credentials not available: {e}"
@@ -338,19 +313,7 @@ async def _validate_node_input_credentials(
] = "Invalid credentials: type/provider mismatch"
continue
# If node has optional credentials and any are missing, mark for skipping
# But only if there are no other errors for this node
if (
has_missing_credentials
and node.credentials_optional
and node.id not in credential_errors
):
nodes_to_skip.add(node.id)
logger.info(
f"Node #{node.id} will be skipped: optional credentials not configured"
)
return credential_errors, nodes_to_skip
return credential_errors
def make_node_credentials_input_map(
@@ -392,25 +355,21 @@ async def validate_graph_with_credentials(
graph: GraphModel,
user_id: str,
nodes_input_masks: Optional[NodesInputMasks] = None,
) -> tuple[Mapping[str, Mapping[str, str]], set[str]]:
) -> Mapping[str, Mapping[str, str]]:
"""
Validate graph including credentials and return structured errors per node,
along with a set of nodes that should be skipped due to optional missing credentials.
Validate graph including credentials and return structured errors per node.
Returns:
tuple[
dict[node_id, dict[field_name, error_message]]: Validation errors per node,
set[node_id]: Nodes that should be skipped (optional credentials not configured)
]
dict[node_id, dict[field_name, error_message]]: Validation errors per node
"""
# Get input validation errors
node_input_errors = GraphModel.validate_graph_get_errors(
graph, for_run=True, nodes_input_masks=nodes_input_masks
)
# Get credential input/availability/validation errors and nodes to skip
node_credential_input_errors, nodes_to_skip = (
await _validate_node_input_credentials(graph, user_id, nodes_input_masks)
# Get credential input/availability/validation errors
node_credential_input_errors = await _validate_node_input_credentials(
graph, user_id, nodes_input_masks
)
# Merge credential errors with structural errors
@@ -419,7 +378,7 @@ async def validate_graph_with_credentials(
node_input_errors[node_id] = {}
node_input_errors[node_id].update(field_errors)
return node_input_errors, nodes_to_skip
return node_input_errors
async def _construct_starting_node_execution_input(
@@ -427,7 +386,7 @@ async def _construct_starting_node_execution_input(
user_id: str,
graph_inputs: BlockInput,
nodes_input_masks: Optional[NodesInputMasks] = None,
) -> tuple[list[tuple[str, BlockInput]], set[str]]:
) -> list[tuple[str, BlockInput]]:
"""
Validates and prepares the input data for executing a graph.
This function checks the graph for starting nodes, validates the input data
@@ -441,14 +400,11 @@ async def _construct_starting_node_execution_input(
node_credentials_map: `dict[node_id, dict[input_name, CredentialsMetaInput]]`
Returns:
tuple[
list[tuple[str, BlockInput]]: A list of tuples, each containing the node ID
and the corresponding input data for that node.
set[str]: Node IDs that should be skipped (optional credentials not configured)
]
list[tuple[str, BlockInput]]: A list of tuples, each containing the node ID and
the corresponding input data for that node.
"""
# Use new validation function that includes credentials
validation_errors, nodes_to_skip = await validate_graph_with_credentials(
validation_errors = await validate_graph_with_credentials(
graph, user_id, nodes_input_masks
)
n_error_nodes = len(validation_errors)
@@ -489,7 +445,7 @@ async def _construct_starting_node_execution_input(
"No starting nodes found for the graph, make sure an AgentInput or blocks with no inbound links are present as starting nodes."
)
return nodes_input, nodes_to_skip
return nodes_input
async def validate_and_construct_node_execution_input(
@@ -500,7 +456,7 @@ async def validate_and_construct_node_execution_input(
graph_credentials_inputs: Optional[Mapping[str, CredentialsMetaInput]] = None,
nodes_input_masks: Optional[NodesInputMasks] = None,
is_sub_graph: bool = False,
) -> tuple[GraphModel, list[tuple[str, BlockInput]], NodesInputMasks, set[str]]:
) -> tuple[GraphModel, list[tuple[str, BlockInput]], NodesInputMasks]:
"""
Public wrapper that handles graph fetching, credential mapping, and validation+construction.
This centralizes the logic used by both scheduler validation and actual execution.
@@ -517,7 +473,6 @@ async def validate_and_construct_node_execution_input(
GraphModel: Full graph object for the given `graph_id`.
list[tuple[node_id, BlockInput]]: Starting node IDs with corresponding inputs.
dict[str, BlockInput]: Node input masks including all passed-in credentials.
set[str]: Node IDs that should be skipped (optional credentials not configured).
Raises:
NotFoundError: If the graph is not found.
@@ -559,16 +514,14 @@ async def validate_and_construct_node_execution_input(
nodes_input_masks or {},
)
starting_nodes_input, nodes_to_skip = (
await _construct_starting_node_execution_input(
graph=graph,
user_id=user_id,
graph_inputs=graph_inputs,
nodes_input_masks=nodes_input_masks,
)
starting_nodes_input = await _construct_starting_node_execution_input(
graph=graph,
user_id=user_id,
graph_inputs=graph_inputs,
nodes_input_masks=nodes_input_masks,
)
return graph, starting_nodes_input, nodes_input_masks, nodes_to_skip
return graph, starting_nodes_input, nodes_input_masks
def _merge_nodes_input_masks(
@@ -826,9 +779,6 @@ async def add_graph_execution(
# Use existing execution's compiled input masks
compiled_nodes_input_masks = graph_exec.nodes_input_masks or {}
# For resumed executions, nodes_to_skip was already determined at creation time
# TODO: Consider storing nodes_to_skip in DB if we need to preserve it across resumes
nodes_to_skip: set[str] = set()
logger.info(f"Resuming graph execution #{graph_exec.id} for graph #{graph_id}")
else:
@@ -837,7 +787,7 @@ async def add_graph_execution(
)
# Create new execution
graph, starting_nodes_input, compiled_nodes_input_masks, nodes_to_skip = (
graph, starting_nodes_input, compiled_nodes_input_masks = (
await validate_and_construct_node_execution_input(
graph_id=graph_id,
user_id=user_id,
@@ -886,7 +836,6 @@ async def add_graph_execution(
try:
graph_exec_entry = graph_exec.to_graph_execution_entry(
compiled_nodes_input_masks=compiled_nodes_input_masks,
nodes_to_skip=nodes_to_skip,
execution_context=execution_context,
)
logger.info(f"Publishing execution {graph_exec.id} to execution queue")

View File

@@ -367,13 +367,10 @@ async def test_add_graph_execution_is_repeatable(mocker: MockerFixture):
)
# Setup mock returns
# The function returns (graph, starting_nodes_input, compiled_nodes_input_masks, nodes_to_skip)
nodes_to_skip: set[str] = set()
mock_validate.return_value = (
mock_graph,
starting_nodes_input,
compiled_nodes_input_masks,
nodes_to_skip,
)
mock_prisma.is_connected.return_value = True
mock_edb.create_graph_execution = mocker.AsyncMock(return_value=mock_graph_exec)
@@ -459,212 +456,3 @@ async def test_add_graph_execution_is_repeatable(mocker: MockerFixture):
# Both executions should succeed (though they create different objects)
assert result1 == mock_graph_exec
assert result2 == mock_graph_exec_2
# ============================================================================
# Tests for Optional Credentials Feature
# ============================================================================
@pytest.mark.asyncio
async def test_validate_node_input_credentials_returns_nodes_to_skip(
mocker: MockerFixture,
):
"""
Test that _validate_node_input_credentials returns nodes_to_skip set
for nodes with credentials_optional=True and missing credentials.
"""
from backend.executor.utils import _validate_node_input_credentials
# Create a mock node with credentials_optional=True
mock_node = mocker.MagicMock()
mock_node.id = "node-with-optional-creds"
mock_node.credentials_optional = True
mock_node.input_default = {} # No credentials configured
# Create a mock block with credentials field
mock_block = mocker.MagicMock()
mock_credentials_field_type = mocker.MagicMock()
mock_block.input_schema.get_credentials_fields.return_value = {
"credentials": mock_credentials_field_type
}
mock_node.block = mock_block
# Create mock graph
mock_graph = mocker.MagicMock()
mock_graph.nodes = [mock_node]
# Call the function
errors, nodes_to_skip = await _validate_node_input_credentials(
graph=mock_graph,
user_id="test-user-id",
nodes_input_masks=None,
)
# Node should be in nodes_to_skip, not in errors
assert mock_node.id in nodes_to_skip
assert mock_node.id not in errors
@pytest.mark.asyncio
async def test_validate_node_input_credentials_required_missing_creds_error(
mocker: MockerFixture,
):
"""
Test that _validate_node_input_credentials returns errors
for nodes with credentials_optional=False and missing credentials.
"""
from backend.executor.utils import _validate_node_input_credentials
# Create a mock node with credentials_optional=False (required)
mock_node = mocker.MagicMock()
mock_node.id = "node-with-required-creds"
mock_node.credentials_optional = False
mock_node.input_default = {} # No credentials configured
# Create a mock block with credentials field
mock_block = mocker.MagicMock()
mock_credentials_field_type = mocker.MagicMock()
mock_block.input_schema.get_credentials_fields.return_value = {
"credentials": mock_credentials_field_type
}
mock_node.block = mock_block
# Create mock graph
mock_graph = mocker.MagicMock()
mock_graph.nodes = [mock_node]
# Call the function
errors, nodes_to_skip = await _validate_node_input_credentials(
graph=mock_graph,
user_id="test-user-id",
nodes_input_masks=None,
)
# Node should be in errors, not in nodes_to_skip
assert mock_node.id in errors
assert "credentials" in errors[mock_node.id]
assert "required" in errors[mock_node.id]["credentials"].lower()
assert mock_node.id not in nodes_to_skip
@pytest.mark.asyncio
async def test_validate_graph_with_credentials_returns_nodes_to_skip(
mocker: MockerFixture,
):
"""
Test that validate_graph_with_credentials returns nodes_to_skip set
from _validate_node_input_credentials.
"""
from backend.executor.utils import validate_graph_with_credentials
# Mock _validate_node_input_credentials to return specific values
mock_validate = mocker.patch(
"backend.executor.utils._validate_node_input_credentials"
)
expected_errors = {"node1": {"field": "error"}}
expected_nodes_to_skip = {"node2", "node3"}
mock_validate.return_value = (expected_errors, expected_nodes_to_skip)
# Mock GraphModel with validate_graph_get_errors method
mock_graph = mocker.MagicMock()
mock_graph.validate_graph_get_errors.return_value = {}
# Call the function
errors, nodes_to_skip = await validate_graph_with_credentials(
graph=mock_graph,
user_id="test-user-id",
nodes_input_masks=None,
)
# Verify nodes_to_skip is passed through
assert nodes_to_skip == expected_nodes_to_skip
assert "node1" in errors
@pytest.mark.asyncio
async def test_add_graph_execution_with_nodes_to_skip(mocker: MockerFixture):
"""
Test that add_graph_execution properly passes nodes_to_skip
to the graph execution entry.
"""
from backend.data.execution import GraphExecutionWithNodes
from backend.executor.utils import add_graph_execution
# Mock data
graph_id = "test-graph-id"
user_id = "test-user-id"
inputs = {"test_input": "test_value"}
graph_version = 1
# Mock the graph object
mock_graph = mocker.MagicMock()
mock_graph.version = graph_version
# Starting nodes and masks
starting_nodes_input = [("node1", {"input1": "value1"})]
compiled_nodes_input_masks = {}
nodes_to_skip = {"skipped-node-1", "skipped-node-2"}
# Mock the graph execution object
mock_graph_exec = mocker.MagicMock(spec=GraphExecutionWithNodes)
mock_graph_exec.id = "execution-id-123"
mock_graph_exec.node_executions = []
# Track what's passed to to_graph_execution_entry
captured_kwargs = {}
def capture_to_entry(**kwargs):
captured_kwargs.update(kwargs)
return mocker.MagicMock()
mock_graph_exec.to_graph_execution_entry.side_effect = capture_to_entry
# Setup mocks
mock_validate = mocker.patch(
"backend.executor.utils.validate_and_construct_node_execution_input"
)
mock_edb = mocker.patch("backend.executor.utils.execution_db")
mock_prisma = mocker.patch("backend.executor.utils.prisma")
mock_udb = mocker.patch("backend.executor.utils.user_db")
mock_gdb = mocker.patch("backend.executor.utils.graph_db")
mock_get_queue = mocker.patch("backend.executor.utils.get_async_execution_queue")
mock_get_event_bus = mocker.patch(
"backend.executor.utils.get_async_execution_event_bus"
)
# Setup returns - include nodes_to_skip in the tuple
mock_validate.return_value = (
mock_graph,
starting_nodes_input,
compiled_nodes_input_masks,
nodes_to_skip, # This should be passed through
)
mock_prisma.is_connected.return_value = True
mock_edb.create_graph_execution = mocker.AsyncMock(return_value=mock_graph_exec)
mock_edb.update_graph_execution_stats = mocker.AsyncMock(
return_value=mock_graph_exec
)
mock_edb.update_node_execution_status_batch = mocker.AsyncMock()
mock_user = mocker.MagicMock()
mock_user.timezone = "UTC"
mock_settings = mocker.MagicMock()
mock_settings.human_in_the_loop_safe_mode = True
mock_udb.get_user_by_id = mocker.AsyncMock(return_value=mock_user)
mock_gdb.get_graph_settings = mocker.AsyncMock(return_value=mock_settings)
mock_get_queue.return_value = mocker.AsyncMock()
mock_get_event_bus.return_value = mocker.MagicMock(publish=mocker.AsyncMock())
# Call the function
await add_graph_execution(
graph_id=graph_id,
user_id=user_id,
inputs=inputs,
graph_version=graph_version,
)
# Verify nodes_to_skip was passed to to_graph_execution_entry
assert "nodes_to_skip" in captured_kwargs
assert captured_kwargs["nodes_to_skip"] == nodes_to_skip

View File

@@ -1,227 +0,0 @@
#!/usr/bin/env python3
"""
Generate a lightweight stub for prisma/types.py that collapses all exported
symbols to Any. This prevents Pyright from spending time/budget on Prisma's
query DSL types while keeping runtime behavior unchanged.
Usage:
poetry run gen-prisma-stub
This script automatically finds the prisma package location and generates
the types.pyi stub file in the same directory as types.py.
"""
from __future__ import annotations
import ast
import importlib.util
import sys
from pathlib import Path
from typing import Iterable, Set
def _iter_assigned_names(target: ast.expr) -> Iterable[str]:
"""Extract names from assignment targets (handles tuple unpacking)."""
if isinstance(target, ast.Name):
yield target.id
elif isinstance(target, (ast.Tuple, ast.List)):
for elt in target.elts:
yield from _iter_assigned_names(elt)
def _is_private(name: str) -> bool:
"""Check if a name is private (starts with _ but not __)."""
return name.startswith("_") and not name.startswith("__")
def _is_safe_type_alias(node: ast.Assign) -> bool:
"""Check if an assignment is a safe type alias that shouldn't be stubbed.
Safe types are:
- Literal types (don't cause type budget issues)
- Simple type references (SortMode, SortOrder, etc.)
- TypeVar definitions
"""
if not node.value:
return False
# Check if it's a Subscript (like Literal[...], Union[...], TypeVar[...])
if isinstance(node.value, ast.Subscript):
# Get the base type name
if isinstance(node.value.value, ast.Name):
base_name = node.value.value.id
# Literal types are safe
if base_name == "Literal":
return True
# TypeVar is safe
if base_name == "TypeVar":
return True
elif isinstance(node.value.value, ast.Attribute):
# Handle typing_extensions.Literal etc.
if node.value.value.attr == "Literal":
return True
# Check if it's a simple Name reference (like SortMode = _types.SortMode)
if isinstance(node.value, ast.Attribute):
return True
# Check if it's a Call (like TypeVar(...))
if isinstance(node.value, ast.Call):
if isinstance(node.value.func, ast.Name):
if node.value.func.id == "TypeVar":
return True
return False
def collect_top_level_symbols(
tree: ast.Module, source_lines: list[str]
) -> tuple[Set[str], Set[str], list[str], Set[str]]:
"""Collect all top-level symbols from an AST module.
Returns:
Tuple of (class_names, function_names, safe_variable_sources, unsafe_variable_names)
safe_variable_sources contains the actual source code lines for safe variables
"""
classes: Set[str] = set()
functions: Set[str] = set()
safe_variable_sources: list[str] = []
unsafe_variables: Set[str] = set()
for node in tree.body:
if isinstance(node, ast.ClassDef):
if not _is_private(node.name):
classes.add(node.name)
elif isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
if not _is_private(node.name):
functions.add(node.name)
elif isinstance(node, ast.Assign):
is_safe = _is_safe_type_alias(node)
names = []
for t in node.targets:
for n in _iter_assigned_names(t):
if not _is_private(n):
names.append(n)
if names:
if is_safe:
# Extract the source code for this assignment
start_line = node.lineno - 1 # 0-indexed
end_line = node.end_lineno if node.end_lineno else node.lineno
source = "\n".join(source_lines[start_line:end_line])
safe_variable_sources.append(source)
else:
unsafe_variables.update(names)
elif isinstance(node, ast.AnnAssign) and node.target:
# Annotated assignments are always stubbed
for n in _iter_assigned_names(node.target):
if not _is_private(n):
unsafe_variables.add(n)
return classes, functions, safe_variable_sources, unsafe_variables
def find_prisma_types_path() -> Path:
"""Find the prisma types.py file in the installed package."""
spec = importlib.util.find_spec("prisma")
if spec is None or spec.origin is None:
raise RuntimeError("Could not find prisma package. Is it installed?")
prisma_dir = Path(spec.origin).parent
types_path = prisma_dir / "types.py"
if not types_path.exists():
raise RuntimeError(f"prisma/types.py not found at {types_path}")
return types_path
def generate_stub(src_path: Path, stub_path: Path) -> int:
"""Generate the .pyi stub file from the source types.py."""
code = src_path.read_text(encoding="utf-8", errors="ignore")
source_lines = code.splitlines()
tree = ast.parse(code, filename=str(src_path))
classes, functions, safe_variable_sources, unsafe_variables = (
collect_top_level_symbols(tree, source_lines)
)
header = """\
# -*- coding: utf-8 -*-
# Auto-generated stub file - DO NOT EDIT
# Generated by gen_prisma_types_stub.py
#
# This stub intentionally collapses complex Prisma query DSL types to Any.
# Prisma's generated types can explode Pyright's type inference budgets
# on large schemas. We collapse them to Any so the rest of the codebase
# can remain strongly typed while keeping runtime behavior unchanged.
#
# Safe types (Literal, TypeVar, simple references) are preserved from the
# original types.py to maintain proper type checking where possible.
from __future__ import annotations
from typing import Any
from typing_extensions import Literal
# Re-export commonly used typing constructs that may be imported from this module
from typing import TYPE_CHECKING, TypeVar, Generic, Union, Optional, List, Dict
# Base type alias for stubbed Prisma types - allows any dict structure
_PrismaDict = dict[str, Any]
"""
lines = [header]
# Include safe variable definitions (Literal types, TypeVars, etc.)
lines.append("# Safe type definitions preserved from original types.py")
for source in safe_variable_sources:
lines.append(source)
lines.append("")
# Stub all classes and unsafe variables uniformly as dict[str, Any] aliases
# This allows:
# 1. Use in type annotations: x: SomeType
# 2. Constructor calls: SomeType(...)
# 3. Dict literal assignments: x: SomeType = {...}
lines.append(
"# Stubbed types (collapsed to dict[str, Any] to prevent type budget exhaustion)"
)
all_stubbed = sorted(classes | unsafe_variables)
for name in all_stubbed:
lines.append(f"{name} = _PrismaDict")
lines.append("")
# Stub functions
for name in sorted(functions):
lines.append(f"def {name}(*args: Any, **kwargs: Any) -> Any: ...")
lines.append("")
stub_path.write_text("\n".join(lines), encoding="utf-8")
return (
len(classes)
+ len(functions)
+ len(safe_variable_sources)
+ len(unsafe_variables)
)
def main() -> None:
"""Main entry point."""
try:
types_path = find_prisma_types_path()
stub_path = types_path.with_suffix(".pyi")
print(f"Found prisma types.py at: {types_path}")
print(f"Generating stub at: {stub_path}")
num_symbols = generate_stub(types_path, stub_path)
print(f"Generated {stub_path.name} with {num_symbols} Any-typed symbols")
except Exception as e:
print(f"Error: {e}", file=sys.stderr)
sys.exit(1)
if __name__ == "__main__":
main()
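For reference, the emitted types.pyi consists of the header string above, the preserved safe aliases, and one `Name = _PrismaDict` line per stubbed class or variable plus a `(*args, **kwargs) -> Any` stub per function. A hypothetical excerpt of that tail section might look like the sketch below; the symbol names are illustrative placeholders, not taken from an actual generated file.

# Hypothetical excerpt of a generated prisma/types.pyi tail - names are illustrative.
SomeModelWhereInput = _PrismaDict        # stubbed query-DSL class
SomeModelCreateInput = _PrismaDict       # stubbed query-DSL class
SomeQueryArgsAlias = _PrismaDict         # stubbed module-level variable

def some_serializer_helper(*args: Any, **kwargs: Any) -> Any: ...  # stubbed function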

View File

@@ -25,9 +25,6 @@ def run(*command: str) -> None:
def lint():
# Generate Prisma types stub before running pyright to prevent type budget exhaustion
run("gen-prisma-stub")
lint_step_args: list[list[str]] = [
["ruff", "check", *TARGET_DIRS, "--exit-zero"],
["ruff", "format", "--diff", "--check", LIBS_DIR],
@@ -52,6 +49,4 @@ def format():
run("ruff", "format", LIBS_DIR)
run("isort", "--profile", "black", BACKEND_DIR)
run("black", BACKEND_DIR)
# Generate Prisma types stub before running pyright to prevent type budget exhaustion
run("gen-prisma-stub")
run("pyright", *TARGET_DIRS)

View File

@@ -1,35 +0,0 @@
-- CreateExtension in public schema (standard location for pgvector)
CREATE EXTENSION IF NOT EXISTS "vector" WITH SCHEMA "public";
-- Grant usage on public schema to platform users
GRANT USAGE ON SCHEMA public TO postgres;
-- CreateEnum
CREATE TYPE "ContentType" AS ENUM ('STORE_AGENT', 'BLOCK', 'INTEGRATION', 'DOCUMENTATION', 'LIBRARY_AGENT');
-- CreateTable
CREATE TABLE "UnifiedContentEmbedding" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"contentType" "ContentType" NOT NULL,
"contentId" TEXT NOT NULL,
"userId" TEXT,
"embedding" public.vector(1536) NOT NULL,
"searchableText" TEXT NOT NULL,
"metadata" JSONB NOT NULL DEFAULT '{}',
CONSTRAINT "UnifiedContentEmbedding_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE INDEX "UnifiedContentEmbedding_contentType_idx" ON "UnifiedContentEmbedding"("contentType");
-- CreateIndex
CREATE INDEX "UnifiedContentEmbedding_userId_idx" ON "UnifiedContentEmbedding"("userId");
-- CreateIndex
CREATE INDEX "UnifiedContentEmbedding_contentType_userId_idx" ON "UnifiedContentEmbedding"("contentType", "userId");
-- CreateIndex
CREATE UNIQUE INDEX "UnifiedContentEmbedding_contentType_contentId_userId_key" ON "UnifiedContentEmbedding"("contentType", "contentId", "userId");
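The embedding column is queried with pgvector's <=> cosine-distance operator, and the hybrid search above converts that distance to a similarity as 1 - distance. A minimal sketch of a standalone nearest-neighbour lookup against this table, assuming a prisma-client-py connection with the table reachable on the default search_path (the helper name and the literal-vector encoding are illustrative):

from prisma import Prisma

async def top_semantic_matches(db: Prisma, embedding: list[float], k: int = 5):
    # pgvector accepts a bracketed literal cast to ::vector; 1 - (a <=> b) is cosine similarity.
    vector_literal = "[" + ",".join(f"{x:.6f}" for x in embedding) + "]"
    return await db.query_raw(
        """
        SELECT "contentId", 1 - ("embedding" <=> $1::vector) AS similarity
        FROM "UnifiedContentEmbedding"
        WHERE "contentType" = 'STORE_AGENT'
        ORDER BY "embedding" <=> $1::vector
        LIMIT $2
        """,
        vector_literal,
        k,
    )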

View File

@@ -117,7 +117,6 @@ lint = "linter:lint"
test = "run_tests:test"
load-store-agents = "test.load_store_agents:run"
export-api-schema = "backend.cli.generate_openapi_json:main"
gen-prisma-stub = "gen_prisma_types_stub:main"
oauth-tool = "backend.cli.oauth_tool:cli"
[tool.isort]
@@ -135,9 +134,6 @@ ignore_patterns = []
[tool.pytest.ini_options]
asyncio_mode = "auto"
asyncio_default_fixture_loop_scope = "session"
# Disable syrupy plugin to avoid conflict with pytest-snapshot
# Both provide --snapshot-update argument causing ArgumentError
addopts = "-p no:syrupy"
filterwarnings = [
"ignore:'audioop' is deprecated:DeprecationWarning:discord.player",
"ignore:invalid escape sequence:DeprecationWarning:tweepy.api",

View File

@@ -1,15 +1,14 @@
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
directUrl = env("DIRECT_URL")
extensions = [pgvector(map: "vector")]
provider = "postgresql"
url = env("DATABASE_URL")
directUrl = env("DIRECT_URL")
}
generator client {
provider = "prisma-client-py"
recursive_type_depth = -1
interface = "asyncio"
previewFeatures = ["views", "fullTextSearch", "postgresqlExtensions"]
previewFeatures = ["views", "fullTextSearch"]
partial_type_generator = "backend/data/partial_types.py"
}
@@ -128,8 +127,8 @@ model BuilderSearchHistory {
updatedAt DateTime @default(now()) @updatedAt
searchQuery String
filter String[] @default([])
byCreator String[] @default([])
filter String[] @default([])
byCreator String[] @default([])
userId String
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@ -722,25 +721,26 @@ view StoreAgent {
storeListingVersionId String
updated_at DateTime
slug String
agent_name String
agent_video String?
agent_output_demo String?
agent_image String[]
slug String
agent_name String
agent_video String?
agent_output_demo String?
agent_image String[]
featured Boolean @default(false)
creator_username String?
creator_avatar String?
sub_heading String
description String
categories String[]
runs Int
rating Float
versions String[]
agentGraphVersions String[]
agentGraphId String
is_available Boolean @default(true)
useForOnboarding Boolean @default(false)
featured Boolean @default(false)
creator_username String?
creator_avatar String?
sub_heading String
description String
categories String[]
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
runs Int
rating Float
versions String[]
agentGraphVersions String[]
agentGraphId String
is_available Boolean @default(true)
useForOnboarding Boolean @default(false)
// Materialized views used (refreshed every 15 minutes via pg_cron):
// - mv_agent_run_counts - Pre-aggregated agent execution counts by agentGraphId
@@ -856,14 +856,14 @@ model StoreListingVersion {
AgentGraph AgentGraph @relation(fields: [agentGraphId, agentGraphVersion], references: [id, version])
// Content fields
name String
subHeading String
videoUrl String?
agentOutputDemoUrl String?
imageUrls String[]
description String
instructions String?
categories String[]
name String
subHeading String
videoUrl String?
agentOutputDemoUrl String?
imageUrls String[]
description String
instructions String?
categories String[]
isFeatured Boolean @default(false)
@@ -899,9 +899,6 @@ model StoreListingVersion {
// Reviews for this specific version
Reviews StoreListingReview[]
// Note: Embeddings now stored in UnifiedContentEmbedding table
// Use contentType=STORE_AGENT and contentId=storeListingVersionId
@@unique([storeListingId, version])
@@index([storeListingId, submissionStatus, isAvailable])
@@index([submissionStatus])
@@ -909,42 +906,6 @@ model StoreListingVersion {
@@index([agentGraphId, agentGraphVersion]) // Non-unique index for efficient lookups
}
// Content type enum for unified search across store agents, blocks, docs
// Note: BLOCK/INTEGRATION are file-based (Python classes), not DB records
// DOCUMENTATION are file-based (.md files), not DB records
// Only STORE_AGENT and LIBRARY_AGENT are stored in database
enum ContentType {
STORE_AGENT // Database: StoreListingVersion
BLOCK // File-based: Python classes in /backend/blocks/
INTEGRATION // File-based: Python classes (blocks with credentials)
DOCUMENTATION // File-based: .md/.mdx files
LIBRARY_AGENT // Database: User's personal agents
}
// Unified embeddings table for all searchable content types
// Supports both public content (userId=null) and user-specific content (userId=userID)
model UnifiedContentEmbedding {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
// Content identification
contentType ContentType
contentId String // DB ID (storeListingVersionId) or file identifier (block.id, file_path)
userId String? // NULL for public content (store, blocks, docs), userId for private content (library agents)
// Search data
embedding Unsupported("public.vector(1536)") // pgvector embedding from public schema
searchableText String // Combined text for search and fallback
metadata Json @default("{}") // Content-specific metadata
@@unique([contentType, contentId, userId]) // Allow same content for different users
@@index([contentType])
@@index([userId])
@@index([contentType, userId])
}
model StoreListingReview {
id String @id @default(uuid())
createdAt DateTime @default(now())
@@ -1037,16 +998,16 @@ model OAuthApplication {
updatedAt DateTime @updatedAt
// Application metadata
name String
description String?
logoUrl String? // URL to app logo stored in GCS
clientId String @unique
clientSecret String // Hashed with Scrypt (same as API keys)
clientSecretSalt String // Salt for Scrypt hashing
name String
description String?
logoUrl String? // URL to app logo stored in GCS
clientId String @unique
clientSecret String // Hashed with Scrypt (same as API keys)
clientSecretSalt String // Salt for Scrypt hashing
// OAuth configuration
redirectUris String[] // Allowed callback URLs
grantTypes String[] @default(["authorization_code", "refresh_token"])
grantTypes String[] @default(["authorization_code", "refresh_token"])
scopes APIKeyPermission[] // Which permissions the app can request
// Application management

View File

@@ -2,7 +2,6 @@
"created_at": "2025-09-04T13:37:00",
"credentials_input_schema": {
"properties": {},
"required": [],
"title": "TestGraphCredentialsInputSchema",
"type": "object"
},

View File

@@ -2,7 +2,6 @@
{
"credentials_input_schema": {
"properties": {},
"required": [],
"title": "TestGraphCredentialsInputSchema",
"type": "object"
},

View File

@@ -4,7 +4,6 @@
"id": "test-agent-1",
"graph_id": "test-agent-1",
"graph_version": 1,
"owner_user_id": "3e53486c-cf57-477e-ba2a-cb02dc828e1a",
"image_url": null,
"creator_name": "Test Creator",
"creator_image_url": "",
@@ -42,7 +41,6 @@
"id": "test-agent-2",
"graph_id": "test-agent-2",
"graph_version": 1,
"owner_user_id": "3e53486c-cf57-477e-ba2a-cb02dc828e1a",
"image_url": null,
"creator_name": "Test Creator",
"creator_image_url": "",

View File

@@ -1,7 +1,6 @@
{
"submissions": [
{
"listing_id": "test-listing-id",
"agent_id": "test-agent-id",
"agent_version": 1,
"name": "Test Agent",

View File

@@ -37,7 +37,7 @@ services:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: migrate
command: ["sh", "-c", "poetry run prisma generate && poetry run gen-prisma-stub && poetry run prisma migrate deploy"]
command: ["sh", "-c", "poetry run prisma generate && poetry run prisma migrate deploy"]
develop:
watch:
- path: ./

View File

@@ -54,7 +54,7 @@
"@radix-ui/react-tooltip": "1.2.8",
"@rjsf/core": "6.1.2",
"@rjsf/utils": "6.1.2",
"@rjsf/validator-ajv8": "6.1.2",
"@rjsf/validator-ajv8": "5.24.13",
"@sentry/nextjs": "10.27.0",
"@supabase/ssr": "0.7.0",
"@supabase/supabase-js": "2.78.0",
@@ -92,6 +92,7 @@
"react-currency-input-field": "4.0.3",
"react-day-picker": "9.11.1",
"react-dom": "18.3.1",
"react-drag-drop-files": "2.4.0",
"react-hook-form": "7.66.0",
"react-icons": "5.5.0",
"react-markdown": "9.0.3",

File diff suppressed because it is too large.

Binary file not shown (before: 2.6 KiB).

Binary file not shown (before: 16 KiB).

View File

@@ -1,6 +1,6 @@
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { CredentialsMetaInput } from "@/app/api/__generated__/models/credentialsMetaInput";
import { GraphMeta } from "@/app/api/__generated__/models/graphMeta";
import { CredentialsInput } from "@/components/contextual/CredentialsInputs/CredentialsInputs";
import { useState } from "react";
import { getSchemaDefaultCredentials } from "../../helpers";
import { areAllCredentialsSet, getCredentialFields } from "./helpers";

View File

@@ -1,12 +1,12 @@
"use client";
import { RunAgentInputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs";
import {
Card,
CardContent,
CardHeader,
CardTitle,
} from "@/components/__legacy__/ui/card";
import { RunAgentInputs } from "@/components/contextual/RunAgentInputs/RunAgentInputs";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { CircleNotchIcon } from "@phosphor-icons/react/dist/ssr";
import { Play } from "lucide-react";

View File

@@ -0,0 +1,46 @@
"use client";
import { ChatDrawer } from "@/components/contextual/Chat/ChatDrawer";
import { usePathname } from "next/navigation";
import { Children, ReactNode } from "react";
interface PlatformLayoutContentProps {
children: ReactNode;
}
export function PlatformLayoutContent({
children,
}: PlatformLayoutContentProps) {
const pathname = usePathname();
const isAuthPage =
pathname?.includes("/login") || pathname?.includes("/signup");
// Extract Navbar, AdminImpersonationBanner, and page content from children
const childrenArray = Children.toArray(children);
const navbar = childrenArray[0];
const adminBanner = childrenArray[1];
const pageContent = childrenArray.slice(2);
// For login/signup pages, use a simpler layout that doesn't interfere with centering
if (isAuthPage) {
return (
<main className="flex min-h-screen w-full flex-col">
{navbar}
{adminBanner}
<section className="flex-1">{pageContent}</section>
</main>
);
}
// For logged-in pages, use the drawer layout
return (
<main className="flex h-screen w-full flex-col overflow-hidden">
{navbar}
{adminBanner}
<section className="flex min-h-0 flex-1 overflow-auto">
{pageContent}
</section>
<ChatDrawer />
</main>
);
}

View File

@@ -8,7 +8,7 @@ import { AuthCard } from "@/components/auth/AuthCard";
import { Text } from "@/components/atoms/Text/Text";
import { Button } from "@/components/atoms/Button/Button";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { CredentialsInput } from "@/components/contextual/CredentialsInputs/CredentialsInputs";
import type {
BlockIOCredentialsSubSchema,
CredentialsMetaInput,

View File

@@ -1,11 +1,6 @@
import { BlockUIType } from "@/app/(platform)/build/components/types";
import { useGraphStore } from "@/app/(platform)/build/stores/graphStore";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import {
globalRegistry,
OutputActions,
OutputItem,
} from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import { Label } from "@/components/__legacy__/ui/label";
import { ScrollArea } from "@/components/__legacy__/ui/scroll-area";
import {
@@ -23,6 +18,11 @@ import {
TooltipProvider,
TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";
import {
globalRegistry,
OutputActions,
OutputItem,
} from "@/components/contextual/OutputRenderers";
import { BookOpenIcon } from "@phosphor-icons/react";
import { useMemo } from "react";
import { useShallow } from "zustand/react/shallow";

View File

@@ -66,7 +66,6 @@ export const RunInputDialog = ({
formContext={{
showHandles: false,
size: "large",
showOptionalToggle: false,
}}
/>
</div>

View File

@@ -66,7 +66,7 @@ export const useRunInputDialog = ({
if (isCredentialFieldSchema(fieldSchema)) {
dynamicUiSchema[fieldName] = {
...dynamicUiSchema[fieldName],
"ui:field": "custom/credential_field",
"ui:field": "credentials",
};
}
});
@@ -76,18 +76,12 @@ export const useRunInputDialog = ({
}, [credentialsSchema]);
const handleManualRun = async () => {
// Filter out incomplete credentials (those without a valid id)
// RJSF auto-populates const values (provider, type) but not id field
const validCredentials = Object.fromEntries(
Object.entries(credentialValues).filter(([_, cred]) => cred && cred.id),
);
await executeGraph({
graphId: flowID ?? "",
graphVersion: flowVersion || null,
data: {
inputs: inputValues,
credentials_inputs: validCredentials,
credentials_inputs: credentialValues,
source: "builder",
},
});

View File

@@ -97,9 +97,6 @@ export const Flow = () => {
onConnect={onConnect}
onEdgesChange={onEdgesChange}
onNodeDragStop={onNodeDragStop}
onNodeContextMenu={(event) => {
event.preventDefault();
}}
maxZoom={2}
minZoom={0.1}
onDragOver={onDragOver}

View File

@@ -1,25 +1,24 @@
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
import { BlockCost } from "@/app/api/__generated__/models/blockCost";
import { BlockInfoCategoriesItem } from "@/app/api/__generated__/models/blockInfoCategoriesItem";
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
import { NodeModelMetadata } from "@/app/api/__generated__/models/nodeModelMetadata";
import { preprocessInputSchema } from "@/components/renderers/InputRenderer/utils/input-schema-pre-processor";
import { cn } from "@/lib/utils";
import { RJSFSchema } from "@rjsf/utils";
import { NodeProps, Node as XYNode } from "@xyflow/react";
import React from "react";
import { Node as XYNode, NodeProps } from "@xyflow/react";
import { RJSFSchema } from "@rjsf/utils";
import { BlockUIType } from "../../../types";
import { FormCreator } from "../FormCreator";
import { OutputHandler } from "../OutputHandler";
import { AyrshareConnectButton } from "./components/AyrshareConnectButton";
import { NodeAdvancedToggle } from "./components/NodeAdvancedToggle";
import { NodeContainer } from "./components/NodeContainer";
import { NodeExecutionBadge } from "./components/NodeExecutionBadge";
import { NodeHeader } from "./components/NodeHeader";
import { NodeDataRenderer } from "./components/NodeOutput/NodeOutput";
import { NodeRightClickMenu } from "./components/NodeRightClickMenu";
import { StickyNoteBlock } from "./components/StickyNoteBlock";
import { BlockInfoCategoriesItem } from "@/app/api/__generated__/models/blockInfoCategoriesItem";
import { BlockCost } from "@/app/api/__generated__/models/blockCost";
import { AgentExecutionStatus } from "@/app/api/__generated__/models/agentExecutionStatus";
import { NodeExecutionResult } from "@/app/api/__generated__/models/nodeExecutionResult";
import { NodeContainer } from "./components/NodeContainer";
import { NodeHeader } from "./components/NodeHeader";
import { FormCreator } from "../FormCreator";
import { preprocessInputSchema } from "@/components/renderers/InputRenderer/utils/input-schema-pre-processor";
import { OutputHandler } from "../OutputHandler";
import { NodeAdvancedToggle } from "./components/NodeAdvancedToggle";
import { NodeDataRenderer } from "./components/NodeOutput/NodeOutput";
import { NodeExecutionBadge } from "./components/NodeExecutionBadge";
import { cn } from "@/lib/utils";
import { WebhookDisclaimer } from "./components/WebhookDisclaimer";
import { AyrshareConnectButton } from "./components/AyrshareConnectButton";
import { NodeModelMetadata } from "@/app/api/__generated__/models/nodeModelMetadata";
export type CustomNodeData = {
hardcodedValues: {
@@ -89,7 +88,7 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
// Currently all blockType designs are similar - that's why I am using the same component for all of them
// If, in the future, we need some drastic change in some blockType's design, we can create separate components for it
const node = (
return (
<NodeContainer selected={selected} nodeId={nodeId} hasErrors={hasErrors}>
<div className="rounded-xlarge bg-white">
<NodeHeader data={data} nodeId={nodeId} />
@@ -118,15 +117,6 @@ export const CustomNode: React.FC<NodeProps<CustomNode>> = React.memo(
<NodeExecutionBadge nodeId={nodeId} />
</NodeContainer>
);
return (
<NodeRightClickMenu
nodeId={nodeId}
subGraphID={data.hardcodedValues?.graph_id}
>
{node}
</NodeRightClickMenu>
);
},
);

View File

@@ -1,31 +1,26 @@
import { useCopyPasteStore } from "@/app/(platform)/build/stores/copyPasteStore";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { Separator } from "@/components/__legacy__/ui/separator";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/molecules/DropdownMenu/DropdownMenu";
import {
SecondaryDropdownMenuContent,
SecondaryDropdownMenuItem,
SecondaryDropdownMenuSeparator,
} from "@/components/molecules/SecondaryMenu/SecondaryMenu";
import {
ArrowSquareOutIcon,
CopyIcon,
DotsThreeOutlineVerticalIcon,
TrashIcon,
} from "@phosphor-icons/react";
import { DotsThreeOutlineVerticalIcon } from "@phosphor-icons/react";
import { Copy, Trash2, ExternalLink } from "lucide-react";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useCopyPasteStore } from "@/app/(platform)/build/stores/copyPasteStore";
import { useReactFlow } from "@xyflow/react";
type Props = {
export const NodeContextMenu = ({
nodeId,
subGraphID,
}: {
nodeId: string;
subGraphID?: string;
};
export const NodeContextMenu = ({ nodeId, subGraphID }: Props) => {
}) => {
const { deleteElements } = useReactFlow();
function handleCopy() {
const handleCopy = () => {
useNodeStore.setState((state) => ({
nodes: state.nodes.map((node) => ({
...node,
@@ -35,47 +30,47 @@ export const NodeContextMenu = ({ nodeId, subGraphID }: Props) => {
useCopyPasteStore.getState().copySelectedNodes();
useCopyPasteStore.getState().pasteNodes();
}
};
function handleDelete() {
const handleDelete = () => {
deleteElements({ nodes: [{ id: nodeId }] });
}
};
return (
<DropdownMenu>
<DropdownMenuTrigger className="py-2">
<DotsThreeOutlineVerticalIcon size={16} weight="fill" />
</DropdownMenuTrigger>
<SecondaryDropdownMenuContent side="right" align="start">
<SecondaryDropdownMenuItem onClick={handleCopy}>
<CopyIcon size={20} className="mr-2 dark:text-gray-100" />
<span className="dark:text-gray-100">Copy</span>
</SecondaryDropdownMenuItem>
<SecondaryDropdownMenuSeparator />
<DropdownMenuContent
side="right"
align="start"
className="rounded-xlarge"
>
<DropdownMenuItem onClick={handleCopy} className="hover:rounded-xlarge">
<Copy className="mr-2 h-4 w-4" />
Copy Node
</DropdownMenuItem>
{subGraphID && (
<>
<SecondaryDropdownMenuItem
onClick={() => window.open(`/build?flowID=${subGraphID}`)}
>
<ArrowSquareOutIcon
size={20}
className="mr-2 dark:text-gray-100"
/>
<span className="dark:text-gray-100">Open agent</span>
</SecondaryDropdownMenuItem>
<SecondaryDropdownMenuSeparator />
</>
<DropdownMenuItem
onClick={() => window.open(`/build?flowID=${subGraphID}`)}
className="hover:rounded-xlarge"
>
<ExternalLink className="mr-2 h-4 w-4" />
Open Agent
</DropdownMenuItem>
)}
<SecondaryDropdownMenuItem variant="destructive" onClick={handleDelete}>
<TrashIcon
size={20}
className="mr-2 text-red-500 dark:text-red-400"
/>
<span className="dark:text-red-400">Delete</span>
</SecondaryDropdownMenuItem>
</SecondaryDropdownMenuContent>
<Separator className="my-2" />
<DropdownMenuItem
onClick={handleDelete}
className="text-red-600 hover:rounded-xlarge"
>
<Trash2 className="mr-2 h-4 w-4" />
Delete
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
);
};

View File

@@ -1,24 +1,25 @@
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { Text } from "@/components/atoms/Text/Text";
import { beautifyString, cn } from "@/lib/utils";
import { NodeCost } from "./NodeCost";
import { NodeBadges } from "./NodeBadges";
import { NodeContextMenu } from "./NodeContextMenu";
import { CustomNodeData } from "../CustomNode";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import { useState } from "react";
import {
Tooltip,
TooltipContent,
TooltipProvider,
TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";
import { beautifyString, cn } from "@/lib/utils";
import { useState } from "react";
import { CustomNodeData } from "../CustomNode";
import { NodeBadges } from "./NodeBadges";
import { NodeContextMenu } from "./NodeContextMenu";
import { NodeCost } from "./NodeCost";
type Props = {
export const NodeHeader = ({
data,
nodeId,
}: {
data: CustomNodeData;
nodeId: string;
};
export const NodeHeader = ({ data, nodeId }: Props) => {
}) => {
const updateNodeData = useNodeStore((state) => state.updateNodeData);
const title = (data.metadata?.customized_name as string) || data.title;
const [isEditingTitle, setIsEditingTitle] = useState(false);

View File

@@ -1,7 +1,7 @@
"use client";
import type { OutputMetadata } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import { globalRegistry } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
import { globalRegistry } from "@/components/contextual/OutputRenderers";
export const TextRenderer: React.FC<{
value: any;

View File

@@ -1,7 +1,3 @@
import {
OutputActions,
OutputItem,
} from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import { ScrollArea } from "@/components/__legacy__/ui/scroll-area";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
@@ -11,6 +7,10 @@ import {
TooltipProvider,
TooltipTrigger,
} from "@/components/atoms/Tooltip/BaseTooltip";
import {
OutputActions,
OutputItem,
} from "@/components/contextual/OutputRenderers";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { beautifyString } from "@/lib/utils";
import {
@@ -151,7 +151,7 @@ export const NodeDataViewer: FC<NodeDataViewerProps> = ({
</div>
<div className="flex justify-end pt-4">
{outputItems.length > 1 && (
{outputItems.length > 0 && (
<OutputActions
items={outputItems.map((item) => ({
value: item.value,

View File

@@ -1,6 +1,6 @@
import type { OutputMetadata } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import { globalRegistry } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import { downloadOutputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers/utils/download";
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
import { globalRegistry } from "@/components/contextual/OutputRenderers";
import { downloadOutputs } from "@/components/contextual/OutputRenderers/utils/download";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { beautifyString } from "@/lib/utils";
import React, { useMemo, useState } from "react";

View File

@@ -1,104 +0,0 @@
import { useCopyPasteStore } from "@/app/(platform)/build/stores/copyPasteStore";
import { useNodeStore } from "@/app/(platform)/build/stores/nodeStore";
import {
SecondaryMenuContent,
SecondaryMenuItem,
SecondaryMenuSeparator,
} from "@/components/molecules/SecondaryMenu/SecondaryMenu";
import { ArrowSquareOutIcon, CopyIcon, TrashIcon } from "@phosphor-icons/react";
import * as ContextMenu from "@radix-ui/react-context-menu";
import { useReactFlow } from "@xyflow/react";
import { useEffect, useRef } from "react";
import { CustomNode } from "../CustomNode";
type Props = {
nodeId: string;
subGraphID?: string;
children: React.ReactNode;
};
const DOUBLE_CLICK_TIMEOUT = 300;
export function NodeRightClickMenu({ nodeId, subGraphID, children }: Props) {
const { deleteElements } = useReactFlow<CustomNode>();
const lastRightClickTime = useRef<number>(0);
const containerRef = useRef<HTMLDivElement>(null);
function copyNode() {
useNodeStore.setState((state) => ({
nodes: state.nodes.map((node) => ({
...node,
selected: node.id === nodeId,
})),
}));
useCopyPasteStore.getState().copySelectedNodes();
useCopyPasteStore.getState().pasteNodes();
}
function deleteNode() {
deleteElements({ nodes: [{ id: nodeId }] });
}
useEffect(() => {
const container = containerRef.current;
if (!container) return;
function handleContextMenu(e: MouseEvent) {
const now = Date.now();
const timeSinceLastClick = now - lastRightClickTime.current;
if (timeSinceLastClick < DOUBLE_CLICK_TIMEOUT) {
e.stopImmediatePropagation();
lastRightClickTime.current = 0;
return;
}
lastRightClickTime.current = now;
}
container.addEventListener("contextmenu", handleContextMenu, true);
return () => {
container.removeEventListener("contextmenu", handleContextMenu, true);
};
}, []);
return (
<ContextMenu.Root>
<ContextMenu.Trigger asChild>
<div ref={containerRef}>{children}</div>
</ContextMenu.Trigger>
<SecondaryMenuContent>
<SecondaryMenuItem onSelect={copyNode}>
<CopyIcon size={20} className="mr-2 dark:text-gray-100" />
<span className="dark:text-gray-100">Copy</span>
</SecondaryMenuItem>
<SecondaryMenuSeparator />
{subGraphID && (
<>
<SecondaryMenuItem
onClick={() => window.open(`/build?flowID=${subGraphID}`)}
>
<ArrowSquareOutIcon
size={20}
className="mr-2 dark:text-gray-100"
/>
<span className="dark:text-gray-100">Open agent</span>
</SecondaryMenuItem>
<SecondaryMenuSeparator />
</>
)}
<SecondaryMenuItem variant="destructive" onSelect={deleteNode}>
<TrashIcon
size={20}
className="mr-2 text-red-500 dark:text-red-400"
/>
<span className="dark:text-red-400">Delete</span>
</SecondaryMenuItem>
</SecondaryMenuContent>
</ContextMenu.Root>
);
}
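Side note on the deleted right-click menu above: it debounced the browser's contextmenu event so a fast second right-click could not re-trigger the menu it was already handling. A minimal sketch of that guard as a reusable hook — the hook name is illustrative; the timestamp check, the capture-phase listener, and stopImmediatePropagation come straight from the removed component:

import { useEffect, useRef } from "react";

// Ignore a second contextmenu event fired within `timeoutMs` of the first, so a
// rapid double right-click cannot re-trigger the menu that is already open.
export function useSuppressDoubleContextMenu(timeoutMs = 300) {
  const containerRef = useRef<HTMLDivElement>(null);
  const lastRightClickTime = useRef<number>(0);

  useEffect(() => {
    const container = containerRef.current;
    if (!container) return;

    function handleContextMenu(e: MouseEvent) {
      const now = Date.now();
      if (now - lastRightClickTime.current < timeoutMs) {
        // Capture phase + stopImmediatePropagation keeps the duplicate event
        // from ever reaching the context-menu trigger.
        e.stopImmediatePropagation();
        lastRightClickTime.current = 0;
        return;
      }
      lastRightClickTime.current = now;
    }

    container.addEventListener("contextmenu", handleContextMenu, true);
    return () =>
      container.removeEventListener("contextmenu", handleContextMenu, true);
  }, [timeoutMs]);

  return containerRef;
}

Attach the returned ref to the wrapper around the ContextMenu trigger, as the removed component did with its container div.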

View File

@@ -1,10 +1,10 @@
import { Alert, AlertDescription } from "@/components/molecules/Alert/Alert";
import { Text } from "@/components/atoms/Text/Text";
import Link from "next/link";
import { useGetV2GetLibraryAgentByGraphId } from "@/app/api/__generated__/endpoints/library/library";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { useQueryStates, parseAsString } from "nuqs";
import { isValidUUID } from "@/app/(platform)/chat/helpers";
import { Text } from "@/components/atoms/Text/Text";
import { isValidUUID } from "@/components/contextual/Chat/helpers";
import { Alert, AlertDescription } from "@/components/molecules/Alert/Alert";
import Link from "next/link";
import { parseAsString, useQueryStates } from "nuqs";
export const WebhookDisclaimer = ({ nodeId }: { nodeId: string }) => {
const [{ flowID }] = useQueryStates({

View File

@@ -1,6 +1,6 @@
export const uiSchema = {
credentials: {
"ui:field": "custom/credential_field",
"ui:field": "credentials",
provider: { "ui:widget": "hidden" },
type: { "ui:widget": "hidden" },
id: { "ui:autofocus": true },

View File

@@ -1,57 +0,0 @@
import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore";
import { FilterChip } from "../FilterChip";
import { categories } from "./constants";
import { FilterSheet } from "../FilterSheet/FilterSheet";
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
export const BlockMenuFilters = () => {
const {
filters,
addFilter,
removeFilter,
categoryCounts,
creators,
addCreator,
removeCreator,
} = useBlockMenuStore();
const handleFilterClick = (filter: GetV2BuilderSearchFilterAnyOfItem) => {
if (filters.includes(filter)) {
removeFilter(filter);
} else {
addFilter(filter);
}
};
const handleCreatorClick = (creator: string) => {
if (creators.includes(creator)) {
removeCreator(creator);
} else {
addCreator(creator);
}
};
return (
<div className="flex flex-wrap gap-2">
<FilterSheet categories={categories} />
{creators.length > 0 &&
creators.map((creator) => (
<FilterChip
key={creator}
name={"Created by " + creator.slice(0, 10) + "..."}
selected={creators.includes(creator)}
onClick={() => handleCreatorClick(creator)}
/>
))}
{categories.map((category) => (
<FilterChip
key={category.key}
name={category.name}
selected={filters.includes(category.key)}
onClick={() => handleFilterClick(category.key)}
number={categoryCounts[category.key] ?? 0}
/>
))}
</div>
);
};

View File

@@ -1,15 +0,0 @@
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
import { CategoryKey } from "./types";
export const categories: Array<{ key: CategoryKey; name: string }> = [
{ key: GetV2BuilderSearchFilterAnyOfItem.blocks, name: "Blocks" },
{
key: GetV2BuilderSearchFilterAnyOfItem.integrations,
name: "Integrations",
},
{
key: GetV2BuilderSearchFilterAnyOfItem.marketplace_agents,
name: "Marketplace agents",
},
{ key: GetV2BuilderSearchFilterAnyOfItem.my_agents, name: "My agents" },
];

View File

@@ -1,26 +0,0 @@
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
export type DefaultStateType =
| "suggestion"
| "all_blocks"
| "input_blocks"
| "action_blocks"
| "output_blocks"
| "integrations"
| "marketplace_agents"
| "my_agents";
export type CategoryKey = GetV2BuilderSearchFilterAnyOfItem;
export interface Filters {
categories: {
blocks: boolean;
integrations: boolean;
marketplace_agents: boolean;
my_agents: boolean;
providers: boolean;
};
createdBy: string[];
}
export type CategoryCounts = Record<CategoryKey, number>;

View File

@@ -1,14 +1,111 @@
import { Text } from "@/components/atoms/Text/Text";
import { useBlockMenuSearch } from "./useBlockMenuSearch";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { LoadingSpinner } from "@/components/__legacy__/ui/loading";
import { SearchResponseItemsItem } from "@/app/api/__generated__/models/searchResponseItemsItem";
import { MarketplaceAgentBlock } from "../MarketplaceAgentBlock";
import { Block } from "../Block";
import { UGCAgentBlock } from "../UGCAgentBlock";
import { getSearchItemType } from "./helper";
import { useBlockMenuStore } from "../../../../stores/blockMenuStore";
import { blockMenuContainerStyle } from "../style";
import { BlockMenuFilters } from "../BlockMenuFilters/BlockMenuFilters";
import { BlockMenuSearchContent } from "../BlockMenuSearchContent/BlockMenuSearchContent";
import { cn } from "@/lib/utils";
import { NoSearchResult } from "../NoSearchResult";
export const BlockMenuSearch = () => {
const {
searchResults,
isFetchingNextPage,
fetchNextPage,
hasNextPage,
searchLoading,
handleAddLibraryAgent,
handleAddMarketplaceAgent,
addingLibraryAgentId,
addingMarketplaceAgentSlug,
} = useBlockMenuSearch();
const { searchQuery } = useBlockMenuStore();
if (searchLoading) {
return (
<div
className={cn(
blockMenuContainerStyle,
"flex items-center justify-center",
)}
>
<LoadingSpinner className="size-13" />
</div>
);
}
if (searchResults.length === 0) {
return <NoSearchResult />;
}
return (
<div className={blockMenuContainerStyle}>
<BlockMenuFilters />
<Text variant="body-medium">Search results</Text>
<BlockMenuSearchContent />
<InfiniteScroll
isFetchingNextPage={isFetchingNextPage}
fetchNextPage={fetchNextPage}
hasNextPage={hasNextPage}
loader={<LoadingSpinner className="size-13" />}
className="space-y-2.5"
>
{searchResults.map((item: SearchResponseItemsItem, index: number) => {
const { type, data } = getSearchItemType(item);
// The backend only supports these 3 types right now; support for integration and AI agent types will be added in follow-up PRs.
switch (type) {
case "store_agent":
return (
<MarketplaceAgentBlock
key={index}
slug={data.slug}
highlightedText={searchQuery}
title={data.agent_name}
image_url={data.agent_image}
creator_name={data.creator}
number_of_runs={data.runs}
loading={addingMarketplaceAgentSlug === data.slug}
onClick={() =>
handleAddMarketplaceAgent({
creator_name: data.creator,
slug: data.slug,
})
}
/>
);
case "block":
return (
<Block
key={index}
title={data.name}
highlightedText={searchQuery}
description={data.description}
blockData={data}
/>
);
case "library_agent":
return (
<UGCAgentBlock
key={index}
title={data.name}
highlightedText={searchQuery}
image_url={data.image_url}
version={data.graph_version}
edited_time={data.updated_at}
isLoading={addingLibraryAgentId === data.id}
onClick={() => handleAddLibraryAgent(data)}
/>
);
default:
return null;
}
})}
</InfiniteScroll>
</div>
);
};
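The switch above relies on getSearchItemType returning a tagged result so each branch can safely read type-specific fields. A minimal sketch of that shape with an exhaustiveness check — the union and its field names are illustrative, not the generated SearchResponseItemsItem model:

// Illustrative tagged union; the real items come from the generated
// SearchResponseItemsItem model and the getSearchItemType helper.
type SearchItem =
  | { type: "store_agent"; data: { slug: string; agent_name: string; creator: string } }
  | { type: "block"; data: { name: string; description: string } }
  | { type: "library_agent"; data: { id: string; name: string; graph_version: number } };

// Narrowing on `type` lets each branch read only the fields that exist on it,
// and the `never` check fails to compile if a new variant is left unhandled.
function describeSearchItem(item: SearchItem): string {
  switch (item.type) {
    case "store_agent":
      return `${item.data.agent_name} by ${item.data.creator}`;
    case "block":
      return item.data.name;
    case "library_agent":
      return `${item.data.name} (v${item.data.graph_version})`;
    default: {
      const _exhaustive: never = item;
      return _exhaustive;
    }
  }
}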

View File

@@ -23,19 +23,9 @@ import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useToast } from "@/components/molecules/Toast/use-toast";
import * as Sentry from "@sentry/nextjs";
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
export const useBlockMenuSearchContent = () => {
const {
searchQuery,
searchId,
setSearchId,
filters,
setCreatorsList,
creators,
setCategoryCounts,
} = useBlockMenuStore();
export const useBlockMenuSearch = () => {
const { searchQuery, searchId, setSearchId } = useBlockMenuStore();
const { toast } = useToast();
const { addAgentToBuilder, addLibraryAgentToBuilder } =
useAddAgentToBuilder();
@@ -67,8 +57,6 @@ export const useBlockMenuSearchContent = () => {
page_size: 8,
search_query: searchQuery,
search_id: searchId,
filter: filters.length > 0 ? filters : undefined,
by_creator: creators.length > 0 ? creators : undefined,
},
{
query: { getNextPageParam: getPaginationNextPageNumber },
@@ -110,26 +98,6 @@ export const useBlockMenuSearchContent = () => {
}
}, [searchQueryData, searchId, setSearchId]);
// from all the results, we need to get all the unique creators
useEffect(() => {
if (!searchQueryData?.pages?.length) {
return;
}
const latestData = okData(searchQueryData.pages.at(-1));
setCategoryCounts(
(latestData?.total_items as Record<
GetV2BuilderSearchFilterAnyOfItem,
number
>) || {
blocks: 0,
integrations: 0,
marketplace_agents: 0,
my_agents: 0,
},
);
setCreatorsList(latestData?.items || []);
}, [searchQueryData]);
useEffect(() => {
if (searchId && !searchQuery) {
resetSearchSession();
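The effect removed above folded every fetched page into the category counts and a de-duplicated creators list. A small sketch of the creator aggregation on its own, assuming each page exposes an items array whose marketplace entries carry a creator name (type and function names are illustrative):

type PageItem = { creator?: string };

// Fold every fetched page into one ordered, de-duplicated list of creator names.
function collectUniqueCreators(pages: Array<{ items: PageItem[] }>): string[] {
  const seen = new Set<string>();
  for (const page of pages) {
    for (const item of page.items) {
      if (item.creator) seen.add(item.creator);
    }
  }
  return Array.from(seen);
}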

View File

@@ -1,108 +0,0 @@
import { SearchResponseItemsItem } from "@/app/api/__generated__/models/searchResponseItemsItem";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { getSearchItemType } from "./helper";
import { MarketplaceAgentBlock } from "../MarketplaceAgentBlock";
import { Block } from "../Block";
import { UGCAgentBlock } from "../UGCAgentBlock";
import { useBlockMenuSearchContent } from "./useBlockMenuSearchContent";
import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore";
import { cn } from "@/lib/utils";
import { blockMenuContainerStyle } from "../style";
import { NoSearchResult } from "../NoSearchResult";
export const BlockMenuSearchContent = () => {
const {
searchResults,
isFetchingNextPage,
fetchNextPage,
hasNextPage,
searchLoading,
handleAddLibraryAgent,
handleAddMarketplaceAgent,
addingLibraryAgentId,
addingMarketplaceAgentSlug,
} = useBlockMenuSearchContent();
const { searchQuery } = useBlockMenuStore();
if (searchLoading) {
return (
<div
className={cn(
blockMenuContainerStyle,
"flex items-center justify-center",
)}
>
<LoadingSpinner className="size-13" />
</div>
);
}
if (searchResults.length === 0) {
return <NoSearchResult />;
}
return (
<InfiniteScroll
isFetchingNextPage={isFetchingNextPage}
fetchNextPage={fetchNextPage}
hasNextPage={hasNextPage}
loader={<LoadingSpinner className="size-13" />}
className="space-y-2.5"
>
{searchResults.map((item: SearchResponseItemsItem, index: number) => {
const { type, data } = getSearchItemType(item);
// The backend only supports these 3 types right now; support for integration and AI agent types will be added in follow-up PRs.
switch (type) {
case "store_agent":
return (
<MarketplaceAgentBlock
key={index}
slug={data.slug}
highlightedText={searchQuery}
title={data.agent_name}
image_url={data.agent_image}
creator_name={data.creator}
number_of_runs={data.runs}
loading={addingMarketplaceAgentSlug === data.slug}
onClick={() =>
handleAddMarketplaceAgent({
creator_name: data.creator,
slug: data.slug,
})
}
/>
);
case "block":
return (
<Block
key={index}
title={data.name}
highlightedText={searchQuery}
description={data.description}
blockData={data}
/>
);
case "library_agent":
return (
<UGCAgentBlock
key={index}
title={data.name}
highlightedText={searchQuery}
image_url={data.image_url}
version={data.graph_version}
edited_time={data.updated_at}
isLoading={addingLibraryAgentId === data.id}
onClick={() => handleAddLibraryAgent(data)}
/>
);
default:
return null;
}
})}
</InfiniteScroll>
);
};

View File

@@ -1,9 +1,7 @@
import { Button } from "@/components/__legacy__/ui/button";
import { cn } from "@/lib/utils";
import { XIcon } from "@phosphor-icons/react";
import { AnimatePresence, motion } from "framer-motion";
import React, { ButtonHTMLAttributes, useState } from "react";
import { X } from "lucide-react";
import React, { ButtonHTMLAttributes } from "react";
interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
selected?: boolean;
@@ -18,51 +16,39 @@ export const FilterChip: React.FC<Props> = ({
className,
...rest
}) => {
const [isHovered, setIsHovered] = useState(false);
return (
<AnimatePresence mode="wait">
<Button
onMouseEnter={() => setIsHovered(true)}
onMouseLeave={() => setIsHovered(false)}
<Button
className={cn(
"group w-fit space-x-1 rounded-[1.5rem] border border-zinc-300 bg-transparent px-[0.625rem] py-[0.375rem] shadow-none transition-transform duration-300 ease-in-out",
"hover:border-violet-500 hover:bg-transparent focus:ring-0 disabled:cursor-not-allowed",
selected && "border-0 bg-violet-700 hover:border",
className,
)}
{...rest}
>
<span
className={cn(
"group w-fit space-x-1 rounded-[1.5rem] border border-zinc-300 bg-transparent px-[0.625rem] py-[0.375rem] shadow-none",
"hover:border-violet-500 hover:bg-transparent focus:ring-0 disabled:cursor-not-allowed",
selected && "border-0 bg-violet-700 hover:border",
className,
"font-sans text-sm font-medium leading-[1.375rem] text-zinc-600 group-hover:text-zinc-600 group-disabled:text-zinc-400",
selected && "text-zinc-50",
)}
{...rest}
>
<span
className={cn(
"font-sans text-sm font-medium leading-[1.375rem] text-zinc-600 group-hover:text-zinc-600 group-disabled:text-zinc-400",
selected && "text-zinc-50",
{name}
</span>
{selected && (
<>
<span className="flex h-4 w-4 items-center justify-center rounded-full bg-zinc-50 transition-all duration-300 ease-in-out group-hover:hidden">
<X
className="h-3 w-3 rounded-full text-violet-700"
strokeWidth={2}
/>
</span>
{number !== undefined && (
<span className="hidden h-[1.375rem] items-center rounded-[1.25rem] bg-violet-700 p-[0.375rem] text-zinc-50 transition-all duration-300 ease-in-out animate-in fade-in zoom-in group-hover:flex">
{number > 100 ? "100+" : number}
</span>
)}
>
{name}
</span>
{selected && !isHovered && (
<motion.span
initial={{ opacity: 0.5, scale: 0.5, filter: "blur(20px)" }}
animate={{ opacity: 1, scale: 1, filter: "blur(0px)" }}
exit={{ opacity: 0.5, scale: 0.5, filter: "blur(20px)" }}
transition={{ duration: 0.3, type: "spring", bounce: 0.2 }}
className="flex h-4 w-4 items-center justify-center rounded-full bg-zinc-50"
>
<XIcon size={12} weight="bold" className="text-violet-700" />
</motion.span>
)}
{number !== undefined && isHovered && (
<motion.span
initial={{ opacity: 0.5, scale: 0.5, filter: "blur(10px)" }}
animate={{ opacity: 1, scale: 1, filter: "blur(0px)" }}
exit={{ opacity: 0.5, scale: 0.5, filter: "blur(10px)" }}
transition={{ duration: 0.3, type: "spring", bounce: 0.2 }}
className="flex h-[1.375rem] items-center rounded-[1.25rem] bg-violet-700 p-[0.375rem] text-zinc-50"
>
{number > 100 ? "100+" : number}
</motion.span>
)}
</Button>
</AnimatePresence>
</>
)}
</Button>
);
};

View File

@@ -1,156 +0,0 @@
import { FilterChip } from "../FilterChip";
import { cn } from "@/lib/utils";
import { CategoryKey } from "../BlockMenuFilters/types";
import { AnimatePresence, motion } from "framer-motion";
import { XIcon } from "@phosphor-icons/react";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { Separator } from "@/components/__legacy__/ui/separator";
import { Checkbox } from "@/components/__legacy__/ui/checkbox";
import { useFilterSheet } from "./useFilterSheet";
import { INITIAL_CREATORS_TO_SHOW } from "./constant";
export function FilterSheet({
categories,
}: {
categories: Array<{ key: CategoryKey; name: string }>;
}) {
const {
isOpen,
localCategories,
localCreators,
displayedCreatorsCount,
handleLocalCategoryChange,
handleToggleShowMoreCreators,
handleLocalCreatorChange,
handleClearFilters,
handleCloseButton,
handleApplyFilters,
hasLocalActiveFilters,
visibleCreators,
creators,
handleOpenFilters,
hasActiveFilters,
} = useFilterSheet();
return (
<div className="m-0 inline w-fit p-0">
<FilterChip
name={hasActiveFilters() ? "Edit filters" : "All filters"}
onClick={handleOpenFilters}
/>
<AnimatePresence>
{isOpen && (
<motion.div
className={cn(
"absolute bottom-2 left-2 top-2 z-20 w-3/4 max-w-[22.5rem] space-y-4 overflow-hidden rounded-[0.75rem] bg-white pb-4 shadow-[0_4px_12px_2px_rgba(0,0,0,0.1)]",
)}
initial={{ x: "-100%", filter: "blur(10px)" }}
animate={{ x: 0, filter: "blur(0px)" }}
exit={{ x: "-110%", filter: "blur(10px)" }}
transition={{ duration: 0.4, type: "spring", bounce: 0.2 }}
>
{/* Top section */}
<div className="flex items-center justify-between px-5 pt-4">
<Text variant="body">Filters</Text>
<Button
className="p-0"
variant="ghost"
size="icon"
onClick={handleCloseButton}
>
<XIcon size={20} />
</Button>
</div>
<Separator className="h-[1px] w-full text-zinc-300" />
{/* Category section */}
<div className="space-y-4 px-5">
<Text variant="large">Categories</Text>
<div className="space-y-2">
{categories.map((category) => (
<div
key={category.key}
className="flex items-center space-x-2"
>
<Checkbox
id={category.key}
checked={localCategories.includes(category.key)}
onCheckedChange={() =>
handleLocalCategoryChange(category.key)
}
className="border border-[#D4D4D4] shadow-none data-[state=checked]:border-none data-[state=checked]:bg-violet-700 data-[state=checked]:text-white"
/>
<label
htmlFor={category.key}
className="font-sans text-sm leading-[1.375rem] text-zinc-600"
>
{category.name}
</label>
</div>
))}
</div>
</div>
{/* Created by section */}
<div className="space-y-4 px-5">
<p className="font-sans text-base font-medium text-zinc-800">
Created by
</p>
<div className="space-y-2">
{visibleCreators.map((creator, i) => (
<div key={i} className="flex items-center space-x-2">
<Checkbox
id={`creator-${creator}`}
checked={localCreators.includes(creator)}
onCheckedChange={() => handleLocalCreatorChange(creator)}
className="border border-[#D4D4D4] shadow-none data-[state=checked]:border-none data-[state=checked]:bg-violet-700 data-[state=checked]:text-white"
/>
<label
htmlFor={`creator-${creator}`}
className="font-sans text-sm leading-[1.375rem] text-zinc-600"
>
{creator}
</label>
</div>
))}
</div>
{creators.length > INITIAL_CREATORS_TO_SHOW && (
<Button
variant={"link"}
className="m-0 p-0 font-sans text-sm font-medium leading-[1.375rem] text-zinc-800 underline hover:text-zinc-600"
onClick={handleToggleShowMoreCreators}
>
{displayedCreatorsCount < creators.length ? "More" : "Less"}
</Button>
)}
</div>
{/* Footer section */}
<div className="fixed bottom-0 flex w-full justify-between gap-3 border-t border-zinc-200 bg-white px-5 py-3">
<Button
size="small"
variant={"outline"}
onClick={handleClearFilters}
className="rounded-[8px] px-2 py-1.5"
>
Clear
</Button>
<Button
size="small"
onClick={handleApplyFilters}
disabled={!hasLocalActiveFilters()}
className="rounded-[8px] px-2 py-1.5"
>
Apply filters
</Button>
</div>
</motion.div>
)}
</AnimatePresence>
</div>
);
}

View File

@@ -1,100 +0,0 @@
import { useBlockMenuStore } from "@/app/(platform)/build/stores/blockMenuStore";
import { useState } from "react";
import { INITIAL_CREATORS_TO_SHOW } from "./constant";
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
export const useFilterSheet = () => {
const { filters, creators_list, creators, setFilters, setCreators } =
useBlockMenuStore();
const [isOpen, setIsOpen] = useState(false);
const [localCategories, setLocalCategories] =
useState<GetV2BuilderSearchFilterAnyOfItem[]>(filters);
const [localCreators, setLocalCreators] = useState<string[]>(creators);
const [displayedCreatorsCount, setDisplayedCreatorsCount] = useState(
INITIAL_CREATORS_TO_SHOW,
);
const handleLocalCategoryChange = (
category: GetV2BuilderSearchFilterAnyOfItem,
) => {
setLocalCategories((prev) => {
if (prev.includes(category)) {
return prev.filter((c) => c !== category);
}
return [...prev, category];
});
};
const hasActiveFilters = () => {
return filters.length > 0 || creators.length > 0;
};
const handleToggleShowMoreCreators = () => {
if (displayedCreatorsCount < creators.length) {
setDisplayedCreatorsCount(creators.length);
} else {
setDisplayedCreatorsCount(INITIAL_CREATORS_TO_SHOW);
}
};
const handleLocalCreatorChange = (creator: string) => {
setLocalCreators((prev) => {
if (prev.includes(creator)) {
return prev.filter((c) => c !== creator);
}
return [...prev, creator];
});
};
const handleClearFilters = () => {
setLocalCategories([]);
setLocalCreators([]);
setDisplayedCreatorsCount(INITIAL_CREATORS_TO_SHOW);
};
const handleCloseButton = () => {
setIsOpen(false);
setLocalCategories(filters);
setLocalCreators(creators);
setDisplayedCreatorsCount(INITIAL_CREATORS_TO_SHOW);
};
const handleApplyFilters = () => {
setFilters(localCategories);
setCreators(localCreators);
setIsOpen(false);
};
const handleOpenFilters = () => {
setIsOpen(true);
setLocalCategories(filters);
setLocalCreators(creators);
};
const hasLocalActiveFilters = () => {
return localCategories.length > 0 || localCreators.length > 0;
};
const visibleCreators = creators_list.slice(0, displayedCreatorsCount);
return {
creators,
isOpen,
setIsOpen,
localCategories,
localCreators,
displayedCreatorsCount,
setDisplayedCreatorsCount,
handleLocalCategoryChange,
handleToggleShowMoreCreators,
handleLocalCreatorChange,
handleClearFilters,
handleCloseButton,
handleOpenFilters,
handleApplyFilters,
hasLocalActiveFilters,
visibleCreators,
hasActiveFilters,
};
};
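The deleted hook keeps a local draft of the filters: it seeds the draft from the store when the sheet opens, edits it locally, writes it back only on Apply, and discards it on Close. A trimmed sketch of that lifecycle with the store reduced to plain get/set callbacks (names are illustrative):

import { useState } from "react";

// Draft-filter lifecycle: edits stay local until Apply, so closing the sheet
// without applying leaves the store untouched.
export function useDraftFilters(
  getApplied: () => string[],
  setApplied: (filters: string[]) => void,
) {
  const [isOpen, setIsOpen] = useState(false);
  const [draft, setDraft] = useState<string[]>([]);

  function open() {
    setDraft(getApplied()); // seed the draft from the currently applied filters
    setIsOpen(true);
  }

  function toggle(filter: string) {
    setDraft((prev) =>
      prev.includes(filter) ? prev.filter((f) => f !== filter) : [...prev, filter],
    );
  }

  function apply() {
    setApplied(draft); // commit the draft to the store
    setIsOpen(false);
  }

  function close() {
    setDraft(getApplied()); // discard local edits
    setIsOpen(false);
  }

  return { isOpen, draft, open, toggle, apply, close };
}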

View File

@@ -1,9 +1,9 @@
import type { OutputMetadata } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
import type { OutputMetadata } from "@/components/contextual/OutputRenderers";
import {
globalRegistry,
OutputActions,
OutputItem,
} from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
} from "@/components/contextual/OutputRenderers";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { beautifyString } from "@/lib/utils";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";

View File

@@ -3,7 +3,6 @@ import {
CustomNodeData,
} from "@/app/(platform)/build/components/legacy-builder/CustomNode/CustomNode";
import { NodeTableInput } from "@/app/(platform)/build/components/legacy-builder/NodeTableInput";
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { Button } from "@/components/__legacy__/ui/button";
import { Calendar } from "@/components/__legacy__/ui/calendar";
import { LocalValuedInput } from "@/components/__legacy__/ui/input";
@@ -28,6 +27,7 @@ import {
SelectValue,
} from "@/components/__legacy__/ui/select";
import { Switch } from "@/components/atoms/Switch/Switch";
import { CredentialsInput } from "@/components/contextual/CredentialsInputs/CredentialsInputs";
import { GoogleDrivePickerInput } from "@/components/contextual/GoogleDrivePicker/GoogleDrivePickerInput";
import {
BlockIOArraySubSchema,

View File

@@ -1,30 +1,12 @@
import { create } from "zustand";
import { DefaultStateType } from "../components/NewControlPanel/NewBlockMenu/types";
import { SearchResponseItemsItem } from "@/app/api/__generated__/models/searchResponseItemsItem";
import { getSearchItemType } from "../components/NewControlPanel/NewBlockMenu/BlockMenuSearchContent/helper";
import { StoreAgent } from "@/app/api/__generated__/models/storeAgent";
import { GetV2BuilderSearchFilterAnyOfItem } from "@/app/api/__generated__/models/getV2BuilderSearchFilterAnyOfItem";
type BlockMenuStore = {
searchQuery: string;
searchId: string | undefined;
defaultState: DefaultStateType;
integration: string | undefined;
filters: GetV2BuilderSearchFilterAnyOfItem[];
creators: string[];
creators_list: string[];
categoryCounts: Record<GetV2BuilderSearchFilterAnyOfItem, number>;
setCategoryCounts: (
counts: Record<GetV2BuilderSearchFilterAnyOfItem, number>,
) => void;
setCreatorsList: (searchData: SearchResponseItemsItem[]) => void;
addCreator: (creator: string) => void;
setCreators: (creators: string[]) => void;
removeCreator: (creator: string) => void;
addFilter: (filter: GetV2BuilderSearchFilterAnyOfItem) => void;
setFilters: (filters: GetV2BuilderSearchFilterAnyOfItem[]) => void;
removeFilter: (filter: GetV2BuilderSearchFilterAnyOfItem) => void;
setSearchQuery: (query: string) => void;
setSearchId: (id: string | undefined) => void;
setDefaultState: (state: DefaultStateType) => void;
@@ -37,44 +19,11 @@ export const useBlockMenuStore = create<BlockMenuStore>((set) => ({
searchId: undefined,
defaultState: DefaultStateType.SUGGESTION,
integration: undefined,
filters: [],
creators: [], // creator filters that are applied to the search results
creators_list: [], // all creators that are available to filter by
categoryCounts: {
blocks: 0,
integrations: 0,
marketplace_agents: 0,
my_agents: 0,
},
setCategoryCounts: (counts) => set({ categoryCounts: counts }),
setCreatorsList: (searchData) => {
const marketplaceAgents = searchData.filter((item) => {
return getSearchItemType(item).type === "store_agent";
}) as StoreAgent[];
const newCreators = marketplaceAgents.map((agent) => agent.creator);
set((state) => ({
creators_list: Array.from(
new Set([...state.creators_list, ...newCreators]),
),
}));
},
setCreators: (creators) => set({ creators }),
setFilters: (filters) => set({ filters }),
setSearchQuery: (query) => set({ searchQuery: query }),
setSearchId: (id) => set({ searchId: id }),
setDefaultState: (state) => set({ defaultState: state }),
setIntegration: (integration) => set({ integration }),
addFilter: (filter) =>
set((state) => ({ filters: [...state.filters, filter] })),
removeFilter: (filter) =>
set((state) => ({ filters: state.filters.filter((f) => f !== filter) })),
addCreator: (creator) =>
set((state) => ({ creators: [...state.creators, creator] })),
removeCreator: (creator) =>
set((state) => ({ creators: state.creators.filter((c) => c !== creator) })),
reset: () =>
set({
searchQuery: "",

View File

@@ -68,9 +68,6 @@ type NodeStore = {
clearAllNodeErrors: () => void; // Add this
syncHardcodedValuesWithHandleIds: (nodeId: string) => void;
// Credentials optional helpers
setCredentialsOptional: (nodeId: string, optional: boolean) => void;
};
export const useNodeStore = create<NodeStore>((set, get) => ({
@@ -229,9 +226,6 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
...(node.data.metadata?.customized_name !== undefined && {
customized_name: node.data.metadata.customized_name,
}),
...(node.data.metadata?.credentials_optional !== undefined && {
credentials_optional: node.data.metadata.credentials_optional,
}),
},
};
},
@@ -348,30 +342,4 @@ export const useNodeStore = create<NodeStore>((set, get) => ({
}));
}
},
setCredentialsOptional: (nodeId: string, optional: boolean) => {
set((state) => ({
nodes: state.nodes.map((n) =>
n.id === nodeId
? {
...n,
data: {
...n.data,
metadata: {
...n.data.metadata,
credentials_optional: optional,
},
},
}
: n,
),
}));
const newState = {
nodes: get().nodes,
edges: useEdgeStore.getState().edges,
};
useHistoryStore.getState().pushState(newState);
},
}));
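The removed setCredentialsOptional shows the store's general recipe for per-node metadata: map over the nodes, spread the matching node's data and metadata, then record the new graph state for undo. A minimal sketch of the update step alone, with the history push represented by a callback since useHistoryStore and useEdgeStore sit outside this snippet:

type GraphNode = { id: string; data: { metadata?: Record<string, unknown> } };

// Return a new nodes array with one node's metadata key updated immutably,
// then hand the result to the caller (e.g. to push onto an undo history).
function setNodeMetadata(
  nodes: GraphNode[],
  nodeId: string,
  key: string,
  value: unknown,
  onChanged?: (next: GraphNode[]) => void,
): GraphNode[] {
  const next = nodes.map((n) =>
    n.id === nodeId
      ? { ...n, data: { ...n.data, metadata: { ...n.data.metadata, [key]: value } } }
      : n,
  );
  onChanged?.(next);
  return next;
}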

View File

@@ -1,16 +1,24 @@
"use client";
import { useChatPage } from "./useChatPage";
import { ChatContainer } from "./components/ChatContainer/ChatContainer";
import { ChatErrorState } from "./components/ChatErrorState/ChatErrorState";
import { ChatLoadingState } from "./components/ChatLoadingState/ChatLoadingState";
import { useGetFlag, Flag } from "@/services/feature-flags/use-get-flag";
import { useRouter } from "next/navigation";
import { Button } from "@/components/__legacy__/ui/button";
import { scrollbarStyles } from "@/components/styles/scrollbars";
import { cn } from "@/lib/utils";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { X } from "@phosphor-icons/react";
import { usePathname, useRouter } from "next/navigation";
import { useEffect } from "react";
import { Drawer } from "vaul";
import { ChatContainer } from "@/components/contextual/Chat/components/ChatContainer/ChatContainer";
import { ChatErrorState } from "@/components/contextual/Chat/components/ChatErrorState/ChatErrorState";
import { ChatLoadingState } from "@/components/contextual/Chat/components/ChatLoadingState/ChatLoadingState";
import { useChatPage } from "./useChatPage";
export default function ChatPage() {
const isChatEnabled = useGetFlag(Flag.CHAT);
const router = useRouter();
const pathname = usePathname();
const isOpen = pathname === "/chat";
const {
messages,
isLoading,
@@ -28,56 +36,88 @@ export default function ChatPage() {
}
}, [isChatEnabled, router]);
function handleOpenChange(open: boolean) {
if (!open) {
router.replace("/marketplace");
}
}
if (isChatEnabled === null || isChatEnabled === false) {
return null;
}
return (
<div className="flex h-full flex-col">
{/* Header */}
<header className="border-b border-zinc-200 bg-white p-4 dark:border-zinc-800 dark:bg-zinc-900">
<div className="container mx-auto flex items-center justify-between">
<h1 className="text-xl font-semibold">Chat</h1>
{sessionId && (
<div className="flex items-center gap-4">
<span className="text-sm text-zinc-600 dark:text-zinc-400">
Session: {sessionId.slice(0, 8)}...
</span>
<button
onClick={clearSession}
className="text-sm text-zinc-600 hover:text-zinc-900 dark:text-zinc-400 dark:hover:text-zinc-100"
>
New Chat
</button>
</div>
<Drawer.Root
open={isOpen}
onOpenChange={handleOpenChange}
direction="right"
modal={false}
>
<Drawer.Portal>
<Drawer.Content
className={cn(
"fixed right-0 top-0 z-50 flex h-full w-1/2 flex-col border-l border-zinc-200 bg-white dark:border-zinc-800 dark:bg-zinc-900",
scrollbarStyles,
)}
</div>
</header>
>
{/* Header */}
<header className="shrink-0 border-b border-zinc-200 bg-white p-4 dark:border-zinc-800 dark:bg-zinc-900">
<div className="flex items-center justify-between">
<Drawer.Title className="text-xl font-semibold">
Chat
</Drawer.Title>
<div className="flex items-center gap-4">
{sessionId && (
<>
<span className="text-sm text-zinc-600 dark:text-zinc-400">
Session: {sessionId.slice(0, 8)}...
</span>
<button
onClick={clearSession}
className="text-sm text-zinc-600 hover:text-zinc-900 dark:text-zinc-400 dark:hover:text-zinc-100"
>
New Chat
</button>
</>
)}
<Button
variant="link"
aria-label="Close"
onClick={() => handleOpenChange(false)}
className="!focus-visible:ring-0 p-0"
>
<X width="1.5rem" />
</Button>
</div>
</div>
</header>
{/* Main Content */}
<main className="container mx-auto flex flex-1 flex-col overflow-hidden">
{/* Loading State - show when explicitly loading/creating OR when we don't have a session yet and no error */}
{(isLoading || isCreating || (!sessionId && !error)) && (
<ChatLoadingState
message={isCreating ? "Creating session..." : "Loading..."}
/>
)}
{/* Main Content */}
<main className="flex min-h-0 flex-1 flex-col overflow-hidden">
{/* Loading State - show when explicitly loading/creating OR when we don't have a session yet and no error */}
{(isLoading || isCreating || (!sessionId && !error)) && (
<ChatLoadingState
message={isCreating ? "Creating session..." : "Loading..."}
/>
)}
{/* Error State */}
{error && !isLoading && (
<ChatErrorState error={error} onRetry={createSession} />
)}
{/* Error State */}
{error && !isLoading && (
<ChatErrorState error={error} onRetry={createSession} />
)}
{/* Session Content */}
{sessionId && !isLoading && !error && (
<ChatContainer
sessionId={sessionId}
initialMessages={messages}
onRefreshSession={refreshSession}
className="flex-1"
/>
)}
</main>
</div>
{/* Session Content */}
{sessionId && !isLoading && !error && (
<ChatContainer
sessionId={sessionId}
initialMessages={messages}
onRefreshSession={refreshSession}
className="flex-1"
/>
)}
</main>
</Drawer.Content>
</Drawer.Portal>
</Drawer.Root>
);
}
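The reworked page derives the drawer's visibility from the URL: it is open exactly while the pathname is /chat, and closing it navigates back to /marketplace via router.replace. A minimal sketch of that route-driven wiring with the chat-specific content stripped out (the routes come from the diff; the hook name is illustrative):

"use client";

import { usePathname, useRouter } from "next/navigation";

// Drawer visibility as a pure function of the route: deep links to the route
// open the panel, and there is no local isOpen state to keep in sync.
export function useRouteDrawer(route = "/chat", fallback = "/marketplace") {
  const pathname = usePathname();
  const router = useRouter();

  const isOpen = pathname === route;

  function onOpenChange(open: boolean) {
    if (!open) {
      // replace() avoids stacking a history entry for every close.
      router.replace(fallback);
    }
  }

  return { isOpen, onOpenChange };
}

The returned pair plugs straight into Drawer.Root's open and onOpenChange props, as the component above does with isOpen and handleOpenChange.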

View File

@@ -1,11 +1,11 @@
"use client";
import { useEffect, useRef } from "react";
import { useRouter, useSearchParams } from "next/navigation";
import { toast } from "sonner";
import { useChatSession } from "@/app/(platform)/chat/useChatSession";
import { useChatSession } from "@/components/contextual/Chat/useChatSession";
import { useChatStream } from "@/components/contextual/Chat/useChatStream";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useChatStream } from "@/app/(platform)/chat/useChatStream";
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect, useRef } from "react";
import { toast } from "sonner";
export function useChatPage() {
const router = useRouter();

View File

@@ -1,13 +1,14 @@
import { Navbar } from "@/components/layout/Navbar/Navbar";
import { AdminImpersonationBanner } from "./admin/components/AdminImpersonationBanner";
import { ReactNode } from "react";
import { AdminImpersonationBanner } from "./admin/components/AdminImpersonationBanner";
import { PlatformLayoutContent } from "./PlatformLayoutContent";
export default function PlatformLayout({ children }: { children: ReactNode }) {
return (
<main className="flex h-screen w-full flex-col">
<PlatformLayoutContent>
<Navbar />
<AdminImpersonationBanner />
<section className="flex-1">{children}</section>
</main>
{children}
</PlatformLayoutContent>
);
}

View File

@@ -3,8 +3,8 @@
import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Text } from "@/components/atoms/Text/Text";
import type { CredentialsMetaInput } from "@/lib/autogpt-server-api/types";
import { CredentialsInput } from "../CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../RunAgentInputs/RunAgentInputs";
import { CredentialsInput } from "../../../../../../../../../../components/contextual/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../../../../../../../../../../components/contextual/RunAgentInputs/RunAgentInputs";
import { getAgentCredentialsFields, getAgentInputFields } from "./helpers";
type Props = {

View File

@@ -34,7 +34,6 @@ type Props = {
onSelectCredentials: (newValue?: CredentialsMetaInput) => void;
onLoaded?: (loaded: boolean) => void;
readOnly?: boolean;
isOptional?: boolean;
showTitle?: boolean;
};
@@ -46,7 +45,6 @@ export function CredentialsInput({
siblingInputs,
onLoaded,
readOnly = false,
isOptional = false,
showTitle = true,
}: Props) {
const hookData = useCredentialsInput({
@@ -56,7 +54,6 @@ export function CredentialsInput({
siblingInputs,
onLoaded,
readOnly,
isOptional,
});
if (!isLoaded(hookData)) {
@@ -97,14 +94,7 @@ export function CredentialsInput({
<div className={cn("mb-6", className)}>
{showTitle && (
<div className="mb-2 flex items-center gap-2">
<Text variant="large-medium">
{displayName} credentials
{isOptional && (
<span className="ml-1 text-sm font-normal text-gray-500">
(optional)
</span>
)}
</Text>
<Text variant="large-medium">{displayName} credentials</Text>
{schema.description && (
<InformationTooltip description={schema.description} />
)}
@@ -113,16 +103,14 @@ export function CredentialsInput({
{hasCredentialsToShow ? (
<>
{(credentialsToShow.length > 1 || isOptional) && !readOnly ? (
{credentialsToShow.length > 1 && !readOnly ? (
<CredentialsSelect
credentials={credentialsToShow}
provider={provider}
displayName={displayName}
selectedCredentials={selectedCredential}
onSelectCredential={handleCredentialSelect}
onClearCredential={() => onSelectCredential(undefined)}
readOnly={readOnly}
allowNone={isOptional}
/>
) : (
<div className="mb-4 space-y-2">

View File

@@ -23,9 +23,7 @@ interface Props {
displayName: string;
selectedCredentials?: CredentialsMetaInput;
onSelectCredential: (credentialId: string) => void;
onClearCredential?: () => void;
readOnly?: boolean;
allowNone?: boolean;
}
export function CredentialsSelect({
@@ -34,30 +32,20 @@ export function CredentialsSelect({
displayName,
selectedCredentials,
onSelectCredential,
onClearCredential,
readOnly = false,
allowNone = true,
}: Props) {
// Auto-select first credential if none is selected (only if allowNone is false)
// Auto-select first credential if none is selected
useEffect(() => {
if (!allowNone && !selectedCredentials && credentials.length > 0) {
if (!selectedCredentials && credentials.length > 0) {
onSelectCredential(credentials[0].id);
}
}, [allowNone, selectedCredentials, credentials, onSelectCredential]);
const handleValueChange = (value: string) => {
if (value === "__none__") {
onClearCredential?.();
} else {
onSelectCredential(value);
}
};
}, [selectedCredentials, credentials, onSelectCredential]);
return (
<div className="mb-4 w-full">
<Select
value={selectedCredentials?.id || (allowNone ? "__none__" : "")}
onValueChange={handleValueChange}
value={selectedCredentials?.id || ""}
onValueChange={(value) => onSelectCredential(value)}
>
<SelectTrigger className="h-auto min-h-12 w-full rounded-medium border-zinc-200 p-0 pr-4 shadow-none">
{selectedCredentials ? (
@@ -82,15 +70,6 @@ export function CredentialsSelect({
)}
</SelectTrigger>
<SelectContent>
{allowNone && (
<SelectItem key="__none__" value="__none__">
<div className="flex items-center gap-2">
<Text variant="body" className="tracking-tight text-gray-500">
None (skip this credential)
</Text>
</div>
</SelectItem>
)}
{credentials.map((credential) => (
<SelectItem key={credential.id} value={credential.id}>
<div className="flex items-center gap-2">
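With the "None (skip this credential)" option gone, the select now always auto-fills the first credential when nothing is chosen. A small sketch of that effect in isolation, with the credential type reduced to an id (the hook name is illustrative):

import { useEffect } from "react";

type CredentialMeta = { id: string };

// Auto-select the first available credential whenever nothing is selected yet.
export function useAutoSelectFirstCredential(
  credentials: CredentialMeta[],
  selectedId: string | undefined,
  onSelectCredential: (credentialId: string) => void,
) {
  useEffect(() => {
    if (!selectedId && credentials.length > 0) {
      onSelectCredential(credentials[0].id);
    }
  }, [selectedId, credentials, onSelectCredential]);
}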

View File

@@ -22,7 +22,6 @@ type Params = {
siblingInputs?: Record<string, any>;
onLoaded?: (loaded: boolean) => void;
readOnly?: boolean;
isOptional?: boolean;
};
export function useCredentialsInput({
@@ -32,7 +31,6 @@ export function useCredentialsInput({
siblingInputs,
onLoaded,
readOnly = false,
isOptional = false,
}: Params) {
const [isAPICredentialsModalOpen, setAPICredentialsModalOpen] =
useState(false);
@@ -101,20 +99,13 @@ export function useCredentialsInput({
: null;
}, [credentials]);
// Auto-select the one available credential (only if not optional)
// Auto-select the one available credential
useEffect(() => {
if (readOnly) return;
if (isOptional) return; // Don't auto-select when credential is optional
if (singleCredential && !selectedCredential) {
onSelectCredential(singleCredential);
}
}, [
singleCredential,
selectedCredential,
onSelectCredential,
readOnly,
isOptional,
]);
}, [singleCredential, selectedCredential, onSelectCredential, readOnly]);
if (
!credentials ||

View File

@@ -1,14 +1,13 @@
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { Input } from "@/components/atoms/Input/Input";
import { CredentialsInput } from "@/components/contextual/CredentialsInputs/CredentialsInputs";
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
import { RunAgentInputs } from "../../../RunAgentInputs/RunAgentInputs";
import { RunAgentInputs } from "../../../../../../../../../../../../components/contextual/RunAgentInputs/RunAgentInputs";
import { useRunAgentModalContext } from "../../context";
import { ModalSection } from "../ModalSection/ModalSection";
import { WebhookTriggerBanner } from "../WebhookTriggerBanner/WebhookTriggerBanner";
export function ModalRunSection() {
const {
agent,
defaultRunType,
presetName,
setPresetName,
@@ -25,11 +24,6 @@ export function ModalRunSection() {
const inputFields = Object.entries(agentInputFields || {});
const credentialFields = Object.entries(agentCredentialsInputFields || {});
// Get the list of required credentials from the schema
const requiredCredentials = new Set(
(agent.credentials_input_schema?.required as string[]) || [],
);
return (
<div className="flex flex-col gap-4">
{defaultRunType === "automatic-trigger" ||
@@ -105,12 +99,14 @@ export function ModalRunSection() {
schema={
{ ...inputSubSchema, discriminator: undefined } as any
}
selectedCredentials={inputCredentials?.[key]}
selectedCredentials={
(inputCredentials && inputCredentials[key]) ??
inputSubSchema.default
}
onSelectCredentials={(value) =>
setInputCredentialsValue(key, value)
}
siblingInputs={inputValues}
isOptional={!requiredCredentials.has(key)}
/>
),
)}
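The removed requiredCredentials set was read from the JSON-schema-style required array on the agent's credentials input schema and used to flag non-required fields as optional. A small sketch of that derivation, with the schema shape trimmed to the fields used here:

type CredentialsInputSchema = {
  properties?: Record<string, unknown>;
  required?: string[];
};

// A credential field counts as optional when it is absent from the schema's
// `required` list.
function isCredentialOptional(
  schema: CredentialsInputSchema | undefined,
  fieldKey: string,
): boolean {
  const required = new Set(schema?.required ?? []);
  return !required.has(fieldKey);
}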

View File

@@ -163,21 +163,15 @@ export function useAgentRunModal(
}, [agentInputSchema.required, inputValues]);
const [allCredentialsAreSet, missingCredentials] = useMemo(() => {
// Only check required credentials from schema, not all properties
// Credentials marked as optional in node metadata won't be in the required array
const requiredCredentials = new Set(
(agent.credentials_input_schema?.required as string[]) || [],
const availableCredentials = new Set(Object.keys(inputCredentials));
const allCredentials = new Set(
Object.keys(agentCredentialsInputFields || {}) ?? [],
);
const missing = [...allCredentials].filter(
(key) => !availableCredentials.has(key),
);
// Check if required credentials have valid id (not just key existence)
// A credential is valid only if it has an id field set
const missing = [...requiredCredentials].filter((key) => {
const cred = inputCredentials[key];
return !cred || !cred.id;
});
return [missing.length === 0, missing];
}, [agent.credentials_input_schema, inputCredentials]);
}, [agentCredentialsInputFields, inputCredentials]);
const credentialsRequired = useMemo(
() => Object.keys(agentCredentialsInputFields || {}).length > 0,
@@ -245,18 +239,12 @@ export function useAgentRunModal(
});
} else {
// Manual execution
// Filter out incomplete credentials (optional ones not selected)
// Only send credentials that have a valid id field
const validCredentials = Object.fromEntries(
Object.entries(inputCredentials).filter(([_, cred]) => cred && cred.id),
);
executeGraphMutation.mutate({
graphId: agent.graph_id,
graphVersion: agent.graph_version,
data: {
inputs: inputValues,
credentials_inputs: validCredentials,
credentials_inputs: inputCredentials,
source: "library",
},
});
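Both versions of the memo above boil down to "which credential fields are still missing"; they differ only in the candidate set (schema-required keys with a selected id versus every declared credential field). A pure-function sketch of the newer variant, assuming inputCredentials maps field keys to selected credential metadata:

type CredentialsMeta = { id: string };

// Returns [allSet, missingKeys]: a field is missing when no credential at all
// has been selected for it.
function checkCredentials(
  credentialFields: Record<string, unknown>,
  inputCredentials: Record<string, CredentialsMeta>,
): [boolean, string[]] {
  const available = new Set(Object.keys(inputCredentials));
  const missing = Object.keys(credentialFields).filter(
    (key) => !available.has(key),
  );
  return [missing.length === 0, missing];
}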

View File

@@ -1,25 +1,17 @@
"use client";
import { getV1GetGraphVersion } from "@/app/api/__generated__/endpoints/graphs/graphs";
import {
getGetV2ListLibraryAgentsQueryKey,
useDeleteV2DeleteLibraryAgent,
} from "@/app/api/__generated__/endpoints/library/library";
import { GraphExecutionJobInfo } from "@/app/api/__generated__/models/graphExecutionJobInfo";
import { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExecutionMeta";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { LibraryAgentPreset } from "@/app/api/__generated__/models/libraryAgentPreset";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { ShowMoreText } from "@/components/molecules/ShowMoreText/ShowMoreText";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { exportAsJSONFile } from "@/lib/utils";
import { formatDate } from "@/lib/utils/time";
import { useQueryClient } from "@tanstack/react-query";
import Link from "next/link";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { RunAgentModal } from "../modals/RunAgentModal/RunAgentModal";
import { RunDetailCard } from "../selected-views/RunDetailCard/RunDetailCard";
import { EmptyTasksIllustration } from "./EmptyTasksIllustration";
@@ -38,41 +30,6 @@ export function EmptyTasks({
onScheduleCreated,
}: Props) {
const { toast } = useToast();
const queryClient = useQueryClient();
const router = useRouter();
const [showDeleteDialog, setShowDeleteDialog] = useState(false);
const [isDeletingAgent, setIsDeletingAgent] = useState(false);
const { mutateAsync: deleteAgent } = useDeleteV2DeleteLibraryAgent();
async function handleDeleteAgent() {
if (!agent.id) return;
setIsDeletingAgent(true);
try {
await deleteAgent({ libraryAgentId: agent.id });
await queryClient.refetchQueries({
queryKey: getGetV2ListLibraryAgentsQueryKey(),
});
toast({ title: "Agent deleted" });
setShowDeleteDialog(false);
router.push("/library");
} catch (error: unknown) {
toast({
title: "Failed to delete agent",
description:
error instanceof Error
? error.message
: "An unexpected error occurred.",
variant: "destructive",
});
} finally {
setIsDeletingAgent(false);
}
}
async function handleExport() {
try {
@@ -190,50 +147,9 @@ export function EmptyTasks({
<Button variant="secondary" size="small" onClick={handleExport}>
Export agent to file
</Button>
<Button
variant="secondary"
size="small"
onClick={() => setShowDeleteDialog(true)}
>
Delete agent
</Button>
</div>
</div>
</div>
<Dialog
controlled={{
isOpen: showDeleteDialog,
set: setShowDeleteDialog,
}}
styling={{ maxWidth: "32rem" }}
title="Delete agent"
>
<Dialog.Content>
<div>
<Text variant="large">
Are you sure you want to delete this agent? This action cannot be
undone.
</Text>
<Dialog.Footer>
<Button
variant="secondary"
disabled={isDeletingAgent}
onClick={() => setShowDeleteDialog(false)}
>
Cancel
</Button>
<Button
variant="destructive"
onClick={handleDeleteAgent}
loading={isDeletingAgent}
>
Delete Agent
</Button>
</Dialog.Footer>
</div>
</Dialog.Content>
</Dialog>
</div>
);
}

View File

@@ -83,9 +83,7 @@ function renderCode(
</div>
)}
<pre className="overflow-x-auto rounded-md bg-muted p-3">
<code className="whitespace-pre-wrap break-words font-mono text-sm">
{codeValue}
</code>
<code className="font-mono text-sm">{codeValue}</code>
</pre>
</div>
);

View File

@@ -3,12 +3,12 @@
import type {
OutputMetadata,
OutputRenderer,
} from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
} from "@/components/contextual/OutputRenderers";
import {
globalRegistry,
OutputActions,
OutputItem,
} from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/selected-views/OutputRenderers";
} from "@/components/contextual/OutputRenderers";
import React, { useMemo } from "react";
type OutputsRecord = Record<string, Array<unknown>>;

View File

@@ -13,7 +13,7 @@ import { LoadingSelectedContent } from "../LoadingSelectedContent";
import { RunDetailCard } from "../RunDetailCard/RunDetailCard";
import { RunDetailHeader } from "../RunDetailHeader/RunDetailHeader";
import { SelectedViewLayout } from "../SelectedViewLayout";
import { SelectedScheduleActions } from "./components/SelectedScheduleActions/SelectedScheduleActions";
import { SelectedScheduleActions } from "./components/SelectedScheduleActions";
import { useSelectedScheduleView } from "./useSelectedScheduleView";
interface Props {

View File

@@ -0,0 +1,40 @@
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Button } from "@/components/atoms/Button/Button";
import { EyeIcon } from "@phosphor-icons/react";
import { AgentActionsDropdown } from "../../AgentActionsDropdown";
import { useScheduleDetailHeader } from "../../RunDetailHeader/useScheduleDetailHeader";
import { SelectedActionsWrap } from "../../SelectedActionsWrap";
type Props = {
agent: LibraryAgent;
scheduleId: string;
onDeleted?: () => void;
};
export function SelectedScheduleActions({ agent, scheduleId }: Props) {
const { openInBuilderHref } = useScheduleDetailHeader(
agent.graph_id,
scheduleId,
agent.graph_version,
);
return (
<>
<SelectedActionsWrap>
{openInBuilderHref && (
<Button
variant="icon"
size="icon"
as="NextLink"
href={openInBuilderHref}
target="_blank"
aria-label="View scheduled task details"
>
<EyeIcon weight="bold" size={18} className="text-zinc-700" />
</Button>
)}
<AgentActionsDropdown agent={agent} scheduleId={scheduleId} />
</SelectedActionsWrap>
</>
);
}

View File

@@ -1,96 +0,0 @@
"use client";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Button } from "@/components/atoms/Button/Button";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { EyeIcon, TrashIcon } from "@phosphor-icons/react";
import { AgentActionsDropdown } from "../../../AgentActionsDropdown";
import { SelectedActionsWrap } from "../../../SelectedActionsWrap";
import { useSelectedScheduleActions } from "./useSelectedScheduleActions";
type Props = {
agent: LibraryAgent;
scheduleId: string;
onDeleted?: () => void;
};
export function SelectedScheduleActions({
agent,
scheduleId,
onDeleted,
}: Props) {
const {
openInBuilderHref,
showDeleteDialog,
setShowDeleteDialog,
handleDelete,
isDeleting,
} = useSelectedScheduleActions({ agent, scheduleId, onDeleted });
return (
<>
<SelectedActionsWrap>
{openInBuilderHref && (
<Button
variant="icon"
size="icon"
as="NextLink"
href={openInBuilderHref}
target="_blank"
aria-label="View scheduled task details"
>
<EyeIcon weight="bold" size={18} className="text-zinc-700" />
</Button>
)}
<Button
variant="icon"
size="icon"
aria-label="Delete schedule"
onClick={() => setShowDeleteDialog(true)}
disabled={isDeleting}
>
{isDeleting ? (
<LoadingSpinner size="small" />
) : (
<TrashIcon weight="bold" size={18} />
)}
</Button>
<AgentActionsDropdown agent={agent} scheduleId={scheduleId} />
</SelectedActionsWrap>
<Dialog
controlled={{
isOpen: showDeleteDialog,
set: setShowDeleteDialog,
}}
styling={{ maxWidth: "32rem" }}
title="Delete schedule"
>
<Dialog.Content>
<Text variant="large">
Are you sure you want to delete this schedule? This action cannot be
undone.
</Text>
<Dialog.Footer>
<Button
variant="secondary"
onClick={() => setShowDeleteDialog(false)}
disabled={isDeleting}
>
Cancel
</Button>
<Button
variant="destructive"
onClick={handleDelete}
loading={isDeleting}
>
Delete Schedule
</Button>
</Dialog.Footer>
</Dialog.Content>
</Dialog>
</>
);
}

View File

@@ -1,65 +0,0 @@
"use client";
import {
getGetV1ListExecutionSchedulesForAGraphQueryOptions,
useDeleteV1DeleteExecutionSchedule,
} from "@/app/api/__generated__/endpoints/schedules/schedules";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useQueryClient } from "@tanstack/react-query";
import { useState } from "react";
interface UseSelectedScheduleActionsProps {
agent: LibraryAgent;
scheduleId: string;
onDeleted?: () => void;
}
export function useSelectedScheduleActions({
agent,
scheduleId,
onDeleted,
}: UseSelectedScheduleActionsProps) {
const { toast } = useToast();
const queryClient = useQueryClient();
const [showDeleteDialog, setShowDeleteDialog] = useState(false);
const deleteMutation = useDeleteV1DeleteExecutionSchedule({
mutation: {
onSuccess: () => {
toast({ title: "Schedule deleted" });
queryClient.invalidateQueries({
queryKey: getGetV1ListExecutionSchedulesForAGraphQueryOptions(
agent.graph_id,
).queryKey,
});
setShowDeleteDialog(false);
onDeleted?.();
},
onError: (error: unknown) =>
toast({
title: "Failed to delete schedule",
description:
error instanceof Error
? error.message
: "An unexpected error occurred.",
variant: "destructive",
}),
},
});
function handleDelete() {
if (!scheduleId) return;
deleteMutation.mutate({ scheduleId });
}
const openInBuilderHref = `/build?flowID=${agent.graph_id}&flowVersion=${agent.graph_version}`;
return {
openInBuilderHref,
showDeleteDialog,
setShowDeleteDialog,
handleDelete,
isDeleting: deleteMutation.isPending,
};
}
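The deleted hook is a standard delete-then-invalidate mutation: run the delete, invalidate the schedules list query so it refetches, and surface success or failure through toasts. A generic sketch of that pattern using @tanstack/react-query directly rather than the generated orval hooks (the API call is injected as a parameter):

import { useMutation, useQueryClient } from "@tanstack/react-query";

// Generic delete-then-invalidate pattern: run the delete, invalidate the list
// query so the UI refetches, and report success/failure through callbacks.
export function useDeleteSchedule(
  deleteSchedule: (scheduleId: string) => Promise<void>, // injected API call
  listQueryKey: readonly unknown[],
  callbacks?: { onDeleted?: () => void; onError?: (message: string) => void },
) {
  const queryClient = useQueryClient();

  return useMutation({
    mutationFn: deleteSchedule,
    onSuccess: async () => {
      await queryClient.invalidateQueries({ queryKey: listQueryKey });
      callbacks?.onDeleted?.();
    },
    onError: (error: unknown) =>
      callbacks?.onError?.(
        error instanceof Error ? error.message : "An unexpected error occurred.",
      ),
  });
}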

View File

@@ -4,12 +4,12 @@ import type { GraphExecutionMeta } from "@/app/api/__generated__/models/graphExe
import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Input } from "@/components/atoms/Input/Input";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { CredentialsInput } from "../../../../../../../../../../components/contextual/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../../../../../../../../../../components/contextual/RunAgentInputs/RunAgentInputs";
import {
getAgentCredentialsFields,
getAgentInputFields,
} from "../../modals/AgentInputsReadOnly/helpers";
import { CredentialsInput } from "../../modals/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../../modals/RunAgentInputs/RunAgentInputs";
import { LoadingSelectedContent } from "../LoadingSelectedContent";
import { RunDetailCard } from "../RunDetailCard/RunDetailCard";
import { RunDetailHeader } from "../RunDetailHeader/RunDetailHeader";

View File

@@ -3,12 +3,12 @@
import type { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Input } from "@/components/atoms/Input/Input";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { CredentialsInput } from "../../../../../../../../../../components/contextual/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../../../../../../../../../../components/contextual/RunAgentInputs/RunAgentInputs";
import {
getAgentCredentialsFields,
getAgentInputFields,
} from "../../modals/AgentInputsReadOnly/helpers";
import { CredentialsInput } from "../../modals/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "../../modals/RunAgentInputs/RunAgentInputs";
import { LoadingSelectedContent } from "../LoadingSelectedContent";
import { RunDetailCard } from "../RunDetailCard/RunDetailCard";
import { RunDetailHeader } from "../RunDetailHeader/RunDetailHeader";

View File

@@ -40,17 +40,15 @@ export function useMarketplaceUpdate({ agent }: UseMarketplaceUpdateProps) {
},
);
// Get user's submissions - only fetch if user is the creator
const { data: submissionsData, isLoading: isSubmissionsLoading } =
useGetV2ListMySubmissions(
{ page: 1, page_size: 50 },
{
query: {
// Only fetch if user is the creator
enabled: !!(user?.id && agent?.owner_user_id === user.id),
},
// Get user's submissions to check for pending submissions
const { data: submissionsData } = useGetV2ListMySubmissions(
{ page: 1, page_size: 50 }, // Get enough to cover recent submissions
{
query: {
enabled: !!user?.id, // Only fetch if user is authenticated
},
);
},
);
const updateToLatestMutation = usePatchV2UpdateLibraryAgent({
mutation: {
@@ -80,36 +78,11 @@ export function useMarketplaceUpdate({ agent }: UseMarketplaceUpdateProps) {
// Check if marketplace has a newer version than user's current version
const marketplaceUpdateInfo = React.useMemo(() => {
const storeAgent = okData(storeAgentData) as any;
if (!agent || isSubmissionsLoading) {
if (!agent || !storeAgent) {
return {
hasUpdate: false,
latestVersion: undefined,
isUserCreator: false,
hasPublishUpdate: false,
};
}
const isUserCreator = agent?.owner_user_id === user?.id;
// Check if there's a pending submission for this specific agent version
const submissionsResponse = okData(submissionsData) as any;
const hasPendingSubmissionForCurrentVersion =
isUserCreator &&
submissionsResponse?.submissions?.some(
(submission: StoreSubmission) =>
submission.agent_id === agent.graph_id &&
submission.agent_version === agent.graph_version &&
submission.status === "PENDING",
);
if (!storeAgent) {
return {
hasUpdate: false,
latestVersion: undefined,
isUserCreator,
hasPublishUpdate:
isUserCreator && !hasPendingSubmissionForCurrentVersion,
};
}
@@ -124,15 +97,29 @@ export function useMarketplaceUpdate({ agent }: UseMarketplaceUpdateProps) {
)
: undefined;
// Show publish update button if:
// 1. User is the creator
// 2. No pending submission for current version
// 3. Either: agent not published yet OR local version is newer than marketplace
// Determine if the user is the creator of this agent
// Compare current user ID with the marketplace listing creator ID
const isUserCreator =
user?.id && agent.marketplace_listing?.creator.id === user.id;
// Check if there's a pending submission for this specific agent version
const submissionsResponse = okData(submissionsData) as any;
const hasPendingSubmissionForCurrentVersion =
isUserCreator &&
submissionsResponse?.submissions?.some(
(submission: StoreSubmission) =>
submission.agent_id === agent.graph_id &&
submission.agent_version === agent.graph_version &&
submission.status === "PENDING",
);
// If user is creator and their version is newer than marketplace, show publish update banner
// BUT only if there's no pending submission for this version
const hasPublishUpdate =
isUserCreator &&
!hasPendingSubmissionForCurrentVersion &&
(latestMarketplaceVersion === undefined || // Not published yet
agent.graph_version > latestMarketplaceVersion); // Or local version is newer
latestMarketplaceVersion !== undefined &&
agent.graph_version > latestMarketplaceVersion;
// If marketplace version is newer than user's version, show update banner
// This applies to both creators and non-creators
@@ -146,7 +133,7 @@ export function useMarketplaceUpdate({ agent }: UseMarketplaceUpdateProps) {
isUserCreator,
hasPublishUpdate,
};
}, [agent, storeAgentData, user, submissionsData, isSubmissionsLoading]);
}, [agent, storeAgentData, user, submissionsData]);
const handlePublishUpdate = () => {
setModalOpen(true);
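As a reading aid for the hunks above, the following is a small standalone restatement of the post-change banner logic in useMarketplaceUpdate. The function and parameter names are hypothetical and do not exist in the codebase; the comparisons mirror the comments and conditions visible in the diff.

// Illustrative restatement of the banner flags; names are hypothetical.
interface BannerInput {
  isUserCreator: boolean;
  hasPendingSubmissionForCurrentVersion: boolean;
  localGraphVersion: number;
  latestMarketplaceVersion?: number; // undefined when the agent is unpublished
}

function getMarketplaceBannerFlags({
  isUserCreator,
  hasPendingSubmissionForCurrentVersion,
  localGraphVersion,
  latestMarketplaceVersion,
}: BannerInput) {
  // Creators are prompted to publish an update only when the agent is already
  // on the marketplace, their local version is ahead, and no submission for
  // this version is still pending review.
  const hasPublishUpdate =
    isUserCreator &&
    !hasPendingSubmissionForCurrentVersion &&
    latestMarketplaceVersion !== undefined &&
    localGraphVersion > latestMarketplaceVersion;

  // All users are prompted to update when the marketplace copy is newer.
  const hasUpdate =
    latestMarketplaceVersion !== undefined &&
    latestMarketplaceVersion > localGraphVersion;

  return { hasPublishUpdate, hasUpdate };
}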

View File

@@ -12,8 +12,6 @@ import {
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { CredentialsInput } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "@/app/(platform)/library/agents/[id]/components/NewAgentLibraryView/components/modals/RunAgentInputs/RunAgentInputs";
import { ScheduleTaskDialog } from "@/app/(platform)/library/agents/[id]/components/OldAgentLibraryView/components/cron-scheduler-dialog";
import ActionButtonGroup from "@/components/__legacy__/action-button-group";
import type { ButtonAction } from "@/components/__legacy__/types";
@@ -30,6 +28,8 @@ import {
} from "@/components/__legacy__/ui/icons";
import { Input } from "@/components/__legacy__/ui/input";
import { Button } from "@/components/atoms/Button/Button";
import { CredentialsInput } from "@/components/contextual/CredentialsInputs/CredentialsInputs";
import { RunAgentInputs } from "@/components/contextual/RunAgentInputs/RunAgentInputs";
import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
import {
useToast,

View File

@@ -11,12 +11,12 @@ import {
} from "@/components/__legacy__/ui/card";
import LoadingBox from "@/components/__legacy__/ui/loading";
import type { OutputMetadata } from "../../NewAgentLibraryView/components/selected-views/OutputRenderers";
import type { OutputMetadata } from "../../../../../../../../components/contextual/OutputRenderers";
import {
globalRegistry,
OutputActions,
OutputItem,
} from "../../NewAgentLibraryView/components/selected-views/OutputRenderers";
} from "../../../../../../../../components/contextual/OutputRenderers";
export function AgentRunOutputView({
agentRunOutputs,

View File

@@ -1,17 +1,16 @@
"use client";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Text } from "@/components/atoms/Text/Text";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { HeartIcon } from "@phosphor-icons/react";
import React from "react";
import { useFavoriteAgents } from "../../hooks/useFavoriteAgents";
import { LibraryAgentCard } from "../LibraryAgentCard/LibraryAgentCard";
import LibraryAgentCard from "../LibraryAgentCard/LibraryAgentCard";
import { useGetFlag, Flag } from "@/services/feature-flags/use-get-flag";
import { Heart } from "lucide-react";
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
interface Props {
searchTerm: string;
}
export function FavoritesSection({ searchTerm }: Props) {
export default function FavoritesSection() {
const isAgentFavoritingEnabled = useGetFlag(Flag.AGENT_FAVORITING);
const {
allAgents: favoriteAgents,
agentLoading: isLoading,
@@ -19,50 +18,60 @@ export function FavoritesSection({ searchTerm }: Props) {
hasNextPage,
fetchNextPage,
isFetchingNextPage,
} = useFavoriteAgents({ searchTerm });
} = useFavoriteAgents();
if (isLoading || favoriteAgents.length === 0) {
// Only show this section if the feature flag is enabled
if (!isAgentFavoritingEnabled) {
return null;
}
// Don't show the section if there are no favorites
if (!isLoading && favoriteAgents.length === 0) {
return null;
}
return (
<div className="!mb-8">
<div className="mb-3 flex items-center gap-2 p-2">
<HeartIcon className="h-5 w-5" weight="fill" />
<div className="flex items-baseline gap-2">
<Text variant="h4">Favorites</Text>
{!isLoading && (
<Text
variant="body"
data-testid="agents-count"
className="relative bottom-px text-zinc-500"
>
{agentCount}
</Text>
)}
</div>
<div className="mb-8">
<div className="flex items-center gap-[10px] p-2 pb-[10px]">
<Heart className="h-5 w-5 fill-red-500 text-red-500" />
<span className="font-poppin text-[18px] font-semibold leading-[28px] text-neutral-800">
Favorites
</span>
{!isLoading && (
<span className="font-sans text-[14px] font-normal leading-6">
{agentCount} {agentCount === 1 ? "agent" : "agents"}
</span>
)}
</div>
<div className="relative">
<InfiniteScroll
isFetchingNextPage={isFetchingNextPage}
fetchNextPage={fetchNextPage}
hasNextPage={hasNextPage}
loader={
<div className="flex h-8 w-full items-center justify-center">
<div className="h-6 w-6 animate-spin rounded-full border-b-2 border-t-2 border-neutral-800" />
</div>
}
>
{isLoading ? (
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4">
{favoriteAgents.map((agent: LibraryAgent) => (
<LibraryAgentCard key={agent.id} agent={agent} />
{[...Array(4)].map((_, i) => (
<Skeleton key={i} className="h-48 w-full rounded-lg" />
))}
</div>
</InfiniteScroll>
) : (
<InfiniteScroll
isFetchingNextPage={isFetchingNextPage}
fetchNextPage={fetchNextPage}
hasNextPage={hasNextPage}
loader={
<div className="flex h-8 w-full items-center justify-center">
<div className="h-6 w-6 animate-spin rounded-full border-b-2 border-t-2 border-neutral-800" />
</div>
}
>
<div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4">
{favoriteAgents.map((agent: LibraryAgent) => (
<LibraryAgentCard key={agent.id} agent={agent} />
))}
</div>
</InfiniteScroll>
)}
</div>
{favoriteAgents.length > 0 && <div className="!mt-10 border-t" />}
{favoriteAgents.length > 0 && <div className="mt-6 border-t pt-6" />}
</div>
);
}

View File

@@ -1,28 +1,34 @@
import { LibrarySearchBar } from "../LibrarySearchBar/LibrarySearchBar";
// import LibraryNotificationDropdown from "./library-notification-dropdown";
import LibraryUploadAgentDialog from "../LibraryUploadAgentDialog/LibraryUploadAgentDialog";
import LibrarySearchBar from "../LibrarySearchBar/LibrarySearchBar";
interface Props {
setSearchTerm: (value: string) => void;
}
type LibraryActionHeaderProps = Record<string, never>;
export function LibraryActionHeader({ setSearchTerm }: Props) {
/**
* LibraryActionHeader component - Renders a header with search, notifications and filters
*/
const LibraryActionHeader: React.FC<LibraryActionHeaderProps> = ({}) => {
return (
<>
<div className="mb-[32px] hidden items-center justify-center gap-4 md:flex">
<LibrarySearchBar setSearchTerm={setSearchTerm} />
<div className="mb-[32px] hidden items-start justify-between md:flex">
{/* <LibraryNotificationDropdown /> */}
<LibrarySearchBar />
<LibraryUploadAgentDialog />
</div>
{/* Mobile and tablet */}
<div className="flex flex-col gap-4 p-4 pt-[52px] md:hidden">
<div className="flex w-full justify-between">
{/* <LibraryNotificationDropdown /> */}
<LibraryUploadAgentDialog />
</div>
<div className="flex items-center justify-center">
<LibrarySearchBar setSearchTerm={setSearchTerm} />
<LibrarySearchBar />
</div>
</div>
</>
);
}
};
export default LibraryActionHeader;

View File

@@ -1,28 +1,28 @@
"use client";
import { LibraryAgentSort } from "@/app/api/__generated__/models/libraryAgentSort";
import { Text } from "@/components/atoms/Text/Text";
import { LibrarySortMenu } from "../LibrarySortMenu/LibrarySortMenu";
import LibrarySortMenu from "../LibrarySortMenu/LibrarySortMenu";
interface Props {
interface LibraryActionSubHeaderProps {
agentCount: number;
setLibrarySort: (value: LibraryAgentSort) => void;
}
export function LibraryActionSubHeader({ agentCount, setLibrarySort }: Props) {
export default function LibraryActionSubHeader({
agentCount,
}: LibraryActionSubHeaderProps) {
return (
<div className="flex items-baseline justify-between">
<div className="flex items-baseline gap-4">
<Text variant="h4">My agents</Text>
<Text
variant="body"
<div className="flex items-center justify-between pb-[10px]">
<div className="flex items-center gap-[10px] p-2">
<span className="font-poppin w-[96px] text-[18px] font-semibold leading-[28px] text-neutral-800">
My agents
</span>
<span
className="w-[70px] font-sans text-[14px] font-normal leading-6"
data-testid="agents-count"
className="text-zinc-500"
>
{agentCount}
</Text>
{agentCount} agents
</span>
</div>
<LibrarySortMenu setLibrarySort={setLibrarySort} />
<LibrarySortMenu />
</div>
);
}

View File

@@ -1,126 +1,332 @@
"use client";
import { Text } from "@/components/atoms/Text/Text";
import { CaretCircleRightIcon } from "@phosphor-icons/react";
import Link from "next/link";
import Image from "next/image";
import NextLink from "next/link";
import { Heart } from "@phosphor-icons/react";
import { useState, useEffect } from "react";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { InfiniteData } from "@tanstack/react-query";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import {
getV2ListLibraryAgentsResponse,
getV2ListFavoriteLibraryAgentsResponse,
} from "@/app/api/__generated__/endpoints/library/library";
import BackendAPI, { LibraryAgentID } from "@/lib/autogpt-server-api";
import { cn } from "@/lib/utils";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import Avatar, {
AvatarFallback,
AvatarImage,
} from "@/components/atoms/Avatar/Avatar";
import { Link } from "@/components/atoms/Link/Link";
import { AgentCardMenu } from "./components/AgentCardMenu";
import { FavoriteButton } from "./components/FavoriteButton";
import { useLibraryAgentCard } from "./useLibraryAgentCard";
interface Props {
interface LibraryAgentCardProps {
agent: LibraryAgent;
}
export function LibraryAgentCard({ agent }: Props) {
const { id, name, graph_id, can_access_graph, image_url } = agent;
const {
isFromMarketplace,
isFavorite,
profile,
export default function LibraryAgentCard({
agent: {
id,
name,
description,
graph_id,
can_access_graph,
creator_image_url,
handleToggleFavorite,
} = useLibraryAgentCard({ agent });
image_url,
is_favorite,
},
}: LibraryAgentCardProps) {
const isAgentFavoritingEnabled = useGetFlag(Flag.AGENT_FAVORITING);
const [isFavorite, setIsFavorite] = useState(is_favorite);
const [isUpdating, setIsUpdating] = useState(false);
const { toast } = useToast();
const api = new BackendAPI();
const queryClient = getQueryClient();
// Sync local state with prop when it changes (e.g., after query invalidation)
useEffect(() => {
setIsFavorite(is_favorite);
}, [is_favorite]);
const updateQueryData = (newIsFavorite: boolean) => {
// Update the agent in all library agent queries
queryClient.setQueriesData(
{ queryKey: ["/api/library/agents"] },
(
oldData:
| InfiniteData<getV2ListLibraryAgentsResponse, number | undefined>
| undefined,
) => {
if (!oldData?.pages) return oldData;
return {
...oldData,
pages: oldData.pages.map((page) => {
if (page.status !== 200) return page;
return {
...page,
data: {
...page.data,
agents: page.data.agents.map((agent: LibraryAgent) =>
agent.id === id
? { ...agent, is_favorite: newIsFavorite }
: agent,
),
},
};
}),
};
},
);
// Update or remove from favorites query based on new state
queryClient.setQueriesData(
{ queryKey: ["/api/library/agents/favorites"] },
(
oldData:
| InfiniteData<
getV2ListFavoriteLibraryAgentsResponse,
number | undefined
>
| undefined,
) => {
if (!oldData?.pages) return oldData;
if (newIsFavorite) {
// Add to favorites if not already there
const exists = oldData.pages.some(
(page) =>
page.status === 200 &&
page.data.agents.some((agent: LibraryAgent) => agent.id === id),
);
if (!exists) {
const firstPage = oldData.pages[0];
if (firstPage?.status === 200) {
const updatedAgent = {
id,
name,
description,
graph_id,
can_access_graph,
creator_image_url,
image_url,
is_favorite: true,
};
return {
...oldData,
pages: [
{
...firstPage,
data: {
...firstPage.data,
agents: [updatedAgent, ...firstPage.data.agents],
pagination: {
...firstPage.data.pagination,
total_items: firstPage.data.pagination.total_items + 1,
},
},
},
...oldData.pages.slice(1).map((page) =>
page.status === 200
? {
...page,
data: {
...page.data,
pagination: {
...page.data.pagination,
total_items: page.data.pagination.total_items + 1,
},
},
}
: page,
),
],
};
}
}
} else {
// Remove from favorites
let removedCount = 0;
return {
...oldData,
pages: oldData.pages.map((page) => {
if (page.status !== 200) return page;
const filteredAgents = page.data.agents.filter(
(agent: LibraryAgent) => agent.id !== id,
);
if (filteredAgents.length < page.data.agents.length) {
removedCount = 1;
}
return {
...page,
data: {
...page.data,
agents: filteredAgents,
pagination: {
...page.data.pagination,
total_items:
page.data.pagination.total_items - removedCount,
},
},
};
}),
};
}
return oldData;
},
);
};
const handleToggleFavorite = async (e: React.MouseEvent) => {
e.preventDefault(); // Prevent navigation when clicking the heart
e.stopPropagation();
if (isUpdating || !isAgentFavoritingEnabled) return;
const newIsFavorite = !isFavorite;
// Optimistic update
setIsFavorite(newIsFavorite);
updateQueryData(newIsFavorite);
setIsUpdating(true);
try {
await api.updateLibraryAgent(id as LibraryAgentID, {
is_favorite: newIsFavorite,
});
toast({
title: newIsFavorite ? "Added to favorites" : "Removed from favorites",
description: `${name} has been ${newIsFavorite ? "added to" : "removed from"} your favorites.`,
});
} catch (error) {
// Revert on error
console.error("Failed to update favorite status:", error);
setIsFavorite(!newIsFavorite);
updateQueryData(!newIsFavorite);
toast({
title: "Error",
description: "Failed to update favorite status. Please try again.",
variant: "destructive",
});
} finally {
setIsUpdating(false);
}
};
return (
<div
data-testid="library-agent-card"
data-agent-id={id}
className="group relative inline-flex h-[10.625rem] w-full max-w-[25rem] flex-col items-start justify-start gap-2.5 rounded-medium border border-zinc-100 bg-white transition-all duration-300 hover:shadow-md"
className="group inline-flex w-full max-w-[434px] flex-col items-start justify-start gap-2.5 rounded-[26px] bg-white transition-all duration-300 hover:shadow-lg dark:bg-transparent dark:hover:shadow-gray-700"
>
<NextLink href={`/library/agents/${id}`} className="flex-shrink-0">
<div className="relative flex items-center gap-2 px-4 pt-3">
<Avatar className="h-4 w-4 rounded-full">
<Link
href={`/library/agents/${id}`}
className="relative h-[200px] w-full overflow-hidden rounded-[20px]"
>
{!image_url ? (
<div
className={`h-full w-full ${
[
"bg-gradient-to-r from-green-200 to-blue-200",
"bg-gradient-to-r from-pink-200 to-purple-200",
"bg-gradient-to-r from-yellow-200 to-orange-200",
"bg-gradient-to-r from-blue-200 to-cyan-200",
"bg-gradient-to-r from-indigo-200 to-purple-200",
][parseInt(id.slice(0, 8), 16) % 5]
}`}
style={{
backgroundSize: "200% 200%",
animation: "gradient 15s ease infinite",
}}
/>
) : (
<Image
src={image_url}
alt={`${name} preview image`}
fill
className="object-cover"
/>
)}
{isAgentFavoritingEnabled && (
<button
onClick={handleToggleFavorite}
className={cn(
"absolute right-4 top-4 rounded-full bg-white/90 p-2 backdrop-blur-sm transition-all duration-200",
"hover:scale-110 hover:bg-white",
"focus:outline-none focus:ring-2 focus:ring-red-500 focus:ring-offset-2",
isUpdating && "cursor-not-allowed opacity-50",
!isFavorite && "opacity-0 group-hover:opacity-100",
)}
disabled={isUpdating}
aria-label={
isFavorite ? "Remove from favorites" : "Add to favorites"
}
>
<Heart
size={20}
weight={isFavorite ? "fill" : "regular"}
className={cn(
"transition-colors duration-200",
isFavorite
? "text-red-500"
: "text-gray-600 hover:text-red-500",
)}
/>
</button>
)}
<div className="absolute bottom-4 left-4">
<Avatar className="h-16 w-16">
<AvatarImage
src={
isFromMarketplace
? creator_image_url || "/avatar-placeholder.png"
: profile?.avatar_url || "/avatar-placeholder.png"
creator_image_url
? creator_image_url
: "/avatar-placeholder.png"
}
alt={`${name} creator avatar`}
/>
<AvatarFallback size={48}>{name.charAt(0)}</AvatarFallback>
<AvatarFallback size={64}>{name.charAt(0)}</AvatarFallback>
</Avatar>
<Text
variant="small-medium"
className="uppercase tracking-wide text-zinc-400"
>
{isFromMarketplace ? "FROM MARKETPLACE" : "Built by you"}
</Text>
</div>
</NextLink>
<FavoriteButton
isFavorite={isFavorite}
onClick={handleToggleFavorite}
className="absolute right-10 top-0"
/>
<AgentCardMenu agent={agent} />
</Link>
<div className="flex w-full flex-1 flex-col px-4 pb-2">
<Link
href={`/library/agents/${id}`}
className="flex w-full items-start justify-between gap-2 no-underline hover:no-underline"
>
<Text
variant="h5"
data-testid="library-agent-card-name"
className="line-clamp-3 hyphens-auto break-words no-underline hover:no-underline"
>
<div className="flex w-full flex-1 flex-col px-4 py-4">
<Link href={`/library/agents/${id}`}>
<h3 className="mb-2 line-clamp-2 font-poppins text-2xl font-semibold leading-tight text-[#272727] dark:text-neutral-100">
{name}
</Text>
</h3>
{!image_url ? (
<div
className={`h-[3.64rem] w-[6.70rem] flex-shrink-0 rounded-small ${
[
"bg-gradient-to-r from-green-200 to-blue-200",
"bg-gradient-to-r from-pink-200 to-purple-200",
"bg-gradient-to-r from-yellow-200 to-orange-200",
"bg-gradient-to-r from-blue-200 to-cyan-200",
"bg-gradient-to-r from-indigo-200 to-purple-200",
][parseInt(id.slice(0, 8), 16) % 5]
}`}
style={{
backgroundSize: "200% 200%",
animation: "gradient 15s ease infinite",
}}
/>
) : (
<Image
src={image_url}
alt={`${name} preview image`}
width={107}
height={58}
className="flex-shrink-0 rounded-small object-cover"
/>
)}
<p className="line-clamp-3 flex-1 text-sm text-gray-600 dark:text-gray-400">
{description}
</p>
</Link>
<div className="mt-auto flex w-full justify-start gap-6 border-t border-zinc-100 pb-1 pt-3">
<div className="flex-grow" />
{/* Spacer */}
<div className="items-between mt-4 flex w-full justify-between gap-3">
<Link
href={`/library/agents/${id}`}
data-testid="library-agent-card-see-runs-link"
className="flex items-center gap-1 text-[13px]"
className="text-lg font-semibold text-neutral-800 hover:underline dark:text-neutral-200"
>
See runs <CaretCircleRightIcon size={20} />
See runs
</Link>
{can_access_graph && (
<Link
href={`/build?flowID=${graph_id}`}
data-testid="library-agent-card-open-in-builder-link"
className="flex items-center gap-1 text-[13px]"
isExternal
className="text-lg font-semibold text-neutral-800 hover:underline dark:text-neutral-200"
>
Open in builder <CaretCircleRightIcon size={20} />
Open in builder
</Link>
)}
</div>

View File

@@ -1,188 +0,0 @@
"use client";
import {
getGetV2ListLibraryAgentsQueryKey,
useDeleteV2DeleteLibraryAgent,
usePostV2ForkLibraryAgent,
} from "@/app/api/__generated__/endpoints/library/library";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuSeparator,
DropdownMenuTrigger,
} from "@/components/molecules/DropdownMenu/DropdownMenu";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { DotsThree } from "@phosphor-icons/react";
import { useQueryClient } from "@tanstack/react-query";
import Link from "next/link";
import { useRouter } from "next/navigation";
import { useState } from "react";
interface AgentCardMenuProps {
agent: LibraryAgent;
}
export function AgentCardMenu({ agent }: AgentCardMenuProps) {
const { toast } = useToast();
const queryClient = useQueryClient();
const router = useRouter();
const [showDeleteDialog, setShowDeleteDialog] = useState(false);
const [isDeletingAgent, setIsDeletingAgent] = useState(false);
const [isDuplicatingAgent, setIsDuplicatingAgent] = useState(false);
const { mutateAsync: deleteAgent } = useDeleteV2DeleteLibraryAgent();
const { mutateAsync: forkAgent } = usePostV2ForkLibraryAgent();
async function handleDuplicateAgent() {
if (!agent.id) return;
setIsDuplicatingAgent(true);
try {
const result = await forkAgent({ libraryAgentId: agent.id });
if (result.status === 200) {
await queryClient.refetchQueries({
queryKey: getGetV2ListLibraryAgentsQueryKey(),
});
toast({
title: "Agent duplicated",
description: `${result.data.name} has been created.`,
});
}
} catch (error: unknown) {
toast({
title: "Failed to duplicate agent",
description:
error instanceof Error
? error.message
: "An unexpected error occurred.",
variant: "destructive",
});
} finally {
setIsDuplicatingAgent(false);
}
}
async function handleDeleteAgent() {
if (!agent.id) return;
setIsDeletingAgent(true);
try {
await deleteAgent({ libraryAgentId: agent.id });
await queryClient.refetchQueries({
queryKey: getGetV2ListLibraryAgentsQueryKey(),
});
toast({ title: "Agent deleted" });
setShowDeleteDialog(false);
router.push("/library");
} catch (error: unknown) {
toast({
title: "Failed to delete agent",
description:
error instanceof Error
? error.message
: "An unexpected error occurred.",
variant: "destructive",
});
} finally {
setIsDeletingAgent(false);
}
}
return (
<>
<DropdownMenu>
<DropdownMenuTrigger asChild>
<button
className="absolute right-2 top-1 rounded p-1.5 transition-opacity hover:bg-neutral-100"
onClick={(e) => e.stopPropagation()}
aria-label="More actions"
>
<DotsThree className="h-5 w-5 text-neutral-600" />
</button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end">
{agent.can_access_graph && (
<>
<DropdownMenuItem asChild>
<Link
href={`/build?flowID=${agent.graph_id}&flowVersion=${agent.graph_version}`}
target="_blank"
className="flex items-center gap-2"
onClick={(e) => e.stopPropagation()}
>
Edit agent
</Link>
</DropdownMenuItem>
<DropdownMenuSeparator />
</>
)}
<DropdownMenuItem
onClick={(e) => {
e.stopPropagation();
handleDuplicateAgent();
}}
disabled={isDuplicatingAgent}
className="flex items-center gap-2"
>
Duplicate agent
</DropdownMenuItem>
<DropdownMenuSeparator />
<DropdownMenuItem
onClick={(e) => {
e.stopPropagation();
setShowDeleteDialog(true);
}}
className="flex items-center gap-2 text-red-600 focus:bg-red-50 focus:text-red-600"
>
Delete agent
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
<Dialog
controlled={{
isOpen: showDeleteDialog,
set: setShowDeleteDialog,
}}
styling={{ maxWidth: "32rem" }}
title="Delete agent"
>
<Dialog.Content>
<div>
<Text variant="large">
Are you sure you want to delete this agent? This action cannot be
undone.
</Text>
<Dialog.Footer>
<Button
variant="secondary"
disabled={isDeletingAgent}
onClick={() => setShowDeleteDialog(false)}
>
Cancel
</Button>
<Button
variant="destructive"
onClick={handleDeleteAgent}
loading={isDeletingAgent}
>
Delete Agent
</Button>
</Dialog.Footer>
</div>
</Dialog.Content>
</Dialog>
</>
);
}

View File

@@ -1,39 +0,0 @@
"use client";
import { cn } from "@/lib/utils";
import { HeartIcon } from "@phosphor-icons/react";
import type { MouseEvent } from "react";
interface FavoriteButtonProps {
isFavorite: boolean;
onClick: (e: MouseEvent<HTMLButtonElement>) => void;
className?: string;
}
export function FavoriteButton({
isFavorite,
onClick,
className,
}: FavoriteButtonProps) {
return (
<button
onClick={onClick}
className={cn(
"rounded-full p-2 transition-all duration-200",
"hover:scale-110",
!isFavorite && "opacity-0 group-hover:opacity-100",
className,
)}
aria-label={isFavorite ? "Remove from favorites" : "Add to favorites"}
>
<HeartIcon
size={20}
weight={isFavorite ? "fill" : "regular"}
className={cn(
"transition-colors duration-200",
isFavorite ? "text-red-500" : "text-gray-600 hover:text-red-500",
)}
/>
</button>
);
}

View File

@@ -1,150 +0,0 @@
import { InfiniteData, QueryClient } from "@tanstack/react-query";
import {
getV2ListFavoriteLibraryAgentsResponse,
getV2ListLibraryAgentsResponse,
} from "@/app/api/__generated__/endpoints/library/library";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
interface UpdateFavoriteInQueriesParams {
queryClient: QueryClient;
agentId: string;
agent: LibraryAgent;
newIsFavorite: boolean;
}
export function updateFavoriteInQueries({
queryClient,
agentId,
agent,
newIsFavorite,
}: UpdateFavoriteInQueriesParams) {
queryClient.setQueriesData(
{ queryKey: ["/api/library/agents"] },
(
oldData:
| InfiniteData<getV2ListLibraryAgentsResponse, number | undefined>
| undefined,
) => {
if (!oldData?.pages) return oldData;
return {
...oldData,
pages: oldData.pages.map((page) => {
if (page.status !== 200) return page;
return {
...page,
data: {
...page.data,
agents: page.data.agents.map((currentAgent: LibraryAgent) =>
currentAgent.id === agentId
? { ...currentAgent, is_favorite: newIsFavorite }
: currentAgent,
),
},
};
}),
};
},
);
queryClient.setQueriesData(
{ queryKey: ["/api/library/agents/favorites"] },
(
oldData:
| InfiniteData<
getV2ListFavoriteLibraryAgentsResponse,
number | undefined
>
| undefined,
) => {
if (!oldData?.pages) return oldData;
if (newIsFavorite) {
const exists = oldData.pages.some(
(page) =>
page.status === 200 &&
page.data.agents.some(
(currentAgent: LibraryAgent) => currentAgent.id === agentId,
),
);
if (!exists) {
const firstPage = oldData.pages[0];
if (firstPage?.status === 200) {
const updatedAgent = {
id: agent.id,
name: agent.name,
description: agent.description,
graph_id: agent.graph_id,
can_access_graph: agent.can_access_graph,
creator_image_url: agent.creator_image_url,
image_url: agent.image_url,
is_favorite: true,
};
return {
...oldData,
pages: [
{
...firstPage,
data: {
...firstPage.data,
agents: [updatedAgent, ...firstPage.data.agents],
pagination: {
...firstPage.data.pagination,
total_items: firstPage.data.pagination.total_items + 1,
},
},
},
...oldData.pages.slice(1).map((page) =>
page.status === 200
? {
...page,
data: {
...page.data,
pagination: {
...page.data.pagination,
total_items: page.data.pagination.total_items + 1,
},
},
}
: page,
),
],
};
}
}
} else {
return {
...oldData,
pages: oldData.pages.map((page) => {
if (page.status !== 200) return page;
const filteredAgents = page.data.agents.filter(
(currentAgent: LibraryAgent) => currentAgent.id !== agentId,
);
const removedCount =
filteredAgents.length < page.data.agents.length ? 1 : 0;
return {
...page,
data: {
...page.data,
agents: filteredAgents,
pagination: {
...page.data.pagination,
total_items: page.data.pagination.total_items - removedCount,
},
},
};
}),
};
}
return oldData;
},
);
}

View File

@@ -1,84 +0,0 @@
"use client";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useEffect, useState } from "react";
import { usePatchV2UpdateLibraryAgent } from "@/app/api/__generated__/endpoints/library/library";
import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/store";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { okData } from "@/app/api/helpers";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { updateFavoriteInQueries } from "./helpers";
interface Props {
agent: LibraryAgent;
}
export function useLibraryAgentCard({ agent }: Props) {
const { id, name, is_favorite, creator_image_url, marketplace_listing } =
agent;
const isFromMarketplace = Boolean(marketplace_listing);
const [isFavorite, setIsFavorite] = useState(is_favorite);
const { toast } = useToast();
const queryClient = getQueryClient();
const { mutateAsync: updateLibraryAgent } = usePatchV2UpdateLibraryAgent();
const { data: profile } = useGetV2GetUserProfile({
query: {
select: okData,
},
});
useEffect(() => {
setIsFavorite(is_favorite);
}, [is_favorite]);
function updateQueryData(newIsFavorite: boolean) {
updateFavoriteInQueries({
queryClient,
agentId: id,
agent,
newIsFavorite,
});
}
async function handleToggleFavorite(e: React.MouseEvent) {
e.preventDefault();
e.stopPropagation();
const newIsFavorite = !isFavorite;
setIsFavorite(newIsFavorite);
updateQueryData(newIsFavorite);
try {
await updateLibraryAgent({
libraryAgentId: id,
data: { is_favorite: newIsFavorite },
});
toast({
title: newIsFavorite ? "Added to favorites" : "Removed from favorites",
description: `${name} has been ${newIsFavorite ? "added to" : "removed from"} your favorites.`,
});
} catch {
setIsFavorite(!newIsFavorite);
updateQueryData(!newIsFavorite);
toast({
title: "Error",
description: "Failed to update favorite status. Please try again.",
variant: "destructive",
});
}
}
return {
isFromMarketplace,
isFavorite,
profile,
creator_image_url,
handleToggleFavorite,
};
}
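For orientation, here is a trimmed sketch of how the removed useLibraryAgentCard hook and FavoriteButton composed inside the card. The component name and reduced markup are illustrative; the real (removed) LibraryAgentCard shown earlier in this diff also rendered the cover image, card menu, and run links.

"use client";

// Trimmed, illustrative consumer of the removed hook; not the actual card markup.
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { FavoriteButton } from "./components/FavoriteButton";
import { useLibraryAgentCard } from "./useLibraryAgentCard";

export function LibraryAgentCardSketch({ agent }: { agent: LibraryAgent }) {
  const { isFromMarketplace, isFavorite, handleToggleFavorite } =
    useLibraryAgentCard({ agent });

  return (
    <div className="group relative rounded-medium border border-zinc-100 p-4">
      <span className="text-xs uppercase tracking-wide text-zinc-400">
        {isFromMarketplace ? "From marketplace" : "Built by you"}
      </span>
      {/* Optimistically toggles is_favorite in the query cache and rolls back on API failure */}
      <FavoriteButton isFavorite={isFavorite} onClick={handleToggleFavorite} />
    </div>
  );
}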

Some files were not shown because too many files have changed in this diff.