mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-01-22 13:38:10 -05:00
Compare commits
4 Commits
master
...
feat/agent
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
da9c4a4adf | ||
|
|
0ca73004e5 | ||
|
|
9a786ed8d9 | ||
|
|
0a435e2ffb |
@@ -1,29 +1,28 @@
|
|||||||
"""Agent generator package - Creates agents from natural language."""
|
"""Agent generator package - Creates agents from natural language."""
|
||||||
|
|
||||||
from .core import (
|
from .core import (
|
||||||
apply_agent_patch,
|
AgentGeneratorNotConfiguredError,
|
||||||
decompose_goal,
|
decompose_goal,
|
||||||
generate_agent,
|
generate_agent,
|
||||||
generate_agent_patch,
|
generate_agent_patch,
|
||||||
get_agent_as_json,
|
get_agent_as_json,
|
||||||
|
json_to_graph,
|
||||||
save_agent_to_library,
|
save_agent_to_library,
|
||||||
)
|
)
|
||||||
from .fixer import apply_all_fixes
|
from .service import health_check as check_external_service_health
|
||||||
from .utils import get_blocks_info
|
from .service import is_external_service_configured
|
||||||
from .validator import validate_agent
|
|
||||||
|
|
||||||
__all__ = [
|
__all__ = [
|
||||||
# Core functions
|
# Core functions
|
||||||
"decompose_goal",
|
"decompose_goal",
|
||||||
"generate_agent",
|
"generate_agent",
|
||||||
"generate_agent_patch",
|
"generate_agent_patch",
|
||||||
"apply_agent_patch",
|
|
||||||
"save_agent_to_library",
|
"save_agent_to_library",
|
||||||
"get_agent_as_json",
|
"get_agent_as_json",
|
||||||
# Fixer
|
"json_to_graph",
|
||||||
"apply_all_fixes",
|
# Exceptions
|
||||||
# Validator
|
"AgentGeneratorNotConfiguredError",
|
||||||
"validate_agent",
|
# Service
|
||||||
# Utils
|
"is_external_service_configured",
|
||||||
"get_blocks_info",
|
"check_external_service_health",
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,25 +0,0 @@
|
|||||||
"""OpenRouter client configuration for agent generation."""
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from openai import AsyncOpenAI
|
|
||||||
|
|
||||||
# Configuration - use OPEN_ROUTER_API_KEY for consistency with chat/config.py
|
|
||||||
OPENROUTER_API_KEY = os.getenv("OPEN_ROUTER_API_KEY")
|
|
||||||
AGENT_GENERATOR_MODEL = os.getenv("AGENT_GENERATOR_MODEL", "anthropic/claude-opus-4.5")
|
|
||||||
|
|
||||||
# OpenRouter client (OpenAI-compatible API)
|
|
||||||
_client: AsyncOpenAI | None = None
|
|
||||||
|
|
||||||
|
|
||||||
def get_client() -> AsyncOpenAI:
|
|
||||||
"""Get or create the OpenRouter client."""
|
|
||||||
global _client
|
|
||||||
if _client is None:
|
|
||||||
if not OPENROUTER_API_KEY:
|
|
||||||
raise ValueError("OPENROUTER_API_KEY environment variable is required")
|
|
||||||
_client = AsyncOpenAI(
|
|
||||||
base_url="https://openrouter.ai/api/v1",
|
|
||||||
api_key=OPENROUTER_API_KEY,
|
|
||||||
)
|
|
||||||
return _client
|
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
"""Core agent generation functions."""
|
"""Core agent generation functions."""
|
||||||
|
|
||||||
import copy
|
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Any
|
from typing import Any
|
||||||
@@ -9,13 +7,35 @@ from typing import Any
|
|||||||
from backend.api.features.library import db as library_db
|
from backend.api.features.library import db as library_db
|
||||||
from backend.data.graph import Graph, Link, Node, create_graph
|
from backend.data.graph import Graph, Link, Node, create_graph
|
||||||
|
|
||||||
from .client import AGENT_GENERATOR_MODEL, get_client
|
from .service import (
|
||||||
from .prompts import DECOMPOSITION_PROMPT, GENERATION_PROMPT, PATCH_PROMPT
|
decompose_goal_external,
|
||||||
from .utils import get_block_summaries, parse_json_from_llm
|
generate_agent_external,
|
||||||
|
generate_agent_patch_external,
|
||||||
|
is_external_service_configured,
|
||||||
|
)
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AgentGeneratorNotConfiguredError(Exception):
|
||||||
|
"""Raised when the external Agent Generator service is not configured."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def _check_service_configured() -> None:
|
||||||
|
"""Check if the external Agent Generator service is configured.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
AgentGeneratorNotConfiguredError: If the service is not configured.
|
||||||
|
"""
|
||||||
|
if not is_external_service_configured():
|
||||||
|
raise AgentGeneratorNotConfiguredError(
|
||||||
|
"Agent Generator service is not configured. "
|
||||||
|
"Set AGENTGENERATOR_HOST environment variable to enable agent generation."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
async def decompose_goal(description: str, context: str = "") -> dict[str, Any] | None:
|
async def decompose_goal(description: str, context: str = "") -> dict[str, Any] | None:
|
||||||
"""Break down a goal into steps or return clarifying questions.
|
"""Break down a goal into steps or return clarifying questions.
|
||||||
|
|
||||||
@@ -28,40 +48,13 @@ async def decompose_goal(description: str, context: str = "") -> dict[str, Any]
|
|||||||
- {"type": "clarifying_questions", "questions": [...]}
|
- {"type": "clarifying_questions", "questions": [...]}
|
||||||
- {"type": "instructions", "steps": [...]}
|
- {"type": "instructions", "steps": [...]}
|
||||||
Or None on error
|
Or None on error
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
AgentGeneratorNotConfiguredError: If the external service is not configured.
|
||||||
"""
|
"""
|
||||||
client = get_client()
|
_check_service_configured()
|
||||||
prompt = DECOMPOSITION_PROMPT.format(block_summaries=get_block_summaries())
|
logger.info("Calling external Agent Generator service for decompose_goal")
|
||||||
|
return await decompose_goal_external(description, context)
|
||||||
full_description = description
|
|
||||||
if context:
|
|
||||||
full_description = f"{description}\n\nAdditional context:\n{context}"
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = await client.chat.completions.create(
|
|
||||||
model=AGENT_GENERATOR_MODEL,
|
|
||||||
messages=[
|
|
||||||
{"role": "system", "content": prompt},
|
|
||||||
{"role": "user", "content": full_description},
|
|
||||||
],
|
|
||||||
temperature=0,
|
|
||||||
)
|
|
||||||
|
|
||||||
content = response.choices[0].message.content
|
|
||||||
if content is None:
|
|
||||||
logger.error("LLM returned empty content for decomposition")
|
|
||||||
return None
|
|
||||||
|
|
||||||
result = parse_json_from_llm(content)
|
|
||||||
|
|
||||||
if result is None:
|
|
||||||
logger.error(f"Failed to parse decomposition response: {content[:200]}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error decomposing goal: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
async def generate_agent(instructions: dict[str, Any]) -> dict[str, Any] | None:
|
async def generate_agent(instructions: dict[str, Any]) -> dict[str, Any] | None:
|
||||||
@@ -72,31 +65,14 @@ async def generate_agent(instructions: dict[str, Any]) -> dict[str, Any] | None:
|
|||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Agent JSON dict or None on error
|
Agent JSON dict or None on error
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
AgentGeneratorNotConfiguredError: If the external service is not configured.
|
||||||
"""
|
"""
|
||||||
client = get_client()
|
_check_service_configured()
|
||||||
prompt = GENERATION_PROMPT.format(block_summaries=get_block_summaries())
|
logger.info("Calling external Agent Generator service for generate_agent")
|
||||||
|
result = await generate_agent_external(instructions)
|
||||||
try:
|
if result:
|
||||||
response = await client.chat.completions.create(
|
|
||||||
model=AGENT_GENERATOR_MODEL,
|
|
||||||
messages=[
|
|
||||||
{"role": "system", "content": prompt},
|
|
||||||
{"role": "user", "content": json.dumps(instructions, indent=2)},
|
|
||||||
],
|
|
||||||
temperature=0,
|
|
||||||
)
|
|
||||||
|
|
||||||
content = response.choices[0].message.content
|
|
||||||
if content is None:
|
|
||||||
logger.error("LLM returned empty content for agent generation")
|
|
||||||
return None
|
|
||||||
|
|
||||||
result = parse_json_from_llm(content)
|
|
||||||
|
|
||||||
if result is None:
|
|
||||||
logger.error(f"Failed to parse agent JSON: {content[:200]}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Ensure required fields
|
# Ensure required fields
|
||||||
if "id" not in result:
|
if "id" not in result:
|
||||||
result["id"] = str(uuid.uuid4())
|
result["id"] = str(uuid.uuid4())
|
||||||
@@ -104,12 +80,7 @@ async def generate_agent(instructions: dict[str, Any]) -> dict[str, Any] | None:
|
|||||||
result["version"] = 1
|
result["version"] = 1
|
||||||
if "is_active" not in result:
|
if "is_active" not in result:
|
||||||
result["is_active"] = True
|
result["is_active"] = True
|
||||||
|
return result
|
||||||
return result
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error generating agent: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def json_to_graph(agent_json: dict[str, Any]) -> Graph:
|
def json_to_graph(agent_json: dict[str, Any]) -> Graph:
|
||||||
@@ -284,108 +255,23 @@ async def get_agent_as_json(
|
|||||||
async def generate_agent_patch(
|
async def generate_agent_patch(
|
||||||
update_request: str, current_agent: dict[str, Any]
|
update_request: str, current_agent: dict[str, Any]
|
||||||
) -> dict[str, Any] | None:
|
) -> dict[str, Any] | None:
|
||||||
"""Generate a patch to update an existing agent.
|
"""Update an existing agent using natural language.
|
||||||
|
|
||||||
|
The external Agent Generator service handles:
|
||||||
|
- Generating the patch
|
||||||
|
- Applying the patch
|
||||||
|
- Fixing and validating the result
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
update_request: Natural language description of changes
|
update_request: Natural language description of changes
|
||||||
current_agent: Current agent JSON
|
current_agent: Current agent JSON
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Patch dict or clarifying questions, or None on error
|
Updated agent JSON, clarifying questions dict, or None on error
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
AgentGeneratorNotConfiguredError: If the external service is not configured.
|
||||||
"""
|
"""
|
||||||
client = get_client()
|
_check_service_configured()
|
||||||
prompt = PATCH_PROMPT.format(
|
logger.info("Calling external Agent Generator service for generate_agent_patch")
|
||||||
current_agent=json.dumps(current_agent, indent=2),
|
return await generate_agent_patch_external(update_request, current_agent)
|
||||||
block_summaries=get_block_summaries(),
|
|
||||||
)
|
|
||||||
|
|
||||||
try:
|
|
||||||
response = await client.chat.completions.create(
|
|
||||||
model=AGENT_GENERATOR_MODEL,
|
|
||||||
messages=[
|
|
||||||
{"role": "system", "content": prompt},
|
|
||||||
{"role": "user", "content": update_request},
|
|
||||||
],
|
|
||||||
temperature=0,
|
|
||||||
)
|
|
||||||
|
|
||||||
content = response.choices[0].message.content
|
|
||||||
if content is None:
|
|
||||||
logger.error("LLM returned empty content for patch generation")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return parse_json_from_llm(content)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error generating patch: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def apply_agent_patch(
|
|
||||||
current_agent: dict[str, Any], patch: dict[str, Any]
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""Apply a patch to an existing agent.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
current_agent: Current agent JSON
|
|
||||||
patch: Patch dict with operations
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Updated agent JSON
|
|
||||||
"""
|
|
||||||
agent = copy.deepcopy(current_agent)
|
|
||||||
patches = patch.get("patches", [])
|
|
||||||
|
|
||||||
for p in patches:
|
|
||||||
patch_type = p.get("type")
|
|
||||||
|
|
||||||
if patch_type == "modify":
|
|
||||||
node_id = p.get("node_id")
|
|
||||||
changes = p.get("changes", {})
|
|
||||||
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
if node["id"] == node_id:
|
|
||||||
_deep_update(node, changes)
|
|
||||||
logger.debug(f"Modified node {node_id}")
|
|
||||||
break
|
|
||||||
|
|
||||||
elif patch_type == "add":
|
|
||||||
new_nodes = p.get("new_nodes", [])
|
|
||||||
new_links = p.get("new_links", [])
|
|
||||||
|
|
||||||
agent["nodes"] = agent.get("nodes", []) + new_nodes
|
|
||||||
agent["links"] = agent.get("links", []) + new_links
|
|
||||||
logger.debug(f"Added {len(new_nodes)} nodes, {len(new_links)} links")
|
|
||||||
|
|
||||||
elif patch_type == "remove":
|
|
||||||
node_ids_to_remove = set(p.get("node_ids", []))
|
|
||||||
link_ids_to_remove = set(p.get("link_ids", []))
|
|
||||||
|
|
||||||
# Remove nodes
|
|
||||||
agent["nodes"] = [
|
|
||||||
n for n in agent.get("nodes", []) if n["id"] not in node_ids_to_remove
|
|
||||||
]
|
|
||||||
|
|
||||||
# Remove links (both explicit and those referencing removed nodes)
|
|
||||||
agent["links"] = [
|
|
||||||
link
|
|
||||||
for link in agent.get("links", [])
|
|
||||||
if link["id"] not in link_ids_to_remove
|
|
||||||
and link["source_id"] not in node_ids_to_remove
|
|
||||||
and link["sink_id"] not in node_ids_to_remove
|
|
||||||
]
|
|
||||||
|
|
||||||
logger.debug(
|
|
||||||
f"Removed {len(node_ids_to_remove)} nodes, {len(link_ids_to_remove)} links"
|
|
||||||
)
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def _deep_update(target: dict, source: dict) -> None:
|
|
||||||
"""Recursively update a dict with another dict."""
|
|
||||||
for key, value in source.items():
|
|
||||||
if key in target and isinstance(target[key], dict) and isinstance(value, dict):
|
|
||||||
_deep_update(target[key], value)
|
|
||||||
else:
|
|
||||||
target[key] = value
|
|
||||||
|
|||||||
@@ -1,606 +0,0 @@
|
|||||||
"""Agent fixer - Fixes common LLM generation errors."""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import uuid
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from .utils import (
|
|
||||||
ADDTODICTIONARY_BLOCK_ID,
|
|
||||||
ADDTOLIST_BLOCK_ID,
|
|
||||||
CODE_EXECUTION_BLOCK_ID,
|
|
||||||
CONDITION_BLOCK_ID,
|
|
||||||
CREATEDICT_BLOCK_ID,
|
|
||||||
CREATELIST_BLOCK_ID,
|
|
||||||
DATA_SAMPLING_BLOCK_ID,
|
|
||||||
DOUBLE_CURLY_BRACES_BLOCK_IDS,
|
|
||||||
GET_CURRENT_DATE_BLOCK_ID,
|
|
||||||
STORE_VALUE_BLOCK_ID,
|
|
||||||
UNIVERSAL_TYPE_CONVERTER_BLOCK_ID,
|
|
||||||
get_blocks_info,
|
|
||||||
is_valid_uuid,
|
|
||||||
)
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def fix_agent_ids(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix invalid UUIDs in agent and link IDs."""
|
|
||||||
# Fix agent ID
|
|
||||||
if not is_valid_uuid(agent.get("id", "")):
|
|
||||||
agent["id"] = str(uuid.uuid4())
|
|
||||||
logger.debug(f"Fixed agent ID: {agent['id']}")
|
|
||||||
|
|
||||||
# Fix node IDs
|
|
||||||
id_mapping = {} # Old ID -> New ID
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
if not is_valid_uuid(node.get("id", "")):
|
|
||||||
old_id = node.get("id", "")
|
|
||||||
new_id = str(uuid.uuid4())
|
|
||||||
id_mapping[old_id] = new_id
|
|
||||||
node["id"] = new_id
|
|
||||||
logger.debug(f"Fixed node ID: {old_id} -> {new_id}")
|
|
||||||
|
|
||||||
# Fix link IDs and update references
|
|
||||||
for link in agent.get("links", []):
|
|
||||||
if not is_valid_uuid(link.get("id", "")):
|
|
||||||
link["id"] = str(uuid.uuid4())
|
|
||||||
logger.debug(f"Fixed link ID: {link['id']}")
|
|
||||||
|
|
||||||
# Update source/sink IDs if they were remapped
|
|
||||||
if link.get("source_id") in id_mapping:
|
|
||||||
link["source_id"] = id_mapping[link["source_id"]]
|
|
||||||
if link.get("sink_id") in id_mapping:
|
|
||||||
link["sink_id"] = id_mapping[link["sink_id"]]
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_double_curly_braces(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix single curly braces to double in template blocks."""
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
if node.get("block_id") not in DOUBLE_CURLY_BRACES_BLOCK_IDS:
|
|
||||||
continue
|
|
||||||
|
|
||||||
input_data = node.get("input_default", {})
|
|
||||||
for key in ("prompt", "format"):
|
|
||||||
if key in input_data and isinstance(input_data[key], str):
|
|
||||||
original = input_data[key]
|
|
||||||
# Fix simple variable references: {var} -> {{var}}
|
|
||||||
fixed = re.sub(
|
|
||||||
r"(?<!\{)\{([a-zA-Z_][a-zA-Z0-9_]*)\}(?!\})",
|
|
||||||
r"{{\1}}",
|
|
||||||
original,
|
|
||||||
)
|
|
||||||
if fixed != original:
|
|
||||||
input_data[key] = fixed
|
|
||||||
logger.debug(f"Fixed curly braces in {key}")
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_storevalue_before_condition(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Add StoreValueBlock before ConditionBlock if needed for value2."""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
|
|
||||||
# Find all ConditionBlock nodes
|
|
||||||
condition_node_ids = {
|
|
||||||
node["id"] for node in nodes if node.get("block_id") == CONDITION_BLOCK_ID
|
|
||||||
}
|
|
||||||
|
|
||||||
if not condition_node_ids:
|
|
||||||
return agent
|
|
||||||
|
|
||||||
new_nodes = []
|
|
||||||
new_links = []
|
|
||||||
processed_conditions = set()
|
|
||||||
|
|
||||||
for link in links:
|
|
||||||
sink_id = link.get("sink_id")
|
|
||||||
sink_name = link.get("sink_name")
|
|
||||||
|
|
||||||
# Check if this link goes to a ConditionBlock's value2
|
|
||||||
if sink_id in condition_node_ids and sink_name == "value2":
|
|
||||||
source_node = next(
|
|
||||||
(n for n in nodes if n["id"] == link.get("source_id")), None
|
|
||||||
)
|
|
||||||
|
|
||||||
# Skip if source is already a StoreValueBlock
|
|
||||||
if source_node and source_node.get("block_id") == STORE_VALUE_BLOCK_ID:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Skip if we already processed this condition
|
|
||||||
if sink_id in processed_conditions:
|
|
||||||
continue
|
|
||||||
|
|
||||||
processed_conditions.add(sink_id)
|
|
||||||
|
|
||||||
# Create StoreValueBlock
|
|
||||||
store_node_id = str(uuid.uuid4())
|
|
||||||
store_node = {
|
|
||||||
"id": store_node_id,
|
|
||||||
"block_id": STORE_VALUE_BLOCK_ID,
|
|
||||||
"input_default": {"data": None},
|
|
||||||
"metadata": {"position": {"x": 0, "y": -100}},
|
|
||||||
}
|
|
||||||
new_nodes.append(store_node)
|
|
||||||
|
|
||||||
# Create link: original source -> StoreValueBlock
|
|
||||||
new_links.append(
|
|
||||||
{
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"source_id": link["source_id"],
|
|
||||||
"source_name": link["source_name"],
|
|
||||||
"sink_id": store_node_id,
|
|
||||||
"sink_name": "input",
|
|
||||||
"is_static": False,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# Update original link: StoreValueBlock -> ConditionBlock
|
|
||||||
link["source_id"] = store_node_id
|
|
||||||
link["source_name"] = "output"
|
|
||||||
|
|
||||||
logger.debug(f"Added StoreValueBlock before ConditionBlock {sink_id}")
|
|
||||||
|
|
||||||
if new_nodes:
|
|
||||||
agent["nodes"] = nodes + new_nodes
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_addtolist_blocks(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix AddToList blocks by adding prerequisite empty AddToList block.
|
|
||||||
|
|
||||||
When an AddToList block is found:
|
|
||||||
1. Checks if there's a CreateListBlock before it
|
|
||||||
2. Removes CreateListBlock if linked directly to AddToList
|
|
||||||
3. Adds an empty AddToList block before the original
|
|
||||||
4. Ensures the original has a self-referencing link
|
|
||||||
"""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
new_nodes = []
|
|
||||||
original_addtolist_ids = set()
|
|
||||||
nodes_to_remove = set()
|
|
||||||
links_to_remove = []
|
|
||||||
|
|
||||||
# First pass: identify CreateListBlock nodes to remove
|
|
||||||
for link in links:
|
|
||||||
source_node = next(
|
|
||||||
(n for n in nodes if n.get("id") == link.get("source_id")), None
|
|
||||||
)
|
|
||||||
sink_node = next((n for n in nodes if n.get("id") == link.get("sink_id")), None)
|
|
||||||
|
|
||||||
if (
|
|
||||||
source_node
|
|
||||||
and sink_node
|
|
||||||
and source_node.get("block_id") == CREATELIST_BLOCK_ID
|
|
||||||
and sink_node.get("block_id") == ADDTOLIST_BLOCK_ID
|
|
||||||
):
|
|
||||||
nodes_to_remove.add(source_node.get("id"))
|
|
||||||
links_to_remove.append(link)
|
|
||||||
logger.debug(f"Removing CreateListBlock {source_node.get('id')}")
|
|
||||||
|
|
||||||
# Second pass: process AddToList blocks
|
|
||||||
filtered_nodes = []
|
|
||||||
for node in nodes:
|
|
||||||
if node.get("id") in nodes_to_remove:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if node.get("block_id") == ADDTOLIST_BLOCK_ID:
|
|
||||||
original_addtolist_ids.add(node.get("id"))
|
|
||||||
node_id = node.get("id")
|
|
||||||
pos = node.get("metadata", {}).get("position", {"x": 0, "y": 0})
|
|
||||||
|
|
||||||
# Check if already has prerequisite
|
|
||||||
has_prereq = any(
|
|
||||||
link.get("sink_id") == node_id
|
|
||||||
and link.get("sink_name") == "list"
|
|
||||||
and link.get("source_name") == "updated_list"
|
|
||||||
for link in links
|
|
||||||
)
|
|
||||||
|
|
||||||
if not has_prereq:
|
|
||||||
# Remove links to "list" input (except self-reference)
|
|
||||||
for link in links:
|
|
||||||
if (
|
|
||||||
link.get("sink_id") == node_id
|
|
||||||
and link.get("sink_name") == "list"
|
|
||||||
and link.get("source_id") != node_id
|
|
||||||
and link not in links_to_remove
|
|
||||||
):
|
|
||||||
links_to_remove.append(link)
|
|
||||||
|
|
||||||
# Create prerequisite AddToList block
|
|
||||||
prereq_id = str(uuid.uuid4())
|
|
||||||
prereq_node = {
|
|
||||||
"id": prereq_id,
|
|
||||||
"block_id": ADDTOLIST_BLOCK_ID,
|
|
||||||
"input_default": {"list": [], "entry": None, "entries": []},
|
|
||||||
"metadata": {
|
|
||||||
"position": {"x": pos.get("x", 0) - 800, "y": pos.get("y", 0)}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
new_nodes.append(prereq_node)
|
|
||||||
|
|
||||||
# Link prerequisite to original
|
|
||||||
links.append(
|
|
||||||
{
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"source_id": prereq_id,
|
|
||||||
"source_name": "updated_list",
|
|
||||||
"sink_id": node_id,
|
|
||||||
"sink_name": "list",
|
|
||||||
"is_static": False,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
logger.debug(f"Added prerequisite AddToList block for {node_id}")
|
|
||||||
|
|
||||||
filtered_nodes.append(node)
|
|
||||||
|
|
||||||
# Remove marked links
|
|
||||||
filtered_links = [link for link in links if link not in links_to_remove]
|
|
||||||
|
|
||||||
# Add self-referencing links for original AddToList blocks
|
|
||||||
for node in filtered_nodes + new_nodes:
|
|
||||||
if (
|
|
||||||
node.get("block_id") == ADDTOLIST_BLOCK_ID
|
|
||||||
and node.get("id") in original_addtolist_ids
|
|
||||||
):
|
|
||||||
node_id = node.get("id")
|
|
||||||
has_self_ref = any(
|
|
||||||
link["source_id"] == node_id
|
|
||||||
and link["sink_id"] == node_id
|
|
||||||
and link["source_name"] == "updated_list"
|
|
||||||
and link["sink_name"] == "list"
|
|
||||||
for link in filtered_links
|
|
||||||
)
|
|
||||||
if not has_self_ref:
|
|
||||||
filtered_links.append(
|
|
||||||
{
|
|
||||||
"id": str(uuid.uuid4()),
|
|
||||||
"source_id": node_id,
|
|
||||||
"source_name": "updated_list",
|
|
||||||
"sink_id": node_id,
|
|
||||||
"sink_name": "list",
|
|
||||||
"is_static": False,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
logger.debug(f"Added self-reference for AddToList {node_id}")
|
|
||||||
|
|
||||||
agent["nodes"] = filtered_nodes + new_nodes
|
|
||||||
agent["links"] = filtered_links
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_addtodictionary_blocks(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix AddToDictionary blocks by removing empty CreateDictionary nodes."""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
nodes_to_remove = set()
|
|
||||||
links_to_remove = []
|
|
||||||
|
|
||||||
for link in links:
|
|
||||||
source_node = next(
|
|
||||||
(n for n in nodes if n.get("id") == link.get("source_id")), None
|
|
||||||
)
|
|
||||||
sink_node = next((n for n in nodes if n.get("id") == link.get("sink_id")), None)
|
|
||||||
|
|
||||||
if (
|
|
||||||
source_node
|
|
||||||
and sink_node
|
|
||||||
and source_node.get("block_id") == CREATEDICT_BLOCK_ID
|
|
||||||
and sink_node.get("block_id") == ADDTODICTIONARY_BLOCK_ID
|
|
||||||
):
|
|
||||||
nodes_to_remove.add(source_node.get("id"))
|
|
||||||
links_to_remove.append(link)
|
|
||||||
logger.debug(f"Removing CreateDictionary {source_node.get('id')}")
|
|
||||||
|
|
||||||
agent["nodes"] = [n for n in nodes if n.get("id") not in nodes_to_remove]
|
|
||||||
agent["links"] = [link for link in links if link not in links_to_remove]
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_code_execution_output(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix CodeExecutionBlock output: change 'response' to 'stdout_logs'."""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
|
|
||||||
for link in links:
|
|
||||||
source_node = next(
|
|
||||||
(n for n in nodes if n.get("id") == link.get("source_id")), None
|
|
||||||
)
|
|
||||||
if (
|
|
||||||
source_node
|
|
||||||
and source_node.get("block_id") == CODE_EXECUTION_BLOCK_ID
|
|
||||||
and link.get("source_name") == "response"
|
|
||||||
):
|
|
||||||
link["source_name"] = "stdout_logs"
|
|
||||||
logger.debug("Fixed CodeExecutionBlock output: response -> stdout_logs")
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_data_sampling_sample_size(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix DataSamplingBlock by setting sample_size to 1 as default."""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
links_to_remove = []
|
|
||||||
|
|
||||||
for node in nodes:
|
|
||||||
if node.get("block_id") == DATA_SAMPLING_BLOCK_ID:
|
|
||||||
node_id = node.get("id")
|
|
||||||
input_default = node.get("input_default", {})
|
|
||||||
|
|
||||||
# Remove links to sample_size
|
|
||||||
for link in links:
|
|
||||||
if (
|
|
||||||
link.get("sink_id") == node_id
|
|
||||||
and link.get("sink_name") == "sample_size"
|
|
||||||
):
|
|
||||||
links_to_remove.append(link)
|
|
||||||
|
|
||||||
# Set default
|
|
||||||
input_default["sample_size"] = 1
|
|
||||||
node["input_default"] = input_default
|
|
||||||
logger.debug(f"Fixed DataSamplingBlock {node_id} sample_size to 1")
|
|
||||||
|
|
||||||
if links_to_remove:
|
|
||||||
agent["links"] = [link for link in links if link not in links_to_remove]
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_node_x_coordinates(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix node x-coordinates to ensure 800+ unit spacing between linked nodes."""
|
|
||||||
nodes = agent.get("nodes", [])
|
|
||||||
links = agent.get("links", [])
|
|
||||||
node_lookup = {n.get("id"): n for n in nodes}
|
|
||||||
|
|
||||||
for link in links:
|
|
||||||
source_id = link.get("source_id")
|
|
||||||
sink_id = link.get("sink_id")
|
|
||||||
|
|
||||||
source_node = node_lookup.get(source_id)
|
|
||||||
sink_node = node_lookup.get(sink_id)
|
|
||||||
|
|
||||||
if not source_node or not sink_node:
|
|
||||||
continue
|
|
||||||
|
|
||||||
source_pos = source_node.get("metadata", {}).get("position", {})
|
|
||||||
sink_pos = sink_node.get("metadata", {}).get("position", {})
|
|
||||||
|
|
||||||
source_x = source_pos.get("x", 0)
|
|
||||||
sink_x = sink_pos.get("x", 0)
|
|
||||||
|
|
||||||
if abs(sink_x - source_x) < 800:
|
|
||||||
new_x = source_x + 800
|
|
||||||
if "metadata" not in sink_node:
|
|
||||||
sink_node["metadata"] = {}
|
|
||||||
if "position" not in sink_node["metadata"]:
|
|
||||||
sink_node["metadata"]["position"] = {}
|
|
||||||
sink_node["metadata"]["position"]["x"] = new_x
|
|
||||||
logger.debug(f"Fixed node {sink_id} x: {sink_x} -> {new_x}")
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_getcurrentdate_offset(agent: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Fix GetCurrentDateBlock offset to ensure it's positive."""
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
if node.get("block_id") == GET_CURRENT_DATE_BLOCK_ID:
|
|
||||||
input_default = node.get("input_default", {})
|
|
||||||
if "offset" in input_default:
|
|
||||||
offset = input_default["offset"]
|
|
||||||
if isinstance(offset, (int, float)) and offset < 0:
|
|
||||||
input_default["offset"] = abs(offset)
|
|
||||||
logger.debug(f"Fixed offset: {offset} -> {abs(offset)}")
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_ai_model_parameter(
|
|
||||||
agent: dict[str, Any],
|
|
||||||
blocks_info: list[dict[str, Any]],
|
|
||||||
default_model: str = "gpt-4o",
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""Add default model parameter to AI blocks if missing."""
|
|
||||||
block_map = {b.get("id"): b for b in blocks_info}
|
|
||||||
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
block_id = node.get("block_id")
|
|
||||||
block = block_map.get(block_id)
|
|
||||||
|
|
||||||
if not block:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Check if block has AI category
|
|
||||||
categories = block.get("categories", [])
|
|
||||||
is_ai_block = any(
|
|
||||||
cat.get("category") == "AI" for cat in categories if isinstance(cat, dict)
|
|
||||||
)
|
|
||||||
|
|
||||||
if is_ai_block:
|
|
||||||
input_default = node.get("input_default", {})
|
|
||||||
if "model" not in input_default:
|
|
||||||
input_default["model"] = default_model
|
|
||||||
node["input_default"] = input_default
|
|
||||||
logger.debug(
|
|
||||||
f"Added model '{default_model}' to AI block {node.get('id')}"
|
|
||||||
)
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_link_static_properties(
|
|
||||||
agent: dict[str, Any], blocks_info: list[dict[str, Any]]
|
|
||||||
) -> dict[str, Any]:
|
|
||||||
"""Fix is_static property based on source block's staticOutput."""
|
|
||||||
block_map = {b.get("id"): b for b in blocks_info}
|
|
||||||
node_lookup = {n.get("id"): n for n in agent.get("nodes", [])}
|
|
||||||
|
|
||||||
for link in agent.get("links", []):
|
|
||||||
source_node = node_lookup.get(link.get("source_id"))
|
|
||||||
if not source_node:
|
|
||||||
continue
|
|
||||||
|
|
||||||
source_block = block_map.get(source_node.get("block_id"))
|
|
||||||
if not source_block:
|
|
||||||
continue
|
|
||||||
|
|
||||||
static_output = source_block.get("staticOutput", False)
|
|
||||||
if link.get("is_static") != static_output:
|
|
||||||
link["is_static"] = static_output
|
|
||||||
logger.debug(f"Fixed link {link.get('id')} is_static to {static_output}")
|
|
||||||
|
|
||||||
return agent
|
|
||||||
|
|
||||||
|
|
||||||
def fix_data_type_mismatch(
    agent: dict[str, Any], blocks_info: list[dict[str, Any]]
) -> dict[str, Any]:
    """Fix data type mismatches by inserting UniversalTypeConverterBlock.

    For every link whose declared source output type and sink input type
    disagree (beyond the integer/number equivalence), the direct link is
    replaced by a pair of links routed through a freshly created converter
    node configured with the sink's target type.

    Args:
        agent: Agent JSON dict with "nodes" and "links".
        blocks_info: Block metadata dicts with "inputSchema"/"outputSchema".

    Returns:
        The agent dict, possibly with converter nodes added and links rerouted.
    """
    nodes = agent.get("nodes", [])
    links = agent.get("links", [])
    block_map = {b.get("id"): b for b in blocks_info}
    node_lookup = {n.get("id"): n for n in nodes}

    def get_property_type(schema: dict, name: str) -> str | None:
        # "_#_" separates a parent property from a nested child property:
        # resolve the child through the parent's "properties" sub-schema.
        if "_#_" in name:
            parent, child = name.split("_#_", 1)
            parent_schema = schema.get(parent, {})
            if "properties" in parent_schema:
                return parent_schema["properties"].get(child, {}).get("type")
            return None
        return schema.get(name, {}).get("type")

    def are_types_compatible(src: str, sink: str) -> bool:
        # integer and number are interchangeable; everything else must match exactly.
        if {src, sink} <= {"integer", "number"}:
            return True
        return src == sink

    # JSON-schema type name -> converter "type" input vocabulary.
    type_mapping = {
        "string": "string",
        "text": "string",
        "integer": "number",
        "number": "number",
        "float": "number",
        "boolean": "boolean",
        "bool": "boolean",
        "array": "list",
        "list": "list",
        "object": "dictionary",
        "dict": "dictionary",
        "dictionary": "dictionary",
    }

    new_links = []
    nodes_to_add = []

    for link in links:
        source_node = node_lookup.get(link.get("source_id"))
        sink_node = node_lookup.get(link.get("sink_id"))

        # Dangling links are kept as-is; validation reports them separately.
        if not source_node or not sink_node:
            new_links.append(link)
            continue

        source_block = block_map.get(source_node.get("block_id"))
        sink_block = block_map.get(sink_node.get("block_id"))

        if not source_block or not sink_block:
            new_links.append(link)
            continue

        source_outputs = source_block.get("outputSchema", {}).get("properties", {})
        sink_inputs = sink_block.get("inputSchema", {}).get("properties", {})

        source_type = get_property_type(source_outputs, link.get("source_name", ""))
        sink_type = get_property_type(sink_inputs, link.get("sink_name", ""))

        # Only rewrite when both ends declare a type and they are incompatible.
        if (
            source_type
            and sink_type
            and not are_types_compatible(source_type, sink_type)
        ):
            # Insert type converter
            converter_id = str(uuid.uuid4())
            # Fall back to the raw sink type when it has no mapping entry.
            target_type = type_mapping.get(sink_type, sink_type)

            converter_node = {
                "id": converter_id,
                "block_id": UNIVERSAL_TYPE_CONVERTER_BLOCK_ID,
                "input_default": {"type": target_type},
                "metadata": {"position": {"x": 0, "y": 100}},
            }
            nodes_to_add.append(converter_node)

            # source -> converter
            new_links.append(
                {
                    "id": str(uuid.uuid4()),
                    "source_id": link["source_id"],
                    "source_name": link["source_name"],
                    "sink_id": converter_id,
                    "sink_name": "value",
                    "is_static": False,
                }
            )

            # converter -> sink
            new_links.append(
                {
                    "id": str(uuid.uuid4()),
                    "source_id": converter_id,
                    "source_name": "value",
                    "sink_id": link["sink_id"],
                    "sink_name": link["sink_name"],
                    "is_static": False,
                }
            )

            logger.debug(f"Inserted type converter: {source_type} -> {target_type}")
        else:
            new_links.append(link)

    # Replace the node/link lists only if a converter was actually inserted;
    # otherwise new_links holds the same links and the agent is untouched.
    if nodes_to_add:
        agent["nodes"] = nodes + nodes_to_add
        agent["links"] = new_links

    return agent
|
|
||||||
|
|
||||||
|
|
||||||
def apply_all_fixes(
    agent: dict[str, Any], blocks_info: list[dict[str, Any]] | None = None
) -> dict[str, Any]:
    """Apply all fixes to an agent JSON.

    Runs the basic structural fixers first, then the fixers that need block
    schema metadata (fetched via get_blocks_info() when the caller did not
    supply any).

    Args:
        agent: Agent JSON dict
        blocks_info: Optional list of block info dicts for advanced fixes

    Returns:
        Fixed agent JSON
    """
    # Basic fixes (no block info needed)
    agent = fix_agent_ids(agent)
    agent = fix_double_curly_braces(agent)
    agent = fix_storevalue_before_condition(agent)
    agent = fix_addtolist_blocks(agent)
    agent = fix_addtodictionary_blocks(agent)
    agent = fix_code_execution_output(agent)
    agent = fix_data_sampling_sample_size(agent)
    agent = fix_node_x_coordinates(agent)
    agent = fix_getcurrentdate_offset(agent)

    # Advanced fixes (require block info)
    if blocks_info is None:
        blocks_info = get_blocks_info()

    agent = fix_ai_model_parameter(agent, blocks_info)
    agent = fix_link_static_properties(agent, blocks_info)
    agent = fix_data_type_mismatch(agent, blocks_info)

    return agent
|
|
||||||
@@ -1,225 +0,0 @@
|
|||||||
"""Prompt templates for agent generation."""
|
|
||||||
|
|
||||||
DECOMPOSITION_PROMPT = """
|
|
||||||
You are an expert AutoGPT Workflow Decomposer. Your task is to analyze a user's high-level goal and break it down into a clear, step-by-step plan using the available blocks.
|
|
||||||
|
|
||||||
Each step should represent a distinct, automatable action suitable for execution by an AI automation system.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
FIRST: Analyze the user's goal and determine:
|
|
||||||
1) Design-time configuration (fixed settings that won't change per run)
|
|
||||||
2) Runtime inputs (values the agent's end-user will provide each time it runs)
|
|
||||||
|
|
||||||
For anything that can vary per run (email addresses, names, dates, search terms, etc.):
|
|
||||||
- DO NOT ask for the actual value
|
|
||||||
- Instead, define it as an Agent Input with a clear name, type, and description
|
|
||||||
|
|
||||||
Only ask clarifying questions about design-time config that affects how you build the workflow:
|
|
||||||
- Which external service to use (e.g., "Gmail vs Outlook", "Notion vs Google Docs")
|
|
||||||
- Required formats or structures (e.g., "CSV, JSON, or PDF output?")
|
|
||||||
- Business rules that must be hard-coded
|
|
||||||
|
|
||||||
IMPORTANT CLARIFICATIONS POLICY:
|
|
||||||
- Ask no more than five essential questions
|
|
||||||
- Do not ask for concrete values that can be provided at runtime as Agent Inputs
|
|
||||||
- Do not ask for API keys or credentials; the platform handles those directly
|
|
||||||
- If there is enough information to infer reasonable defaults, prefer to propose defaults
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
GUIDELINES:
|
|
||||||
1. List each step as a numbered item
|
|
||||||
2. Describe the action clearly and specify inputs/outputs
|
|
||||||
3. Ensure steps are in logical, sequential order
|
|
||||||
4. Mention block names naturally (e.g., "Use GetWeatherByLocationBlock to...")
|
|
||||||
5. Help the user reach their goal efficiently
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
RULES:
|
|
||||||
1. OUTPUT FORMAT: Only output either clarifying questions OR step-by-step instructions, not both
|
|
||||||
2. USE ONLY THE BLOCKS PROVIDED
|
|
||||||
3. ALL required_input fields must be provided
|
|
||||||
4. Data types of linked properties must match
|
|
||||||
5. Write expert-level prompts for AI-related blocks
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
CRITICAL BLOCK RESTRICTIONS:
|
|
||||||
1. AddToListBlock: Outputs updated list EVERY addition, not after all additions
|
|
||||||
2. SendEmailBlock: Draft the email for user review; set SMTP config based on email type
|
|
||||||
3. ConditionBlock: value2 is reference, value1 is contrast
|
|
||||||
4. CodeExecutionBlock: DO NOT USE - use AI blocks instead
|
|
||||||
5. ReadCsvBlock: Only use the 'rows' output, not 'row'
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
OUTPUT FORMAT:
|
|
||||||
|
|
||||||
If more information is needed:
|
|
||||||
```json
|
|
||||||
{{
|
|
||||||
"type": "clarifying_questions",
|
|
||||||
"questions": [
|
|
||||||
{{
|
|
||||||
"question": "Which email provider should be used? (Gmail, Outlook, custom SMTP)",
|
|
||||||
"keyword": "email_provider",
|
|
||||||
"example": "Gmail"
|
|
||||||
}}
|
|
||||||
]
|
|
||||||
}}
|
|
||||||
```
|
|
||||||
|
|
||||||
If ready to proceed:
|
|
||||||
```json
|
|
||||||
{{
|
|
||||||
"type": "instructions",
|
|
||||||
"steps": [
|
|
||||||
{{
|
|
||||||
"step_number": 1,
|
|
||||||
"block_name": "AgentShortTextInputBlock",
|
|
||||||
"description": "Get the URL of the content to analyze.",
|
|
||||||
"inputs": [{{"name": "name", "value": "URL"}}],
|
|
||||||
"outputs": [{{"name": "result", "description": "The URL entered by user"}}]
|
|
||||||
}}
|
|
||||||
]
|
|
||||||
}}
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
AVAILABLE BLOCKS:
|
|
||||||
{block_summaries}
|
|
||||||
"""
|
|
||||||
|
|
||||||
GENERATION_PROMPT = """
|
|
||||||
You are an expert AI workflow builder. Generate a valid agent JSON from the given instructions.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
NODES:
|
|
||||||
Each node must include:
|
|
||||||
- `id`: Unique UUID v4 (e.g. `a8f5b1e2-c3d4-4e5f-8a9b-0c1d2e3f4a5b`)
|
|
||||||
- `block_id`: The block identifier (must match an Allowed Block)
|
|
||||||
- `input_default`: Dict of inputs (can be empty if no static inputs needed)
|
|
||||||
- `metadata`: Must contain:
|
|
||||||
- `position`: {{"x": number, "y": number}} - adjacent nodes should differ by 800+ in X
|
|
||||||
- `customized_name`: Clear name describing this block's purpose in the workflow
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
LINKS:
|
|
||||||
Each link connects a source node's output to a sink node's input:
|
|
||||||
- `id`: MUST be UUID v4 (NOT "link-1", "link-2", etc.)
|
|
||||||
- `source_id`: ID of the source node
|
|
||||||
- `source_name`: Output field name from the source block
|
|
||||||
- `sink_id`: ID of the sink node
|
|
||||||
- `sink_name`: Input field name on the sink block
|
|
||||||
- `is_static`: true only if source block has static_output: true
|
|
||||||
|
|
||||||
CRITICAL: All IDs must be valid UUID v4 format!
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
AGENT (GRAPH):
|
|
||||||
Wrap nodes and links in:
|
|
||||||
- `id`: UUID of the agent
|
|
||||||
- `name`: Short, generic name (avoid specific company names, URLs)
|
|
||||||
- `description`: Short, generic description
|
|
||||||
- `nodes`: List of all nodes
|
|
||||||
- `links`: List of all links
|
|
||||||
- `version`: 1
|
|
||||||
- `is_active`: true
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
TIPS:
|
|
||||||
- All required_input fields must be provided via input_default or a valid link
|
|
||||||
- Ensure consistent source_id and sink_id references
|
|
||||||
- Avoid dangling links
|
|
||||||
- Input/output pins must match block schemas
|
|
||||||
- Do not invent unknown block_ids
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
ALLOWED BLOCKS:
|
|
||||||
{block_summaries}
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
Generate the complete agent JSON. Output ONLY valid JSON, no explanation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
PATCH_PROMPT = """
|
|
||||||
You are an expert at modifying AutoGPT agent workflows. Given the current agent and a modification request, generate a JSON patch to update the agent.
|
|
||||||
|
|
||||||
CURRENT AGENT:
|
|
||||||
{current_agent}
|
|
||||||
|
|
||||||
AVAILABLE BLOCKS:
|
|
||||||
{block_summaries}
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
PATCH FORMAT:
|
|
||||||
Return a JSON object with the following structure:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{{
|
|
||||||
"type": "patch",
|
|
||||||
"intent": "Brief description of what the patch does",
|
|
||||||
"patches": [
|
|
||||||
{{
|
|
||||||
"type": "modify",
|
|
||||||
"node_id": "uuid-of-node-to-modify",
|
|
||||||
"changes": {{
|
|
||||||
"input_default": {{"field": "new_value"}},
|
|
||||||
"metadata": {{"customized_name": "New Name"}}
|
|
||||||
}}
|
|
||||||
}},
|
|
||||||
{{
|
|
||||||
"type": "add",
|
|
||||||
"new_nodes": [
|
|
||||||
{{
|
|
||||||
"id": "new-uuid",
|
|
||||||
"block_id": "block-uuid",
|
|
||||||
"input_default": {{}},
|
|
||||||
"metadata": {{"position": {{"x": 0, "y": 0}}, "customized_name": "Name"}}
|
|
||||||
}}
|
|
||||||
],
|
|
||||||
"new_links": [
|
|
||||||
{{
|
|
||||||
"id": "link-uuid",
|
|
||||||
"source_id": "source-node-id",
|
|
||||||
"source_name": "output_field",
|
|
||||||
"sink_id": "sink-node-id",
|
|
||||||
"sink_name": "input_field"
|
|
||||||
}}
|
|
||||||
]
|
|
||||||
}},
|
|
||||||
{{
|
|
||||||
"type": "remove",
|
|
||||||
"node_ids": ["uuid-of-node-to-remove"],
|
|
||||||
"link_ids": ["uuid-of-link-to-remove"]
|
|
||||||
}}
|
|
||||||
]
|
|
||||||
}}
|
|
||||||
```
|
|
||||||
|
|
||||||
If you need more information, return:
|
|
||||||
```json
|
|
||||||
{{
|
|
||||||
"type": "clarifying_questions",
|
|
||||||
"questions": [
|
|
||||||
{{
|
|
||||||
"question": "What specific change do you want?",
|
|
||||||
"keyword": "change_type",
|
|
||||||
"example": "Add error handling"
|
|
||||||
}}
|
|
||||||
]
|
|
||||||
}}
|
|
||||||
```
|
|
||||||
|
|
||||||
Generate the minimal patch needed. Output ONLY valid JSON.
|
|
||||||
"""
|
|
||||||
@@ -0,0 +1,269 @@
|
|||||||
|
"""External Agent Generator service client.
|
||||||
|
|
||||||
|
This module provides a client for communicating with the external Agent Generator
|
||||||
|
microservice. When AGENTGENERATOR_HOST is configured, the agent generation functions
|
||||||
|
will delegate to the external service instead of using the built-in LLM-based implementation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
|
||||||
|
from backend.util.settings import Settings
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# Lazily-created module singletons, populated on first use.
_client: httpx.AsyncClient | None = None  # shared async HTTP client for the service
_settings: Settings | None = None  # cached platform settings
|
||||||
|
|
||||||
|
|
||||||
|
def _get_settings() -> Settings:
    """Get or create settings singleton.

    The Settings object is created on the first call and reused by all
    later calls via the module-level ``_settings`` cache.
    """
    global _settings
    if _settings is None:
        _settings = Settings()
    return _settings
|
||||||
|
|
||||||
|
|
||||||
|
def is_external_service_configured() -> bool:
    """Check if external Agent Generator service is configured.

    Returns:
        True when the ``agentgenerator_host`` setting is a non-empty value.
    """
    settings = _get_settings()
    return bool(settings.config.agentgenerator_host)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_base_url() -> str:
    """Get the base URL for the external service.

    Built from the ``agentgenerator_host`` and ``agentgenerator_port``
    settings; plain HTTP is used.
    """
    settings = _get_settings()
    host = settings.config.agentgenerator_host
    port = settings.config.agentgenerator_port
    return f"http://{host}:{port}"
|
||||||
|
|
||||||
|
|
||||||
|
def _get_client() -> httpx.AsyncClient:
    """Get or create the HTTP client for the external service.

    The client is built once with the configured base URL and request
    timeout, then cached in the module-level ``_client`` singleton.
    """
    global _client
    if _client is None:
        settings = _get_settings()
        _client = httpx.AsyncClient(
            base_url=_get_base_url(),
            timeout=httpx.Timeout(settings.config.agentgenerator_timeout),
        )
    return _client
|
||||||
|
|
||||||
|
|
||||||
|
async def decompose_goal_external(
    description: str, context: str = ""
) -> dict[str, Any] | None:
    """Call the external service to decompose a goal.

    Args:
        description: Natural language goal description
        context: Additional context (e.g., answers to previous questions)

    Returns:
        Dict with either:
        - {"type": "clarifying_questions", "questions": [...]}
        - {"type": "instructions", "steps": [...]}
        - {"type": "unachievable_goal", ...}
        - {"type": "vague_goal", ...}
        Or None on error
    """
    client = _get_client()

    # Build the request payload
    payload: dict[str, Any] = {"description": description}
    if context:
        # The external service uses user_instruction for additional context
        payload["user_instruction"] = context

    try:
        response = await client.post("/api/decompose-description", json=payload)
        response.raise_for_status()
        data = response.json()

        # The service wraps every reply in a success envelope.
        if not data.get("success"):
            logger.error(f"External service returned error: {data.get('error')}")
            return None

        # Map the response to the expected format
        response_type = data.get("type")
        if response_type == "instructions":
            return {"type": "instructions", "steps": data.get("steps", [])}
        elif response_type == "clarifying_questions":
            return {
                "type": "clarifying_questions",
                "questions": data.get("questions", []),
            }
        elif response_type == "unachievable_goal":
            return {
                "type": "unachievable_goal",
                "reason": data.get("reason"),
                "suggested_goal": data.get("suggested_goal"),
            }
        elif response_type == "vague_goal":
            return {
                "type": "vague_goal",
                "suggested_goal": data.get("suggested_goal"),
            }
        else:
            logger.error(
                f"Unknown response type from external service: {response_type}"
            )
            return None

    # All failure modes are logged and collapsed to None.
    # NOTE(review): callers presumably treat None as "fall back to the
    # built-in generator" — confirm at the call sites.
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error calling external agent generator: {e}")
        return None
    except httpx.RequestError as e:
        logger.error(f"Request error calling external agent generator: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error calling external agent generator: {e}")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def generate_agent_external(
    instructions: dict[str, Any]
) -> dict[str, Any] | None:
    """Call the external service to generate an agent from instructions.

    Args:
        instructions: Structured instructions from decompose_goal

    Returns:
        Agent JSON dict or None on error
    """
    client = _get_client()

    try:
        response = await client.post(
            "/api/generate-agent", json={"instructions": instructions}
        )
        response.raise_for_status()
        data = response.json()

        # The service wraps the agent in a success envelope.
        if not data.get("success"):
            logger.error(f"External service returned error: {data.get('error')}")
            return None

        return data.get("agent_json")

    # All failure modes are logged and collapsed to None.
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error calling external agent generator: {e}")
        return None
    except httpx.RequestError as e:
        logger.error(f"Request error calling external agent generator: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error calling external agent generator: {e}")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def generate_agent_patch_external(
    update_request: str, current_agent: dict[str, Any]
) -> dict[str, Any] | None:
    """Call the external service to generate a patch for an existing agent.

    Args:
        update_request: Natural language description of changes
        current_agent: Current agent JSON

    Returns:
        Updated agent JSON, clarifying questions dict, or None on error
    """
    client = _get_client()

    try:
        response = await client.post(
            "/api/update-agent",
            json={
                "update_request": update_request,
                "current_agent_json": current_agent,
            },
        )
        response.raise_for_status()
        data = response.json()

        if not data.get("success"):
            logger.error(f"External service returned error: {data.get('error')}")
            return None

        # Check if it's clarifying questions
        if data.get("type") == "clarifying_questions":
            return {
                "type": "clarifying_questions",
                "questions": data.get("questions", []),
            }

        # Otherwise return the updated agent JSON
        return data.get("agent_json")

    # All failure modes are logged and collapsed to None.
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error calling external agent generator: {e}")
        return None
    except httpx.RequestError as e:
        logger.error(f"Request error calling external agent generator: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error calling external agent generator: {e}")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def get_blocks_external() -> list[dict[str, Any]] | None:
    """Get available blocks from the external service.

    Returns:
        List of block info dicts or None on error
    """
    client = _get_client()

    try:
        response = await client.get("/api/blocks")
        response.raise_for_status()
        data = response.json()

        if not data.get("success"):
            logger.error("External service returned error getting blocks")
            return None

        return data.get("blocks", [])

    # All failure modes are logged and collapsed to None.
    except httpx.HTTPStatusError as e:
        logger.error(f"HTTP error getting blocks from external service: {e}")
        return None
    except httpx.RequestError as e:
        logger.error(f"Request error getting blocks from external service: {e}")
        return None
    except Exception as e:
        logger.error(f"Unexpected error getting blocks from external service: {e}")
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def health_check() -> bool:
    """Report whether the external Agent Generator service is usable.

    Returns:
        True when the service is configured, reachable, reports status
        "healthy", and has its block library loaded; False otherwise.
    """
    # An unconfigured service can never be healthy.
    if not is_external_service_configured():
        return False

    http = _get_client()

    try:
        response = await http.get("/health")
        response.raise_for_status()
        payload = response.json()
        return payload.get("status") == "healthy" and payload.get("blocks_loaded", False)
    except Exception as e:
        # A failed probe is expected when the service is down — warn, don't error.
        logger.warning(f"External agent generator health check failed: {e}")
        return False
|
||||||
|
|
||||||
|
|
||||||
|
async def close_client() -> None:
    """Dispose of the shared HTTP client, if one was ever created.

    Safe to call multiple times; subsequent calls are no-ops until a new
    client is created.
    """
    global _client
    if _client is None:
        return
    await _client.aclose()
    _client = None
|
||||||
@@ -1,213 +0,0 @@
|
|||||||
"""Utilities for agent generation."""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import re
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from backend.data.block import get_blocks
|
|
||||||
|
|
||||||
# UUID validation regex: canonical lowercase UUID v4 only — version nibble
# must be 4 and the variant nibble one of 8/9/a/b.
UUID_REGEX = re.compile(
    r"^[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}$"
)

# Block IDs for various fixes applied by the fixer functions.
STORE_VALUE_BLOCK_ID = "1ff065e9-88e8-4358-9d82-8dc91f622ba9"
CONDITION_BLOCK_ID = "715696a0-e1da-45c8-b209-c2fa9c3b0be6"
ADDTOLIST_BLOCK_ID = "aeb08fc1-2fc1-4141-bc8e-f758f183a822"
ADDTODICTIONARY_BLOCK_ID = "31d1064e-7446-4693-a7d4-65e5ca1180d1"
CREATELIST_BLOCK_ID = "a912d5c7-6e00-4542-b2a9-8034136930e4"
CREATEDICT_BLOCK_ID = "b924ddf4-de4f-4b56-9a85-358930dcbc91"
CODE_EXECUTION_BLOCK_ID = "0b02b072-abe7-11ef-8372-fb5d162dd712"
DATA_SAMPLING_BLOCK_ID = "4a448883-71fa-49cf-91cf-70d793bd7d87"
UNIVERSAL_TYPE_CONVERTER_BLOCK_ID = "95d1b990-ce13-4d88-9737-ba5c2070c97b"
GET_CURRENT_DATE_BLOCK_ID = "b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0b1"

# Blocks handled by the double-curly-brace fixer. Only a few entries were
# labeled in the original source; the unlabeled IDs should be verified
# against the block registry (TODO confirm).
DOUBLE_CURLY_BRACES_BLOCK_IDS = [
    "44f6c8ad-d75c-4ae1-8209-aad1c0326928",  # FillTextTemplateBlock
    "6ab085e2-20b3-4055-bc3e-08036e01eca6",
    "90f8c45e-e983-4644-aa0b-b4ebe2f531bc",
    "363ae599-353e-4804-937e-b2ee3cef3da4",  # AgentOutputBlock
    "3b191d9f-356f-482d-8238-ba04b6d18381",
    "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
    "3a7c4b8d-6e2f-4a5d-b9c1-f8d23c5a9b0e",
    "ed1ae7a0-b770-4089-b520-1f0005fad19a",
    "a892b8d9-3e4e-4e9c-9c1e-75f8efcf1bfa",
    "b29c1b50-5d0e-4d9f-8f9d-1b0e6fcbf0b1",
    "716a67b3-6760-42e7-86dc-18645c6e00fc",
    "530cf046-2ce0-4854-ae2c-659db17c7a46",
    "ed55ac19-356e-4243-a6cb-bc599e9b716f",
    "1f292d4a-41a4-4977-9684-7c8d560b9f91",  # LLM blocks
    "32a87eab-381e-4dd4-bdb8-4c47151be35a",
]
|
|
||||||
|
|
||||||
|
|
||||||
def is_valid_uuid(value: str) -> bool:
    """Return True when *value* is a string matching the canonical UUID v4 form."""
    if not isinstance(value, str):
        return False
    return UUID_REGEX.match(value) is not None
|
|
||||||
|
|
||||||
|
|
||||||
def _compact_schema(schema: dict) -> dict[str, str]:
|
|
||||||
"""Extract compact type info from a JSON schema properties dict.
|
|
||||||
|
|
||||||
Returns a dict of {field_name: type_string} for essential info only.
|
|
||||||
"""
|
|
||||||
props = schema.get("properties", {})
|
|
||||||
result = {}
|
|
||||||
|
|
||||||
for name, prop in props.items():
|
|
||||||
# Skip internal/complex fields
|
|
||||||
if name.startswith("_"):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Get type string
|
|
||||||
type_str = prop.get("type", "any")
|
|
||||||
|
|
||||||
# Handle anyOf/oneOf (optional types)
|
|
||||||
if "anyOf" in prop:
|
|
||||||
types = [t.get("type", "?") for t in prop["anyOf"] if t.get("type")]
|
|
||||||
type_str = "|".join(types) if types else "any"
|
|
||||||
elif "allOf" in prop:
|
|
||||||
type_str = "object"
|
|
||||||
|
|
||||||
# Add array item type if present
|
|
||||||
if type_str == "array" and "items" in prop:
|
|
||||||
items = prop["items"]
|
|
||||||
if isinstance(items, dict):
|
|
||||||
item_type = items.get("type", "any")
|
|
||||||
type_str = f"array[{item_type}]"
|
|
||||||
|
|
||||||
result[name] = type_str
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def get_block_summaries(include_schemas: bool = True) -> str:
    """Generate compact block summaries for prompts.

    Args:
        include_schemas: Whether to include input/output type info

    Returns:
        Formatted string of block summaries (compact format)
    """
    blocks = get_blocks()
    summaries = []

    for block_id, block_cls in blocks.items():
        block = block_cls()
        name = block.name
        desc = getattr(block, "description", "") or ""

        # Truncate description to keep the prompt small.
        if len(desc) > 150:
            desc = desc[:147] + "..."

        if not include_schemas:
            summaries.append(f"- {name} (id: {block_id}): {desc}")
        else:
            # Compact format with type info only
            inputs = {}
            outputs = {}
            required = []

            # Schemas that fail to serialize are skipped silently — the
            # summary then simply omits that side's type info.
            if hasattr(block, "input_schema"):
                try:
                    schema = block.input_schema.jsonschema()
                    inputs = _compact_schema(schema)
                    required = schema.get("required", [])
                except Exception:
                    pass

            if hasattr(block, "output_schema"):
                try:
                    schema = block.output_schema.jsonschema()
                    outputs = _compact_schema(schema)
                except Exception:
                    pass

            # Build compact line format
            # Format: NAME (id): desc | in: {field:type, ...} [required] | out: {field:type}
            in_str = ", ".join(f"{k}:{v}" for k, v in inputs.items())
            out_str = ", ".join(f"{k}:{v}" for k, v in outputs.items())
            req_str = f" req=[{','.join(required)}]" if required else ""

            static = " [static]" if getattr(block, "static_output", False) else ""

            line = f"- {name} (id: {block_id}): {desc}"
            if in_str:
                line += f"\n  in: {{{in_str}}}{req_str}"
            if out_str:
                line += f"\n  out: {{{out_str}}}{static}"

            summaries.append(line)

    return "\n".join(summaries)
|
|
||||||
|
|
||||||
|
|
||||||
def get_blocks_info() -> list[dict[str, Any]]:
    """Get block information with schemas for validation and fixing.

    Returns:
        One dict per registered block with id, name, description,
        categories, staticOutput flag, and JSON-schema input/output
        definitions (empty dicts when a block declares no schema).
    """
    blocks = get_blocks()
    blocks_info = []
    for block_id, block_cls in blocks.items():
        block = block_cls()
        blocks_info.append(
            {
                "id": block_id,
                "name": block.name,
                "description": getattr(block, "description", ""),
                "categories": getattr(block, "categories", []),
                "staticOutput": getattr(block, "static_output", False),
                "inputSchema": (
                    block.input_schema.jsonschema()
                    if hasattr(block, "input_schema")
                    else {}
                ),
                "outputSchema": (
                    block.output_schema.jsonschema()
                    if hasattr(block, "output_schema")
                    else {}
                ),
            }
        )
    return blocks_info
|
|
||||||
|
|
||||||
|
|
||||||
def parse_json_from_llm(text: str) -> dict[str, Any] | None:
|
|
||||||
"""Extract JSON from LLM response (handles markdown code blocks)."""
|
|
||||||
if not text:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# Try fenced code block
|
|
||||||
match = re.search(r"```(?:json)?\s*([\s\S]*?)```", text, re.IGNORECASE)
|
|
||||||
if match:
|
|
||||||
try:
|
|
||||||
return json.loads(match.group(1).strip())
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Try raw text
|
|
||||||
try:
|
|
||||||
return json.loads(text.strip())
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Try finding {...} span
|
|
||||||
start = text.find("{")
|
|
||||||
end = text.rfind("}")
|
|
||||||
if start != -1 and end > start:
|
|
||||||
try:
|
|
||||||
return json.loads(text[start : end + 1])
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Try finding [...] span
|
|
||||||
start = text.find("[")
|
|
||||||
end = text.rfind("]")
|
|
||||||
if start != -1 and end > start:
|
|
||||||
try:
|
|
||||||
return json.loads(text[start : end + 1])
|
|
||||||
except json.JSONDecodeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
return None
|
|
||||||
@@ -1,279 +0,0 @@
|
|||||||
"""Agent validator - Validates agent structure and connections."""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from typing import Any
|
|
||||||
|
|
||||||
from .utils import get_blocks_info
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class AgentValidator:
|
|
||||||
"""Validator for AutoGPT agents with detailed error reporting."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.errors: list[str] = []
|
|
||||||
|
|
||||||
def add_error(self, error: str) -> None:
|
|
||||||
"""Add an error message."""
|
|
||||||
self.errors.append(error)
|
|
||||||
|
|
||||||
def validate_block_existence(
|
|
||||||
self, agent: dict[str, Any], blocks_info: list[dict[str, Any]]
|
|
||||||
) -> bool:
|
|
||||||
"""Validate all block IDs exist in the blocks library."""
|
|
||||||
valid = True
|
|
||||||
valid_block_ids = {b.get("id") for b in blocks_info if b.get("id")}
|
|
||||||
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
block_id = node.get("block_id")
|
|
||||||
node_id = node.get("id")
|
|
||||||
|
|
||||||
if not block_id:
|
|
||||||
self.add_error(f"Node '{node_id}' is missing 'block_id' field.")
|
|
||||||
valid = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
if block_id not in valid_block_ids:
|
|
||||||
self.add_error(
|
|
||||||
f"Node '{node_id}' references block_id '{block_id}' which does not exist."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate_link_node_references(self, agent: dict[str, Any]) -> bool:
|
|
||||||
"""Validate all node IDs referenced in links exist."""
|
|
||||||
valid = True
|
|
||||||
valid_node_ids = {n.get("id") for n in agent.get("nodes", []) if n.get("id")}
|
|
||||||
|
|
||||||
for link in agent.get("links", []):
|
|
||||||
link_id = link.get("id", "Unknown")
|
|
||||||
source_id = link.get("source_id")
|
|
||||||
sink_id = link.get("sink_id")
|
|
||||||
|
|
||||||
if not source_id:
|
|
||||||
self.add_error(f"Link '{link_id}' is missing 'source_id'.")
|
|
||||||
valid = False
|
|
||||||
elif source_id not in valid_node_ids:
|
|
||||||
self.add_error(
|
|
||||||
f"Link '{link_id}' references non-existent source_id '{source_id}'."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
if not sink_id:
|
|
||||||
self.add_error(f"Link '{link_id}' is missing 'sink_id'.")
|
|
||||||
valid = False
|
|
||||||
elif sink_id not in valid_node_ids:
|
|
||||||
self.add_error(
|
|
||||||
f"Link '{link_id}' references non-existent sink_id '{sink_id}'."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate_required_inputs(
|
|
||||||
self, agent: dict[str, Any], blocks_info: list[dict[str, Any]]
|
|
||||||
) -> bool:
|
|
||||||
"""Validate required inputs are provided."""
|
|
||||||
valid = True
|
|
||||||
block_map = {b.get("id"): b for b in blocks_info}
|
|
||||||
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
block_id = node.get("block_id")
|
|
||||||
block = block_map.get(block_id)
|
|
||||||
|
|
||||||
if not block:
|
|
||||||
continue
|
|
||||||
|
|
||||||
required_inputs = block.get("inputSchema", {}).get("required", [])
|
|
||||||
input_defaults = node.get("input_default", {})
|
|
||||||
node_id = node.get("id")
|
|
||||||
|
|
||||||
# Get linked inputs
|
|
||||||
linked_inputs = {
|
|
||||||
link["sink_name"]
|
|
||||||
for link in agent.get("links", [])
|
|
||||||
if link.get("sink_id") == node_id
|
|
||||||
}
|
|
||||||
|
|
||||||
for req_input in required_inputs:
|
|
||||||
if (
|
|
||||||
req_input not in input_defaults
|
|
||||||
and req_input not in linked_inputs
|
|
||||||
and req_input != "credentials"
|
|
||||||
):
|
|
||||||
block_name = block.get("name", "Unknown Block")
|
|
||||||
self.add_error(
|
|
||||||
f"Node '{node_id}' ({block_name}) is missing required input '{req_input}'."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate_data_type_compatibility(
|
|
||||||
self, agent: dict[str, Any], blocks_info: list[dict[str, Any]]
|
|
||||||
) -> bool:
|
|
||||||
"""Validate linked data types are compatible."""
|
|
||||||
valid = True
|
|
||||||
block_map = {b.get("id"): b for b in blocks_info}
|
|
||||||
node_lookup = {n.get("id"): n for n in agent.get("nodes", [])}
|
|
||||||
|
|
||||||
def get_type(schema: dict, name: str) -> str | None:
|
|
||||||
if "_#_" in name:
|
|
||||||
parent, child = name.split("_#_", 1)
|
|
||||||
parent_schema = schema.get(parent, {})
|
|
||||||
if "properties" in parent_schema:
|
|
||||||
return parent_schema["properties"].get(child, {}).get("type")
|
|
||||||
return None
|
|
||||||
return schema.get(name, {}).get("type")
|
|
||||||
|
|
||||||
def are_compatible(src: str, sink: str) -> bool:
|
|
||||||
if {src, sink} <= {"integer", "number"}:
|
|
||||||
return True
|
|
||||||
return src == sink
|
|
||||||
|
|
||||||
for link in agent.get("links", []):
|
|
||||||
source_node = node_lookup.get(link.get("source_id"))
|
|
||||||
sink_node = node_lookup.get(link.get("sink_id"))
|
|
||||||
|
|
||||||
if not source_node or not sink_node:
|
|
||||||
continue
|
|
||||||
|
|
||||||
source_block = block_map.get(source_node.get("block_id"))
|
|
||||||
sink_block = block_map.get(sink_node.get("block_id"))
|
|
||||||
|
|
||||||
if not source_block or not sink_block:
|
|
||||||
continue
|
|
||||||
|
|
||||||
source_outputs = source_block.get("outputSchema", {}).get("properties", {})
|
|
||||||
sink_inputs = sink_block.get("inputSchema", {}).get("properties", {})
|
|
||||||
|
|
||||||
source_type = get_type(source_outputs, link.get("source_name", ""))
|
|
||||||
sink_type = get_type(sink_inputs, link.get("sink_name", ""))
|
|
||||||
|
|
||||||
if source_type and sink_type and not are_compatible(source_type, sink_type):
|
|
||||||
self.add_error(
|
|
||||||
f"Type mismatch: {source_block.get('name')} output '{link['source_name']}' "
|
|
||||||
f"({source_type}) -> {sink_block.get('name')} input '{link['sink_name']}' ({sink_type})."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate_nested_sink_links(
|
|
||||||
self, agent: dict[str, Any], blocks_info: list[dict[str, Any]]
|
|
||||||
) -> bool:
|
|
||||||
"""Validate nested sink links (with _#_ notation)."""
|
|
||||||
valid = True
|
|
||||||
block_map = {b.get("id"): b for b in blocks_info}
|
|
||||||
node_lookup = {n.get("id"): n for n in agent.get("nodes", [])}
|
|
||||||
|
|
||||||
for link in agent.get("links", []):
|
|
||||||
sink_name = link.get("sink_name", "")
|
|
||||||
|
|
||||||
if "_#_" in sink_name:
|
|
||||||
parent, child = sink_name.split("_#_", 1)
|
|
||||||
|
|
||||||
sink_node = node_lookup.get(link.get("sink_id"))
|
|
||||||
if not sink_node:
|
|
||||||
continue
|
|
||||||
|
|
||||||
block = block_map.get(sink_node.get("block_id"))
|
|
||||||
if not block:
|
|
||||||
continue
|
|
||||||
|
|
||||||
input_props = block.get("inputSchema", {}).get("properties", {})
|
|
||||||
parent_schema = input_props.get(parent)
|
|
||||||
|
|
||||||
if not parent_schema:
|
|
||||||
self.add_error(
|
|
||||||
f"Invalid nested link '{sink_name}': parent '{parent}' not found."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not parent_schema.get("additionalProperties"):
|
|
||||||
if not (
|
|
||||||
isinstance(parent_schema, dict)
|
|
||||||
and "properties" in parent_schema
|
|
||||||
and child in parent_schema.get("properties", {})
|
|
||||||
):
|
|
||||||
self.add_error(
|
|
||||||
f"Invalid nested link '{sink_name}': child '{child}' not found in '{parent}'."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate_prompt_spaces(self, agent: dict[str, Any]) -> bool:
|
|
||||||
"""Validate prompts don't have spaces in template variables."""
|
|
||||||
valid = True
|
|
||||||
|
|
||||||
for node in agent.get("nodes", []):
|
|
||||||
input_default = node.get("input_default", {})
|
|
||||||
prompt = input_default.get("prompt", "")
|
|
||||||
|
|
||||||
if not isinstance(prompt, str):
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Find {{...}} with spaces
|
|
||||||
matches = re.finditer(r"\{\{([^}]+)\}\}", prompt)
|
|
||||||
for match in matches:
|
|
||||||
content = match.group(1)
|
|
||||||
if " " in content:
|
|
||||||
self.add_error(
|
|
||||||
f"Node '{node.get('id')}' has spaces in template variable: "
|
|
||||||
f"'{{{{{content}}}}}' should be '{{{{{content.replace(' ', '_')}}}}}'."
|
|
||||||
)
|
|
||||||
valid = False
|
|
||||||
|
|
||||||
return valid
|
|
||||||
|
|
||||||
def validate(
|
|
||||||
self, agent: dict[str, Any], blocks_info: list[dict[str, Any]] | None = None
|
|
||||||
) -> tuple[bool, str | None]:
|
|
||||||
"""Run all validations.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (is_valid, error_message)
|
|
||||||
"""
|
|
||||||
self.errors = []
|
|
||||||
|
|
||||||
if blocks_info is None:
|
|
||||||
blocks_info = get_blocks_info()
|
|
||||||
|
|
||||||
checks = [
|
|
||||||
self.validate_block_existence(agent, blocks_info),
|
|
||||||
self.validate_link_node_references(agent),
|
|
||||||
self.validate_required_inputs(agent, blocks_info),
|
|
||||||
self.validate_data_type_compatibility(agent, blocks_info),
|
|
||||||
self.validate_nested_sink_links(agent, blocks_info),
|
|
||||||
self.validate_prompt_spaces(agent),
|
|
||||||
]
|
|
||||||
|
|
||||||
all_passed = all(checks)
|
|
||||||
|
|
||||||
if all_passed:
|
|
||||||
logger.info("Agent validation successful")
|
|
||||||
return True, None
|
|
||||||
|
|
||||||
error_message = "Agent validation failed:\n"
|
|
||||||
for i, error in enumerate(self.errors, 1):
|
|
||||||
error_message += f"{i}. {error}\n"
|
|
||||||
|
|
||||||
logger.warning(f"Agent validation failed with {len(self.errors)} errors")
|
|
||||||
return False, error_message
|
|
||||||
|
|
||||||
|
|
||||||
def validate_agent(
|
|
||||||
agent: dict[str, Any], blocks_info: list[dict[str, Any]] | None = None
|
|
||||||
) -> tuple[bool, str | None]:
|
|
||||||
"""Convenience function to validate an agent.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (is_valid, error_message)
|
|
||||||
"""
|
|
||||||
validator = AgentValidator()
|
|
||||||
return validator.validate(agent, blocks_info)
|
|
||||||
@@ -8,12 +8,10 @@ from langfuse import observe
|
|||||||
from backend.api.features.chat.model import ChatSession
|
from backend.api.features.chat.model import ChatSession
|
||||||
|
|
||||||
from .agent_generator import (
|
from .agent_generator import (
|
||||||
apply_all_fixes,
|
AgentGeneratorNotConfiguredError,
|
||||||
decompose_goal,
|
decompose_goal,
|
||||||
generate_agent,
|
generate_agent,
|
||||||
get_blocks_info,
|
|
||||||
save_agent_to_library,
|
save_agent_to_library,
|
||||||
validate_agent,
|
|
||||||
)
|
)
|
||||||
from .base import BaseTool
|
from .base import BaseTool
|
||||||
from .models import (
|
from .models import (
|
||||||
@@ -27,9 +25,6 @@ from .models import (
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Maximum retries for agent generation with validation feedback
|
|
||||||
MAX_GENERATION_RETRIES = 2
|
|
||||||
|
|
||||||
|
|
||||||
class CreateAgentTool(BaseTool):
|
class CreateAgentTool(BaseTool):
|
||||||
"""Tool for creating agents from natural language descriptions."""
|
"""Tool for creating agents from natural language descriptions."""
|
||||||
@@ -91,9 +86,8 @@ class CreateAgentTool(BaseTool):
|
|||||||
|
|
||||||
Flow:
|
Flow:
|
||||||
1. Decompose the description into steps (may return clarifying questions)
|
1. Decompose the description into steps (may return clarifying questions)
|
||||||
2. Generate agent JSON from the steps
|
2. Generate agent JSON (external service handles fixing and validation)
|
||||||
3. Apply fixes to correct common LLM errors
|
3. Preview or save based on the save parameter
|
||||||
4. Preview or save based on the save parameter
|
|
||||||
"""
|
"""
|
||||||
description = kwargs.get("description", "").strip()
|
description = kwargs.get("description", "").strip()
|
||||||
context = kwargs.get("context", "")
|
context = kwargs.get("context", "")
|
||||||
@@ -110,11 +104,13 @@ class CreateAgentTool(BaseTool):
|
|||||||
# Step 1: Decompose goal into steps
|
# Step 1: Decompose goal into steps
|
||||||
try:
|
try:
|
||||||
decomposition_result = await decompose_goal(description, context)
|
decomposition_result = await decompose_goal(description, context)
|
||||||
except ValueError as e:
|
except AgentGeneratorNotConfiguredError:
|
||||||
# Handle missing API key or configuration errors
|
|
||||||
return ErrorResponse(
|
return ErrorResponse(
|
||||||
message=f"Agent generation is not configured: {str(e)}",
|
message=(
|
||||||
error="configuration_error",
|
"Agent generation is not available. "
|
||||||
|
"The Agent Generator service is not configured."
|
||||||
|
),
|
||||||
|
error="service_not_configured",
|
||||||
session_id=session_id,
|
session_id=session_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -171,72 +167,32 @@ class CreateAgentTool(BaseTool):
|
|||||||
session_id=session_id,
|
session_id=session_id,
|
||||||
)
|
)
|
||||||
|
|
||||||
# Step 2: Generate agent JSON with retry on validation failure
|
# Step 2: Generate agent JSON (external service handles fixing and validation)
|
||||||
blocks_info = get_blocks_info()
|
try:
|
||||||
agent_json = None
|
agent_json = await generate_agent(decomposition_result)
|
||||||
validation_errors = None
|
except AgentGeneratorNotConfiguredError:
|
||||||
|
return ErrorResponse(
|
||||||
for attempt in range(MAX_GENERATION_RETRIES + 1):
|
message=(
|
||||||
# Generate agent (include validation errors from previous attempt)
|
"Agent generation is not available. "
|
||||||
if attempt == 0:
|
"The Agent Generator service is not configured."
|
||||||
agent_json = await generate_agent(decomposition_result)
|
),
|
||||||
else:
|
error="service_not_configured",
|
||||||
# Retry with validation error feedback
|
session_id=session_id,
|
||||||
logger.info(
|
|
||||||
f"Retry {attempt}/{MAX_GENERATION_RETRIES} with validation feedback"
|
|
||||||
)
|
|
||||||
retry_instructions = {
|
|
||||||
**decomposition_result,
|
|
||||||
"previous_errors": validation_errors,
|
|
||||||
"retry_instructions": (
|
|
||||||
"The previous generation had validation errors. "
|
|
||||||
"Please fix these issues in the new generation:\n"
|
|
||||||
f"{validation_errors}"
|
|
||||||
),
|
|
||||||
}
|
|
||||||
agent_json = await generate_agent(retry_instructions)
|
|
||||||
|
|
||||||
if agent_json is None:
|
|
||||||
if attempt == MAX_GENERATION_RETRIES:
|
|
||||||
return ErrorResponse(
|
|
||||||
message="Failed to generate the agent. Please try again.",
|
|
||||||
error="Generation failed",
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Step 3: Apply fixes to correct common errors
|
|
||||||
agent_json = apply_all_fixes(agent_json, blocks_info)
|
|
||||||
|
|
||||||
# Step 4: Validate the agent
|
|
||||||
is_valid, validation_errors = validate_agent(agent_json, blocks_info)
|
|
||||||
|
|
||||||
if is_valid:
|
|
||||||
logger.info(f"Agent generated successfully on attempt {attempt + 1}")
|
|
||||||
break
|
|
||||||
|
|
||||||
logger.warning(
|
|
||||||
f"Validation failed on attempt {attempt + 1}: {validation_errors}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if attempt == MAX_GENERATION_RETRIES:
|
if agent_json is None:
|
||||||
# Return error with validation details
|
return ErrorResponse(
|
||||||
return ErrorResponse(
|
message="Failed to generate the agent. Please try again.",
|
||||||
message=(
|
error="Generation failed",
|
||||||
f"Generated agent has validation errors after {MAX_GENERATION_RETRIES + 1} attempts. "
|
session_id=session_id,
|
||||||
f"Please try rephrasing your request or simplify the workflow."
|
)
|
||||||
),
|
|
||||||
error="validation_failed",
|
|
||||||
details={"validation_errors": validation_errors},
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
agent_name = agent_json.get("name", "Generated Agent")
|
agent_name = agent_json.get("name", "Generated Agent")
|
||||||
agent_description = agent_json.get("description", "")
|
agent_description = agent_json.get("description", "")
|
||||||
node_count = len(agent_json.get("nodes", []))
|
node_count = len(agent_json.get("nodes", []))
|
||||||
link_count = len(agent_json.get("links", []))
|
link_count = len(agent_json.get("links", []))
|
||||||
|
|
||||||
# Step 4: Preview or save
|
# Step 3: Preview or save
|
||||||
if not save:
|
if not save:
|
||||||
return AgentPreviewResponse(
|
return AgentPreviewResponse(
|
||||||
message=(
|
message=(
|
||||||
|
|||||||
@@ -8,13 +8,10 @@ from langfuse import observe
|
|||||||
from backend.api.features.chat.model import ChatSession
|
from backend.api.features.chat.model import ChatSession
|
||||||
|
|
||||||
from .agent_generator import (
|
from .agent_generator import (
|
||||||
apply_agent_patch,
|
AgentGeneratorNotConfiguredError,
|
||||||
apply_all_fixes,
|
|
||||||
generate_agent_patch,
|
generate_agent_patch,
|
||||||
get_agent_as_json,
|
get_agent_as_json,
|
||||||
get_blocks_info,
|
|
||||||
save_agent_to_library,
|
save_agent_to_library,
|
||||||
validate_agent,
|
|
||||||
)
|
)
|
||||||
from .base import BaseTool
|
from .base import BaseTool
|
||||||
from .models import (
|
from .models import (
|
||||||
@@ -28,9 +25,6 @@ from .models import (
|
|||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# Maximum retries for patch generation with validation feedback
|
|
||||||
MAX_GENERATION_RETRIES = 2
|
|
||||||
|
|
||||||
|
|
||||||
class EditAgentTool(BaseTool):
|
class EditAgentTool(BaseTool):
|
||||||
"""Tool for editing existing agents using natural language."""
|
"""Tool for editing existing agents using natural language."""
|
||||||
@@ -43,7 +37,7 @@ class EditAgentTool(BaseTool):
|
|||||||
def description(self) -> str:
|
def description(self) -> str:
|
||||||
return (
|
return (
|
||||||
"Edit an existing agent from the user's library using natural language. "
|
"Edit an existing agent from the user's library using natural language. "
|
||||||
"Generates a patch to update the agent while preserving unchanged parts."
|
"Generates updates to the agent while preserving unchanged parts."
|
||||||
)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -98,9 +92,8 @@ class EditAgentTool(BaseTool):
|
|||||||
|
|
||||||
Flow:
|
Flow:
|
||||||
1. Fetch the current agent
|
1. Fetch the current agent
|
||||||
2. Generate a patch based on the requested changes
|
2. Generate updated agent (external service handles fixing and validation)
|
||||||
3. Apply the patch to create an updated agent
|
3. Preview or save based on the save parameter
|
||||||
4. Preview or save based on the save parameter
|
|
||||||
"""
|
"""
|
||||||
agent_id = kwargs.get("agent_id", "").strip()
|
agent_id = kwargs.get("agent_id", "").strip()
|
||||||
changes = kwargs.get("changes", "").strip()
|
changes = kwargs.get("changes", "").strip()
|
||||||
@@ -137,121 +130,58 @@ class EditAgentTool(BaseTool):
|
|||||||
if context:
|
if context:
|
||||||
update_request = f"{changes}\n\nAdditional context:\n{context}"
|
update_request = f"{changes}\n\nAdditional context:\n{context}"
|
||||||
|
|
||||||
# Step 2: Generate patch with retry on validation failure
|
# Step 2: Generate updated agent (external service handles fixing and validation)
|
||||||
blocks_info = get_blocks_info()
|
try:
|
||||||
updated_agent = None
|
result = await generate_agent_patch(update_request, current_agent)
|
||||||
validation_errors = None
|
except AgentGeneratorNotConfiguredError:
|
||||||
intent = "Applied requested changes"
|
return ErrorResponse(
|
||||||
|
message=(
|
||||||
for attempt in range(MAX_GENERATION_RETRIES + 1):
|
"Agent editing is not available. "
|
||||||
# Generate patch (include validation errors from previous attempt)
|
"The Agent Generator service is not configured."
|
||||||
try:
|
),
|
||||||
if attempt == 0:
|
error="service_not_configured",
|
||||||
patch_result = await generate_agent_patch(
|
session_id=session_id,
|
||||||
update_request, current_agent
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
# Retry with validation error feedback
|
|
||||||
logger.info(
|
|
||||||
f"Retry {attempt}/{MAX_GENERATION_RETRIES} with validation feedback"
|
|
||||||
)
|
|
||||||
retry_request = (
|
|
||||||
f"{update_request}\n\n"
|
|
||||||
f"IMPORTANT: The previous edit had validation errors. "
|
|
||||||
f"Please fix these issues:\n{validation_errors}"
|
|
||||||
)
|
|
||||||
patch_result = await generate_agent_patch(
|
|
||||||
retry_request, current_agent
|
|
||||||
)
|
|
||||||
except ValueError as e:
|
|
||||||
# Handle missing API key or configuration errors
|
|
||||||
return ErrorResponse(
|
|
||||||
message=f"Agent generation is not configured: {str(e)}",
|
|
||||||
error="configuration_error",
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
if patch_result is None:
|
|
||||||
if attempt == MAX_GENERATION_RETRIES:
|
|
||||||
return ErrorResponse(
|
|
||||||
message="Failed to generate changes. Please try rephrasing.",
|
|
||||||
error="Patch generation failed",
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Check if LLM returned clarifying questions
|
|
||||||
if patch_result.get("type") == "clarifying_questions":
|
|
||||||
questions = patch_result.get("questions", [])
|
|
||||||
return ClarificationNeededResponse(
|
|
||||||
message=(
|
|
||||||
"I need some more information about the changes. "
|
|
||||||
"Please answer the following questions:"
|
|
||||||
),
|
|
||||||
questions=[
|
|
||||||
ClarifyingQuestion(
|
|
||||||
question=q.get("question", ""),
|
|
||||||
keyword=q.get("keyword", ""),
|
|
||||||
example=q.get("example"),
|
|
||||||
)
|
|
||||||
for q in questions
|
|
||||||
],
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Step 3: Apply patch and fixes
|
|
||||||
try:
|
|
||||||
updated_agent = apply_agent_patch(current_agent, patch_result)
|
|
||||||
updated_agent = apply_all_fixes(updated_agent, blocks_info)
|
|
||||||
except Exception as e:
|
|
||||||
if attempt == MAX_GENERATION_RETRIES:
|
|
||||||
return ErrorResponse(
|
|
||||||
message=f"Failed to apply changes: {str(e)}",
|
|
||||||
error="patch_apply_failed",
|
|
||||||
details={"exception": str(e)},
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
validation_errors = str(e)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Step 4: Validate the updated agent
|
|
||||||
is_valid, validation_errors = validate_agent(updated_agent, blocks_info)
|
|
||||||
|
|
||||||
if is_valid:
|
|
||||||
logger.info(f"Agent edited successfully on attempt {attempt + 1}")
|
|
||||||
intent = patch_result.get("intent", "Applied requested changes")
|
|
||||||
break
|
|
||||||
|
|
||||||
logger.warning(
|
|
||||||
f"Validation failed on attempt {attempt + 1}: {validation_errors}"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if attempt == MAX_GENERATION_RETRIES:
|
if result is None:
|
||||||
# Return error with validation details
|
return ErrorResponse(
|
||||||
return ErrorResponse(
|
message="Failed to generate changes. Please try rephrasing.",
|
||||||
message=(
|
error="Update generation failed",
|
||||||
f"Updated agent has validation errors after "
|
session_id=session_id,
|
||||||
f"{MAX_GENERATION_RETRIES + 1} attempts. "
|
)
|
||||||
f"Please try rephrasing your request or simplify the changes."
|
|
||||||
),
|
|
||||||
error="validation_failed",
|
|
||||||
details={"validation_errors": validation_errors},
|
|
||||||
session_id=session_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
# At this point, updated_agent is guaranteed to be set (we return on all failure paths)
|
# Check if LLM returned clarifying questions
|
||||||
assert updated_agent is not None
|
if result.get("type") == "clarifying_questions":
|
||||||
|
questions = result.get("questions", [])
|
||||||
|
return ClarificationNeededResponse(
|
||||||
|
message=(
|
||||||
|
"I need some more information about the changes. "
|
||||||
|
"Please answer the following questions:"
|
||||||
|
),
|
||||||
|
questions=[
|
||||||
|
ClarifyingQuestion(
|
||||||
|
question=q.get("question", ""),
|
||||||
|
keyword=q.get("keyword", ""),
|
||||||
|
example=q.get("example"),
|
||||||
|
)
|
||||||
|
for q in questions
|
||||||
|
],
|
||||||
|
session_id=session_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Result is the updated agent JSON
|
||||||
|
updated_agent = result
|
||||||
|
|
||||||
agent_name = updated_agent.get("name", "Updated Agent")
|
agent_name = updated_agent.get("name", "Updated Agent")
|
||||||
agent_description = updated_agent.get("description", "")
|
agent_description = updated_agent.get("description", "")
|
||||||
node_count = len(updated_agent.get("nodes", []))
|
node_count = len(updated_agent.get("nodes", []))
|
||||||
link_count = len(updated_agent.get("links", []))
|
link_count = len(updated_agent.get("links", []))
|
||||||
|
|
||||||
# Step 5: Preview or save
|
# Step 3: Preview or save
|
||||||
if not save:
|
if not save:
|
||||||
return AgentPreviewResponse(
|
return AgentPreviewResponse(
|
||||||
message=(
|
message=(
|
||||||
f"I've updated the agent. Changes: {intent}. "
|
f"I've updated the agent. "
|
||||||
f"The agent now has {node_count} blocks. "
|
f"The agent now has {node_count} blocks. "
|
||||||
f"Review it and call edit_agent with save=true to save the changes."
|
f"Review it and call edit_agent with save=true to save the changes."
|
||||||
),
|
),
|
||||||
@@ -277,10 +207,7 @@ class EditAgentTool(BaseTool):
|
|||||||
)
|
)
|
||||||
|
|
||||||
return AgentSavedResponse(
|
return AgentSavedResponse(
|
||||||
message=(
|
message=f"Updated agent '{created_graph.name}' has been saved to your library!",
|
||||||
f"Updated agent '{created_graph.name}' has been saved to your library! "
|
|
||||||
f"Changes: {intent}"
|
|
||||||
),
|
|
||||||
agent_id=created_graph.id,
|
agent_id=created_graph.id,
|
||||||
agent_name=created_graph.name,
|
agent_name=created_graph.name,
|
||||||
library_agent_id=library_agent.id,
|
library_agent_id=library_agent.id,
|
||||||
|
|||||||
@@ -350,6 +350,19 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
|||||||
description="Whether to mark failed scans as clean or not",
|
description="Whether to mark failed scans as clean or not",
|
||||||
)
|
)
|
||||||
|
|
||||||
|
agentgenerator_host: str = Field(
|
||||||
|
default="",
|
||||||
|
description="The host for the Agent Generator service (empty to use built-in)",
|
||||||
|
)
|
||||||
|
agentgenerator_port: int = Field(
|
||||||
|
default=8000,
|
||||||
|
description="The port for the Agent Generator service",
|
||||||
|
)
|
||||||
|
agentgenerator_timeout: int = Field(
|
||||||
|
default=120,
|
||||||
|
description="The timeout in seconds for Agent Generator service requests",
|
||||||
|
)
|
||||||
|
|
||||||
enable_example_blocks: bool = Field(
|
enable_example_blocks: bool = Field(
|
||||||
default=False,
|
default=False,
|
||||||
description="Whether to enable example blocks in production",
|
description="Whether to enable example blocks in production",
|
||||||
|
|||||||
@@ -0,0 +1 @@
|
|||||||
|
"""Tests for agent generator module."""
|
||||||
@@ -0,0 +1,273 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Agent Generator core module.
|
||||||
|
|
||||||
|
This test suite verifies that the core functions correctly delegate to
|
||||||
|
the external Agent Generator service.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from backend.api.features.chat.tools.agent_generator import core
|
||||||
|
from backend.api.features.chat.tools.agent_generator.core import (
|
||||||
|
AgentGeneratorNotConfiguredError,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class TestServiceNotConfigured:
|
||||||
|
"""Test that functions raise AgentGeneratorNotConfiguredError when service is not configured."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_decompose_goal_raises_when_not_configured(self):
|
||||||
|
"""Test that decompose_goal raises error when service not configured."""
|
||||||
|
with patch.object(core, "is_external_service_configured", return_value=False):
|
||||||
|
with pytest.raises(AgentGeneratorNotConfiguredError):
|
||||||
|
await core.decompose_goal("Build a chatbot")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_generate_agent_raises_when_not_configured(self):
|
||||||
|
"""Test that generate_agent raises error when service not configured."""
|
||||||
|
with patch.object(core, "is_external_service_configured", return_value=False):
|
||||||
|
with pytest.raises(AgentGeneratorNotConfiguredError):
|
||||||
|
await core.generate_agent({"steps": []})
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_generate_agent_patch_raises_when_not_configured(self):
|
||||||
|
"""Test that generate_agent_patch raises error when service not configured."""
|
||||||
|
with patch.object(core, "is_external_service_configured", return_value=False):
|
||||||
|
with pytest.raises(AgentGeneratorNotConfiguredError):
|
||||||
|
await core.generate_agent_patch("Add a node", {"nodes": []})
|
||||||
|
|
||||||
|
|
||||||
|
class TestDecomposeGoal:
|
||||||
|
"""Test decompose_goal function service delegation."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_calls_external_service(self):
|
||||||
|
"""Test that decompose_goal calls the external service."""
|
||||||
|
expected_result = {"type": "instructions", "steps": ["Step 1"]}
|
||||||
|
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "decompose_goal_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = expected_result
|
||||||
|
|
||||||
|
result = await core.decompose_goal("Build a chatbot")
|
||||||
|
|
||||||
|
mock_external.assert_called_once_with("Build a chatbot", "")
|
||||||
|
assert result == expected_result
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_passes_context_to_external_service(self):
|
||||||
|
"""Test that decompose_goal passes context to external service."""
|
||||||
|
expected_result = {"type": "instructions", "steps": ["Step 1"]}
|
||||||
|
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "decompose_goal_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = expected_result
|
||||||
|
|
||||||
|
await core.decompose_goal("Build a chatbot", "Use Python")
|
||||||
|
|
||||||
|
mock_external.assert_called_once_with("Build a chatbot", "Use Python")
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_returns_none_on_service_failure(self):
|
||||||
|
"""Test that decompose_goal returns None when external service fails."""
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "decompose_goal_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = None
|
||||||
|
|
||||||
|
result = await core.decompose_goal("Build a chatbot")
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestGenerateAgent:
|
||||||
|
"""Test generate_agent function service delegation."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_calls_external_service(self):
|
||||||
|
"""Test that generate_agent calls the external service."""
|
||||||
|
expected_result = {"name": "Test Agent", "nodes": [], "links": []}
|
||||||
|
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "generate_agent_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = expected_result
|
||||||
|
|
||||||
|
instructions = {"type": "instructions", "steps": ["Step 1"]}
|
||||||
|
result = await core.generate_agent(instructions)
|
||||||
|
|
||||||
|
mock_external.assert_called_once_with(instructions)
|
||||||
|
# Result should have id, version, is_active added if not present
|
||||||
|
assert result is not None
|
||||||
|
assert result["name"] == "Test Agent"
|
||||||
|
assert "id" in result
|
||||||
|
assert result["version"] == 1
|
||||||
|
assert result["is_active"] is True
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_preserves_existing_id_and_version(self):
|
||||||
|
"""Test that external service result preserves existing id and version."""
|
||||||
|
expected_result = {
|
||||||
|
"id": "existing-id",
|
||||||
|
"version": 3,
|
||||||
|
"is_active": False,
|
||||||
|
"name": "Test Agent",
|
||||||
|
}
|
||||||
|
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "generate_agent_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = expected_result.copy()
|
||||||
|
|
||||||
|
result = await core.generate_agent({"steps": []})
|
||||||
|
|
||||||
|
assert result is not None
|
||||||
|
assert result["id"] == "existing-id"
|
||||||
|
assert result["version"] == 3
|
||||||
|
assert result["is_active"] is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_returns_none_when_external_service_fails(self):
|
||||||
|
"""Test that generate_agent returns None when external service fails."""
|
||||||
|
with patch.object(
|
||||||
|
core, "is_external_service_configured", return_value=True
|
||||||
|
), patch.object(
|
||||||
|
core, "generate_agent_external", new_callable=AsyncMock
|
||||||
|
) as mock_external:
|
||||||
|
mock_external.return_value = None
|
||||||
|
|
||||||
|
result = await core.generate_agent({"steps": []})
|
||||||
|
|
||||||
|
assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestGenerateAgentPatch:
    """Test generate_agent_patch function service delegation."""

    @pytest.mark.asyncio
    async def test_calls_external_service(self):
        """generate_agent_patch forwards the request and current agent to the service."""
        patched_agent = {"name": "Updated Agent", "nodes": [], "links": []}

        with patch.object(
            core, "is_external_service_configured", return_value=True
        ), patch.object(
            core, "generate_agent_patch_external", new_callable=AsyncMock
        ) as external_call:
            external_call.return_value = patched_agent

            current_agent = {"nodes": [], "links": []}
            result = await core.generate_agent_patch("Add a node", current_agent)

            external_call.assert_called_once_with("Add a node", current_agent)
            assert result == patched_agent

    @pytest.mark.asyncio
    async def test_returns_clarifying_questions(self):
        """Clarifying-question responses from the service pass through unchanged."""
        questions_payload = {
            "type": "clarifying_questions",
            "questions": [{"question": "What type of node?"}],
        }

        with patch.object(
            core, "is_external_service_configured", return_value=True
        ), patch.object(
            core, "generate_agent_patch_external", new_callable=AsyncMock
        ) as external_call:
            external_call.return_value = questions_payload

            result = await core.generate_agent_patch("Add a node", {"nodes": []})

            assert result == questions_payload

    @pytest.mark.asyncio
    async def test_returns_none_when_external_service_fails(self):
        """A failed external call yields None instead of raising."""
        with patch.object(
            core, "is_external_service_configured", return_value=True
        ), patch.object(
            core, "generate_agent_patch_external", new_callable=AsyncMock
        ) as external_call:
            external_call.return_value = None

            result = await core.generate_agent_patch("Add a node", {"nodes": []})

            assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestJsonToGraph:
    """Test json_to_graph function."""

    def test_converts_agent_json_to_graph(self):
        """A fully-populated agent dict maps onto the Graph model field-for-field."""
        payload = {
            "id": "test-id",
            "version": 2,
            "is_active": True,
            "name": "Test Agent",
            "description": "A test agent",
            "nodes": [
                {
                    "id": "node1",
                    "block_id": "block1",
                    "input_default": {"key": "value"},
                    "metadata": {"x": 100},
                }
            ],
            "links": [
                {
                    "id": "link1",
                    "source_id": "node1",
                    "sink_id": "output",
                    "source_name": "result",
                    "sink_name": "input",
                    "is_static": False,
                }
            ],
        }

        graph = core.json_to_graph(payload)

        # Top-level metadata is carried over verbatim.
        assert graph.id == "test-id"
        assert graph.version == 2
        assert graph.is_active is True
        assert graph.name == "Test Agent"
        assert graph.description == "A test agent"
        # Nodes and links survive the conversion.
        assert len(graph.nodes) == 1
        assert graph.nodes[0].id == "node1"
        assert graph.nodes[0].block_id == "block1"
        assert len(graph.links) == 1
        assert graph.links[0].source_id == "node1"

    def test_generates_ids_if_missing(self):
        """Graph and node IDs are synthesized when absent from the input."""
        payload = {
            "name": "Test Agent",
            "nodes": [{"block_id": "block1"}],
            "links": [],
        }

        graph = core.json_to_graph(payload)

        assert graph.id is not None
        assert graph.nodes[0].id is not None
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this test module directly; pytest is invoked in verbose mode.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||||
422
autogpt_platform/backend/test/agent_generator/test_service.py
Normal file
422
autogpt_platform/backend/test/agent_generator/test_service.py
Normal file
@@ -0,0 +1,422 @@
|
|||||||
|
"""
|
||||||
|
Tests for the Agent Generator external service client.
|
||||||
|
|
||||||
|
This test suite verifies the external Agent Generator service integration,
|
||||||
|
including service detection, API calls, and error handling.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, MagicMock, patch
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from backend.api.features.chat.tools.agent_generator import service
|
||||||
|
|
||||||
|
|
||||||
|
class TestServiceConfiguration:
    """Test service configuration detection."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    def test_external_service_not_configured_when_host_empty(self):
        """An empty host means the external service is disabled."""
        settings_stub = MagicMock()
        settings_stub.config.agentgenerator_host = ""

        with patch.object(service, "_get_settings", return_value=settings_stub):
            assert service.is_external_service_configured() is False

    def test_external_service_configured_when_host_set(self):
        """A non-empty host enables the external service."""
        settings_stub = MagicMock()
        settings_stub.config.agentgenerator_host = "agent-generator.local"

        with patch.object(service, "_get_settings", return_value=settings_stub):
            assert service.is_external_service_configured() is True

    def test_get_base_url(self):
        """The base URL is built as http://<host>:<port>."""
        settings_stub = MagicMock()
        settings_stub.config.agentgenerator_host = "agent-generator.local"
        settings_stub.config.agentgenerator_port = 8000

        with patch.object(service, "_get_settings", return_value=settings_stub):
            assert service._get_base_url() == "http://agent-generator.local:8000"
|
||||||
|
|
||||||
|
|
||||||
|
class TestDecomposeGoalExternal:
    """Test decompose_goal_external function."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    @staticmethod
    def _client_posting(payload):
        """Return an AsyncMock HTTP client whose POST responds with *payload* as JSON."""
        response = MagicMock()
        response.json.return_value = payload
        response.raise_for_status = MagicMock()
        client = AsyncMock()
        client.post.return_value = response
        return client

    @pytest.mark.asyncio
    async def test_decompose_goal_returns_instructions(self):
        """A successful decomposition yields the instruction payload."""
        client = self._client_posting(
            {"success": True, "type": "instructions", "steps": ["Step 1", "Step 2"]}
        )

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Build a chatbot")

        assert result == {"type": "instructions", "steps": ["Step 1", "Step 2"]}
        client.post.assert_called_once_with(
            "/api/decompose-description", json={"description": "Build a chatbot"}
        )

    @pytest.mark.asyncio
    async def test_decompose_goal_returns_clarifying_questions(self):
        """Clarifying-question responses are passed through."""
        client = self._client_posting(
            {
                "success": True,
                "type": "clarifying_questions",
                "questions": ["What platform?", "What language?"],
            }
        )

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Build something")

        assert result == {
            "type": "clarifying_questions",
            "questions": ["What platform?", "What language?"],
        }

    @pytest.mark.asyncio
    async def test_decompose_goal_with_context(self):
        """Extra context is forwarded in the request body as user_instruction."""
        client = self._client_posting(
            {"success": True, "type": "instructions", "steps": ["Step 1"]}
        )

        with patch.object(service, "_get_client", return_value=client):
            await service.decompose_goal_external(
                "Build a chatbot", context="Use Python"
            )

        client.post.assert_called_once_with(
            "/api/decompose-description",
            json={"description": "Build a chatbot", "user_instruction": "Use Python"},
        )

    @pytest.mark.asyncio
    async def test_decompose_goal_returns_unachievable_goal(self):
        """Unachievable-goal responses are passed through with reason and suggestion."""
        client = self._client_posting(
            {
                "success": True,
                "type": "unachievable_goal",
                "reason": "Cannot do X",
                "suggested_goal": "Try Y instead",
            }
        )

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Do something impossible")

        assert result == {
            "type": "unachievable_goal",
            "reason": "Cannot do X",
            "suggested_goal": "Try Y instead",
        }

    @pytest.mark.asyncio
    async def test_decompose_goal_handles_http_error(self):
        """HTTP status errors are swallowed and reported as None."""
        client = AsyncMock()
        client.post.side_effect = httpx.HTTPStatusError(
            "Server error", request=MagicMock(), response=MagicMock()
        )

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Build a chatbot")

        assert result is None

    @pytest.mark.asyncio
    async def test_decompose_goal_handles_request_error(self):
        """Transport-level errors are swallowed and reported as None."""
        client = AsyncMock()
        client.post.side_effect = httpx.RequestError("Connection failed")

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Build a chatbot")

        assert result is None

    @pytest.mark.asyncio
    async def test_decompose_goal_handles_service_error(self):
        """A success=False response body is treated as failure (None)."""
        client = self._client_posting({"success": False, "error": "Internal error"})

        with patch.object(service, "_get_client", return_value=client):
            result = await service.decompose_goal_external("Build a chatbot")

        assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestGenerateAgentExternal:
    """Test generate_agent_external function."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    @pytest.mark.asyncio
    async def test_generate_agent_success(self):
        """A success response returns the agent_json payload verbatim."""
        agent_payload = {
            "name": "Test Agent",
            "nodes": [],
            "links": [],
        }
        response = MagicMock()
        response.json.return_value = {"success": True, "agent_json": agent_payload}
        response.raise_for_status = MagicMock()

        client = AsyncMock()
        client.post.return_value = response

        instructions = {"type": "instructions", "steps": ["Step 1"]}

        with patch.object(service, "_get_client", return_value=client):
            result = await service.generate_agent_external(instructions)

        assert result == agent_payload
        client.post.assert_called_once_with(
            "/api/generate-agent", json={"instructions": instructions}
        )

    @pytest.mark.asyncio
    async def test_generate_agent_handles_error(self):
        """Transport errors are swallowed and reported as None."""
        client = AsyncMock()
        client.post.side_effect = httpx.RequestError("Connection failed")

        with patch.object(service, "_get_client", return_value=client):
            result = await service.generate_agent_external({"steps": []})

        assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
class TestGenerateAgentPatchExternal:
    """Test generate_agent_patch_external function."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    @pytest.mark.asyncio
    async def test_generate_patch_returns_updated_agent(self):
        """A success response returns the updated agent and posts to /api/update-agent."""
        updated_agent = {
            "name": "Updated Agent",
            "nodes": [{"id": "1", "block_id": "test"}],
            "links": [],
        }
        response = MagicMock()
        response.json.return_value = {"success": True, "agent_json": updated_agent}
        response.raise_for_status = MagicMock()

        client = AsyncMock()
        client.post.return_value = response

        current_agent = {"name": "Old Agent", "nodes": [], "links": []}

        with patch.object(service, "_get_client", return_value=client):
            result = await service.generate_agent_patch_external(
                "Add a new node", current_agent
            )

        assert result == updated_agent
        client.post.assert_called_once_with(
            "/api/update-agent",
            json={
                "update_request": "Add a new node",
                "current_agent_json": current_agent,
            },
        )

    @pytest.mark.asyncio
    async def test_generate_patch_returns_clarifying_questions(self):
        """Clarifying-question responses are passed through."""
        response = MagicMock()
        response.json.return_value = {
            "success": True,
            "type": "clarifying_questions",
            "questions": ["What type of node?"],
        }
        response.raise_for_status = MagicMock()

        client = AsyncMock()
        client.post.return_value = response

        with patch.object(service, "_get_client", return_value=client):
            result = await service.generate_agent_patch_external(
                "Add something", {"nodes": []}
            )

        assert result == {
            "type": "clarifying_questions",
            "questions": ["What type of node?"],
        }
|
||||||
|
|
||||||
|
|
||||||
|
class TestHealthCheck:
    """Test health_check function."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    @staticmethod
    def _client_reporting(payload):
        """Return an AsyncMock HTTP client whose GET responds with *payload* as JSON."""
        response = MagicMock()
        response.json.return_value = payload
        response.raise_for_status = MagicMock()
        client = AsyncMock()
        client.get.return_value = response
        return client

    @pytest.mark.asyncio
    async def test_health_check_returns_false_when_not_configured(self):
        """An unconfigured service short-circuits to False without any HTTP call."""
        with patch.object(
            service, "is_external_service_configured", return_value=False
        ):
            assert await service.health_check() is False

    @pytest.mark.asyncio
    async def test_health_check_returns_true_when_healthy(self):
        """A healthy /health payload yields True."""
        client = self._client_reporting({"status": "healthy", "blocks_loaded": True})

        with patch.object(
            service, "is_external_service_configured", return_value=True
        ), patch.object(service, "_get_client", return_value=client):
            result = await service.health_check()

        assert result is True
        client.get.assert_called_once_with("/health")

    @pytest.mark.asyncio
    async def test_health_check_returns_false_when_not_healthy(self):
        """An unhealthy /health payload yields False."""
        client = self._client_reporting({"status": "unhealthy", "blocks_loaded": False})

        with patch.object(
            service, "is_external_service_configured", return_value=True
        ), patch.object(service, "_get_client", return_value=client):
            result = await service.health_check()

        assert result is False

    @pytest.mark.asyncio
    async def test_health_check_returns_false_on_error(self):
        """A connection failure during the health probe yields False."""
        client = AsyncMock()
        client.get.side_effect = httpx.RequestError("Connection failed")

        with patch.object(
            service, "is_external_service_configured", return_value=True
        ), patch.object(service, "_get_client", return_value=client):
            result = await service.health_check()

        assert result is False
|
||||||
|
|
||||||
|
|
||||||
|
class TestGetBlocksExternal:
    """Test get_blocks_external function."""

    def setup_method(self):
        """Clear the cached settings/client singletons before every test."""
        service._settings = None
        service._client = None

    @pytest.mark.asyncio
    async def test_get_blocks_success(self):
        """A success response returns the block list and hits /api/blocks."""
        blocks = [
            {"id": "block1", "name": "Block 1"},
            {"id": "block2", "name": "Block 2"},
        ]
        response = MagicMock()
        response.json.return_value = {"success": True, "blocks": blocks}
        response.raise_for_status = MagicMock()

        client = AsyncMock()
        client.get.return_value = response

        with patch.object(service, "_get_client", return_value=client):
            result = await service.get_blocks_external()

        assert result == blocks
        client.get.assert_called_once_with("/api/blocks")

    @pytest.mark.asyncio
    async def test_get_blocks_handles_error(self):
        """Transport errors are swallowed and reported as None."""
        client = AsyncMock()
        client.get.side_effect = httpx.RequestError("Connection failed")

        with patch.object(service, "_get_client", return_value=client):
            result = await service.get_blocks_external()

        assert result is None
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this test module directly; pytest is invoked in verbose mode.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])
|
||||||
Reference in New Issue
Block a user