Compare commits

2 Commits

Author: Swifty · SHA1: 46c65cb567 · Date: 2026-02-09 14:46:21 +01:00
pr comments

Author: Swifty · SHA1: 17cafff60c · Date: 2026-02-09 12:33:10 +01:00
fix(backend): Auto-fork marketplace agent on first save to fix "Graph not found" error

When a user adds a marketplace agent to their library and tries to save
edits, the update_graph endpoint returned 404 because the graph is owned
by the original creator. Now, if the user has the graph in their library
but doesn't own it, a fork is automatically created with their edits
applied, new IDs assigned, and a new library agent entry created.
2 changed files with 31 additions and 10 deletions
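
In client-facing terms, the first change below means that saving edits to a marketplace agent no longer fails with 404 but transparently lands on a user-owned fork. A minimal sketch of the observable behavior, assuming a hypothetical httpx client and route (the actual path and payload shape are not shown in this compare):

import httpx

async def save_graph_edits(client: httpx.AsyncClient, graph_id: str, graph: dict) -> str:
    # Hypothetical route; the diff only shows the update_graph handler body.
    resp = await client.put(f"/api/graphs/{graph_id}", json=graph)
    resp.raise_for_status()  # pre-fix: 404 "Graph not found" for non-owned graphs
    updated = resp.json()
    # If the backend auto-forked, the response carries a new graph id that the
    # caller must adopt for all subsequent requests.
    return updated["id"]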

View File

@@ -827,7 +827,28 @@ async def update_graph(
     existing_versions = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
     if not existing_versions:
-        raise HTTPException(404, detail=f"Graph #{graph_id} not found")
+        # User doesn't own this graph -- check if they have it in their library
+        # (e.g. added from the marketplace). If so, fork it and apply their edits.
+        library_agent = await library_db.get_library_agent_by_graph_id(
+            user_id=user_id, graph_id=graph_id
+        )
+        if not library_agent:
+            raise HTTPException(404, detail=f"Graph #{graph_id} not found")
+
+        # Fork the marketplace agent to create a user-owned copy
+        forked = await graph_db.fork_graph(
+            graph_id, library_agent.graph_version, user_id
+        )
+        forked = await on_graph_activate(forked, user_id=user_id)
+        await graph_db.set_graph_active_version(
+            graph_id=forked.id, version=forked.version, user_id=user_id
+        )
+        await library_db.create_library_agent(forked, user_id)
+
+        # Apply the user's edits on top of the fork via the normal update path
+        graph_id = forked.id
+        graph.id = forked.id
+        existing_versions = [forked]

     graph.version = max(g.version for g in existing_versions) + 1
     current_active_version = next((v for v in existing_versions if v.is_active), None)
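
For intuition on what the fork does: per the commit message, it creates a user-owned copy with new IDs assigned. A rough sketch of that idea with made-up Node/Graph shapes (the real graph_db.fork_graph implementation is not part of this diff):

import uuid
from dataclasses import dataclass, field, replace

@dataclass
class Node:
    id: str
    block_id: str

@dataclass
class Graph:
    id: str
    version: int
    user_id: str
    nodes: list[Node] = field(default_factory=list)

def fork_graph_sketch(src: Graph, new_owner_id: str) -> Graph:
    # Fresh graph id, version reset to 1, new owner, and every node re-keyed
    # so the copy is fully independent of the marketplace original.
    return Graph(
        id=str(uuid.uuid4()),
        version=1,
        user_id=new_owner_id,
        nodes=[replace(n, id=str(uuid.uuid4())) for n in src.nodes],
    )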

View File

@@ -531,12 +531,12 @@ class LLMResponse(BaseModel):
 def convert_openai_tool_fmt_to_anthropic(
     openai_tools: list[dict] | None = None,
-) -> Iterable[ToolParam] | anthropic.Omit:
+) -> Iterable[ToolParam] | anthropic.NotGiven:
     """
     Convert OpenAI tool format to Anthropic tool format.
     """
     if not openai_tools or len(openai_tools) == 0:
-        return anthropic.omit
+        return anthropic.NOT_GIVEN

     anthropic_tools = []
     for tool in openai_tools:

@@ -596,10 +596,10 @@ def extract_openai_tool_calls(response) -> list[ToolContentBlock] | None:
 def get_parallel_tool_calls_param(
     llm_model: LlmModel, parallel_tool_calls: bool | None
-) -> bool | openai.Omit:
+) -> bool | openai.NotGiven:
     """Get the appropriate parallel_tool_calls parameter for OpenAI-compatible APIs."""
     if llm_model.startswith("o") or parallel_tool_calls is None:
-        return openai.omit
+        return openai.NOT_GIVEN
     return parallel_tool_calls

@@ -676,7 +676,7 @@ async def llm_call(
             response_format=response_format,  # type: ignore
             max_completion_tokens=max_tokens,
             tools=tools_param,  # type: ignore
-            parallel_tool_calls=parallel_tool_calls,
+            parallel_tool_calls=parallel_tool_calls,  # type: ignore
         )

         tool_calls = extract_openai_tool_calls(response)

@@ -722,7 +722,7 @@ async def llm_call(
             system=sysprompt,
             messages=messages,
             max_tokens=max_tokens,
-            tools=an_tools,
+            tools=an_tools,  # type: ignore
             timeout=600,
         )

@@ -838,7 +838,7 @@ async def llm_call(
             messages=prompt,  # type: ignore
             max_tokens=max_tokens,
             tools=tools_param,  # type: ignore
-            parallel_tool_calls=parallel_tool_calls_param,
+            parallel_tool_calls=parallel_tool_calls_param,  # type: ignore
         )

         # If there's no response, raise an error

@@ -880,7 +880,7 @@ async def llm_call(
             messages=prompt,  # type: ignore
             max_tokens=max_tokens,
             tools=tools_param,  # type: ignore
-            parallel_tool_calls=parallel_tool_calls_param,
+            parallel_tool_calls=parallel_tool_calls_param,  # type: ignore
         )

         # If there's no response, raise an error

@@ -951,7 +951,7 @@ async def llm_call(
             response_format=response_format,  # type: ignore
             max_tokens=max_tokens,
             tools=tools_param,  # type: ignore
-            parallel_tool_calls=parallel_tool_calls_param,
+            parallel_tool_calls=parallel_tool_calls_param,  # type: ignore
         )

         tool_calls = extract_openai_tool_calls(response)
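
The Omit to NotGiven swap above trades one SDK sentinel for another, presumably to match the SDK versions in use (hence the added # type: ignore comments). Both the openai and anthropic packages export NOT_GIVEN, which tells the client to leave the field out of the request body entirely, unlike an explicit None, which would serialize as a JSON null. A minimal sketch of the pattern the two changed helpers follow:

import openai

def tools_or_not_given(tools: list[dict] | None) -> list[dict] | openai.NotGiven:
    # openai.NOT_GIVEN makes the SDK drop `tools` from the request entirely;
    # returning None instead would send an explicit null, which the API may reject.
    return tools if tools else openai.NOT_GIVEN

# Usage: client.chat.completions.create(..., tools=tools_or_not_given(my_tools))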