Mirror of https://github.com/Significant-Gravitas/AutoGPT.git

Compare commits: dev ... otto/secrt (5 commits)
Commits:

- e19b6e458f
- 2be589c95f
- 79748ca7fa
- f3df841ea3
- 6cb794cbf8
In the chat SDK security-hooks module, the tool-safety constants stop being imported from the shared tool adapter and are defined locally instead:

```diff
@@ -11,15 +11,45 @@ import re
 from collections.abc import Callable
 from typing import Any, cast
 
-from backend.api.features.chat.sdk.tool_adapter import (
-    BLOCKED_TOOLS,
-    DANGEROUS_PATTERNS,
-    MCP_TOOL_PREFIX,
-    WORKSPACE_SCOPED_TOOLS,
-)
+from backend.api.features.chat.sdk.tool_adapter import MCP_TOOL_PREFIX
 
 logger = logging.getLogger(__name__)
 
+# Tools that are blocked entirely (CLI/system access).
+# "Bash" (capital) is the SDK built-in — it's NOT in allowed_tools but blocked
+# here as defence-in-depth. The agent uses mcp__copilot__bash_exec instead,
+# which has kernel-level network isolation (unshare --net).
+BLOCKED_TOOLS = {
+    "Bash",
+    "bash",
+    "shell",
+    "exec",
+    "terminal",
+    "command",
+}
+
+# Tools allowed only when their path argument stays within the SDK workspace.
+# The SDK uses these to handle oversized tool results (writes to tool-results/
+# files, then reads them back) and for workspace file operations.
+WORKSPACE_SCOPED_TOOLS = {"Read", "Write", "Edit", "Glob", "Grep"}
+
+# Dangerous patterns in tool inputs
+DANGEROUS_PATTERNS = [
+    r"sudo",
+    r"rm\s+-rf",
+    r"dd\s+if=",
+    r"/etc/passwd",
+    r"/etc/shadow",
+    r"chmod\s+777",
+    r"curl\s+.*\|.*sh",
+    r"wget\s+.*\|.*sh",
+    r"eval\s*\(",
+    r"exec\s*\(",
+    r"__import__",
+    r"os\.system",
+    r"subprocess",
+]
+
 
 def _deny(reason: str) -> dict[str, Any]:
     """Return a hook denial response."""
```
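A minimal sketch of how a PreToolUse-style check can combine these constants. The denial-response shape and the `check_tool_use` name are assumptions for illustration, not the repository's actual `create_security_hooks` body:

```python
import re
from typing import Any

# BLOCKED_TOOLS and DANGEROUS_PATTERNS as defined in the hunk above.


def _deny(reason: str) -> dict[str, Any]:
    """Return a hook denial response (shape assumed for this sketch)."""
    return {
        "hookSpecificOutput": {
            "hookEventName": "PreToolUse",
            "permissionDecision": "deny",
            "permissionDecisionReason": reason,
        }
    }


def check_tool_use(tool_name: str, tool_input: dict[str, Any]) -> dict[str, Any]:
    """Return a denial dict for unsafe calls, or {} to allow the call."""
    # Hard-block CLI/system tools and their aliases.
    if tool_name in BLOCKED_TOOLS:
        return _deny(f"Tool '{tool_name}' is blocked")
    # Scan string arguments for dangerous shell/Python patterns.
    for value in tool_input.values():
        if isinstance(value, str):
            for pattern in DANGEROUS_PATTERNS:
                if re.search(pattern, value, re.IGNORECASE):
                    return _deny(f"Input matches dangerous pattern: {pattern}")
    return {}
```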
In the SDK chat runner, the import and the options dict drop `SDK_DISALLOWED_TOOLS` in favour of a hardcoded list:

```diff
@@ -41,7 +41,6 @@ from .response_adapter import SDKResponseAdapter
 from .security_hooks import create_security_hooks
 from .tool_adapter import (
     COPILOT_TOOL_NAMES,
-    SDK_DISALLOWED_TOOLS,
     LongRunningCallback,
     create_copilot_mcp_server,
     set_execution_context,
@@ -544,7 +543,7 @@ async def stream_chat_completion_sdk(
         "system_prompt": system_prompt,
         "mcp_servers": {"copilot": mcp_server},
         "allowed_tools": COPILOT_TOOL_NAMES,
-        "disallowed_tools": SDK_DISALLOWED_TOOLS,
+        "disallowed_tools": ["Bash"],
         "hooks": security_hooks,
         "cwd": sdk_cwd,
         "max_buffer_size": config.claude_agent_max_buffer_size,
```
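Taken together with the hooks, these options express two layers of filtering. A hedged pseudologic sketch of that layering (`tool_call_permitted` is a name invented for this example; it is not the SDK's internals):

```python
def tool_call_permitted(tool_name: str, tool_input: dict) -> bool:
    # Layer 1: static lists from the options dict. "Bash" is rejected
    # outright, and anything outside the explicit allow-list never runs.
    if tool_name in ["Bash"]:                 # disallowed_tools
        return False
    if tool_name not in COPILOT_TOOL_NAMES:   # allowed_tools
        return False
    # Layer 2: the PreToolUse security hooks re-check the survivors
    # (see check_tool_use above; an empty dict means "allow").
    return check_tool_use(tool_name, tool_input) == {}
```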
In the tool adapter, the constants that moved into the security-hooks module are removed, along with `SDK_DISALLOWED_TOOLS` and the `WebSearch` built-in:

```diff
@@ -310,48 +310,7 @@ def create_copilot_mcp_server():
 # Bash is NOT included — use the sandboxed MCP bash_exec tool instead,
 # which provides kernel-level network isolation via unshare --net.
 # Task allows spawning sub-agents (rate-limited by security hooks).
-# WebSearch uses Brave Search via Anthropic's API — safe, no SSRF risk.
-_SDK_BUILTIN_TOOLS = ["Read", "Write", "Edit", "Glob", "Grep", "Task", "WebSearch"]
-
-# SDK built-in tools that must be explicitly blocked.
-# Bash: dangerous — agent uses mcp__copilot__bash_exec with kernel-level
-# network isolation (unshare --net) instead.
-# WebFetch: SSRF risk — can reach internal network (localhost, 10.x, etc.).
-# Agent uses the SSRF-protected mcp__copilot__web_fetch tool instead.
-SDK_DISALLOWED_TOOLS = ["Bash", "WebFetch"]
-
-# Tools that are blocked entirely in security hooks (defence-in-depth).
-# Includes SDK_DISALLOWED_TOOLS plus common aliases/synonyms.
-BLOCKED_TOOLS = {
-    *SDK_DISALLOWED_TOOLS,
-    "bash",
-    "shell",
-    "exec",
-    "terminal",
-    "command",
-}
-
-# Tools allowed only when their path argument stays within the SDK workspace.
-# The SDK uses these to handle oversized tool results (writes to tool-results/
-# files, then reads them back) and for workspace file operations.
-WORKSPACE_SCOPED_TOOLS = {"Read", "Write", "Edit", "Glob", "Grep"}
-
-# Dangerous patterns in tool inputs
-DANGEROUS_PATTERNS = [
-    r"sudo",
-    r"rm\s+-rf",
-    r"dd\s+if=",
-    r"/etc/passwd",
-    r"/etc/shadow",
-    r"chmod\s+777",
-    r"curl\s+.*\|.*sh",
-    r"wget\s+.*\|.*sh",
-    r"eval\s*\(",
-    r"exec\s*\(",
-    r"__import__",
-    r"os\.system",
-    r"subprocess",
-]
+_SDK_BUILTIN_TOOLS = ["Read", "Write", "Edit", "Glob", "Grep", "Task"]
 
 # List of tool names for allowed_tools configuration
 # Include MCP tools, the MCP Read tool for oversized results,
```
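The comments above lean on kernel-level network isolation via `unshare --net`. A minimal sketch of that idea (the real `mcp__copilot__bash_exec` implementation is not shown in this diff, and `run_isolated` is a hypothetical helper): a command started in a fresh network namespace sees no usable interfaces, so curl/wget exfiltration fails even if a dangerous string slips past the regex filters.

```python
import subprocess


def run_isolated(command: str, timeout: int = 60) -> subprocess.CompletedProcess:
    """Run a shell command inside a new network namespace (no network).

    Requires unshare(1) and either root or unprivileged user namespaces.
    """
    return subprocess.run(
        ["unshare", "--net", "bash", "-c", command],
        capture_output=True,
        text=True,
        timeout=timeout,
    )


# Example: run_isolated("curl -s https://example.com") fails, since the
# new namespace contains only a loopback interface that is down.
```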
In the backend graph model, a `prune_invalid_links` method is added and wired into structure validation:

```diff
@@ -867,9 +867,67 @@ class GraphModel(Graph, GraphMeta):
 
         return node_errors
 
+    @staticmethod
+    def prune_invalid_links(graph: BaseGraph) -> int:
+        """
+        Remove invalid/orphan links from the graph.
+
+        This removes links that:
+        - Reference non-existent source or sink nodes
+        - Reference invalid block IDs
+
+        Note: Pin name validation is handled separately in _validate_graph_structure.
+
+        Returns the number of links pruned.
+        """
+        node_map = {v.id: v for v in graph.nodes}
+        original_count = len(graph.links)
+        valid_links = []
+
+        for link in graph.links:
+            source_node = node_map.get(link.source_id)
+            sink_node = node_map.get(link.sink_id)
+
+            # Skip if either node doesn't exist
+            if not source_node or not sink_node:
+                logger.warning(
+                    f"Pruning orphan link: source={link.source_id}, sink={link.sink_id} "
+                    f"- node(s) not found"
+                )
+                continue
+
+            # Skip if source block doesn't exist
+            source_block = get_block(source_node.block_id)
+            if not source_block:
+                logger.warning(
+                    f"Pruning link with invalid source block: {source_node.block_id}"
+                )
+                continue
+
+            # Skip if sink block doesn't exist
+            sink_block = get_block(sink_node.block_id)
+            if not sink_block:
+                logger.warning(
+                    f"Pruning link with invalid sink block: {sink_node.block_id}"
+                )
+                continue
+
+            valid_links.append(link)
+
+        graph.links = valid_links
+        pruned_count = original_count - len(valid_links)
+
+        if pruned_count > 0:
+            logger.info(f"Pruned {pruned_count} invalid link(s) from graph {graph.id}")
+
+        return pruned_count
+
     @staticmethod
     def _validate_graph_structure(graph: BaseGraph):
         """Validate graph structure (links, connections, etc.)"""
+        # First, prune invalid links to clean up orphan edges
+        GraphModel.prune_invalid_links(graph)
+
         node_map = {v.id: v for v in graph.nodes}
 
         def is_static_output_block(nid: str) -> bool:
```
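A self-contained toy run of the pruning rule above (stand-in dataclasses; the real method also validates block IDs via `get_block`, which this sketch omits):

```python
from dataclasses import dataclass


@dataclass
class Node:
    id: str


@dataclass
class Link:
    source_id: str
    sink_id: str


@dataclass
class Graph:
    nodes: list[Node]
    links: list[Link]


def prune_orphan_links(graph: Graph) -> int:
    """Drop links whose source or sink node no longer exists."""
    node_ids = {n.id for n in graph.nodes}
    kept = [
        link for link in graph.links
        if link.source_id in node_ids and link.sink_id in node_ids
    ]
    pruned = len(graph.links) - len(kept)
    graph.links = kept
    return pruned


g = Graph(
    nodes=[Node("a"), Node("b")],
    links=[Link("a", "b"), Link("a", "ghost")],  # second link is orphaned
)
assert prune_orphan_links(g) == 1
assert len(g.links) == 1
```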
In the builder's `useFlow` hook, the separate node- and link-adding effects are merged so links are only added once their nodes exist:

```diff
@@ -133,22 +133,23 @@ export const useFlow = () => {
     }
   }, [availableGraphs, setAvailableSubGraphs]);
 
-  // adding nodes
+  // adding nodes and links together to avoid race condition
+  // Links depend on nodes existing, so we must add nodes first
   useEffect(() => {
     if (customNodes.length > 0) {
+      // Clear both stores to prevent stale data from previous graphs
       useNodeStore.getState().setNodes([]);
       useNodeStore.getState().clearResolutionState();
-      addNodes(customNodes);
-    }
-  }, [customNodes, addNodes]);
-
-  // adding links
-  useEffect(() => {
-    if (graph?.links) {
       useEdgeStore.getState().setEdges([]);
-      addLinks(graph.links);
+      addNodes(customNodes);
+
+      // Only add links after nodes are in the store
+      if (graph?.links) {
+        addLinks(graph.links);
+      }
     }
-  }, [graph?.links, addLinks]);
+  }, [customNodes, graph?.links, addNodes, addLinks]);
 
   useEffect(() => {
     if (customNodes.length > 0 && graph?.links) {
```
In the `useSaveGraph` hook, a `syncEdgesWithBackend` helper replaces the edge store contents with the links the backend actually accepted after each save:

```diff
@@ -13,6 +13,7 @@ import { Graph } from "@/app/api/__generated__/models/graph";
 import { useNodeStore } from "../stores/nodeStore";
 import { useEdgeStore } from "../stores/edgeStore";
 import { graphsEquivalent } from "../components/NewControlPanel/NewSaveControl/helpers";
+import { linkToCustomEdge } from "../components/helper";
 import { useGraphStore } from "../stores/graphStore";
 import { useShallow } from "zustand/react/shallow";
 import {
@@ -21,6 +22,18 @@ import {
   getTempFlowId,
 } from "@/services/builder-draft/draft-service";
 
+/**
+ * Sync the edge store with the authoritative backend state.
+ * This ensures the frontend matches what the backend accepted after save.
+ */
+function syncEdgesWithBackend(links: GraphModel["links"]) {
+  if (links !== undefined) {
+    // Replace all edges with the authoritative backend state
+    const newEdges = links.map(linkToCustomEdge);
+    useEdgeStore.getState().setEdges(newEdges);
+  }
+}
+
 export type SaveGraphOptions = {
   showToast?: boolean;
   onSuccess?: (graph: GraphModel) => void;
@@ -64,6 +77,9 @@ export const useSaveGraph = ({
         flowVersion: data.version,
       });
+
+      // Sync edge store with authoritative backend state
+      syncEdgesWithBackend(data.links);
 
       const tempFlowId = getTempFlowId();
       if (tempFlowId) {
         await draftService.deleteDraft(tempFlowId);
@@ -101,6 +117,9 @@ export const useSaveGraph = ({
         flowVersion: data.version,
       });
+
+      // Sync edge store with authoritative backend state
+      syncEdgesWithBackend(data.links);
 
       // Clear the draft for this flow after successful save
       if (data.id) {
         await draftService.deleteDraft(data.id);
```
In the edge store, `getBackendLinks` and `addLinks` now validate edges against the node store, deduplicate, and batch history updates:

```diff
@@ -120,12 +120,64 @@ export const useEdgeStore = create<EdgeStore>((set, get) => ({
   isOutputConnected: (nodeId, handle) =>
     get().edges.some((e) => e.source === nodeId && e.sourceHandle === handle),
 
-  getBackendLinks: () => get().edges.map(customEdgeToLink),
+  getBackendLinks: () => {
+    // Filter out edges referencing non-existent nodes before converting to links
+    const nodeIds = new Set(useNodeStore.getState().nodes.map((n) => n.id));
+    const validEdges = get().edges.filter((edge) => {
+      const isValid = nodeIds.has(edge.source) && nodeIds.has(edge.target);
+      if (!isValid) {
+        console.warn(
+          `[EdgeStore] Filtering out invalid edge during save: source=${edge.source}, target=${edge.target}`,
+        );
+      }
+      return isValid;
+    });
+    return validEdges.map(customEdgeToLink);
+  },
 
   addLinks: (links) => {
-    links.forEach((link) => {
-      get().addEdge(linkToCustomEdge(link));
-    });
+    // Get current node IDs to validate links
+    const nodeIds = new Set(useNodeStore.getState().nodes.map((n) => n.id));
+
+    // Convert and filter links in one pass, avoiding individual addEdge calls
+    // which would push to history for each edge (causing history pollution)
+    const newEdges: CustomEdge[] = [];
+    const existingEdges = get().edges;
+
+    for (const link of links) {
+      // Skip invalid links (orphan edges referencing non-existent nodes)
+      if (!nodeIds.has(link.source_id) || !nodeIds.has(link.sink_id)) {
+        console.warn(
+          `[EdgeStore] Skipping invalid link: source=${link.source_id}, sink=${link.sink_id} - node(s) not found`,
+        );
+        continue;
+      }
+
+      const edge = linkToCustomEdge(link);
+
+      // Skip if edge already exists
+      const exists = existingEdges.some(
+        (e) =>
+          e.source === edge.source &&
+          e.target === edge.target &&
+          e.sourceHandle === edge.sourceHandle &&
+          e.targetHandle === edge.targetHandle,
+      );
+      if (!exists) {
+        newEdges.push(edge);
+      }
+    }
+
+    if (newEdges.length > 0) {
+      // Bulk add all edges at once, pushing to history only once
+      const prevState = {
+        nodes: useNodeStore.getState().nodes,
+        edges: existingEdges,
+      };
+
+      set((state) => ({ edges: [...state.edges, ...newEdges] }));
+      useHistoryStore.getState().pushState(prevState);
+    }
   },
 
   getAllHandleIdsOfANode: (nodeId) =>
```