feat(autogpt_builder): Add AutoGPTServerAPI client (#7328)

- Add fully typed `AutoGPTServerAPI` client in lib/autogpt_server_api.ts
- Migrate API calls in `Flow.tsx` to new API client
This commit is contained in:
Reinier van der Leer
2024-07-05 11:03:59 +02:00
committed by GitHub
parent b7a90ce768
commit 200800312a
3 changed files with 199 additions and 60 deletions

View File

@@ -15,19 +15,14 @@ import ReactFlow, {
import 'reactflow/dist/style.css';
import CustomNode from './CustomNode';
import './flow.css';
type Schema = {
type: string;
properties: { [key: string]: any };
additionalProperties?: { type: string };
required?: string[];
};
import AutoGPTServerAPI, { Block } from '@/lib/autogpt_server_api';
import { ObjectSchema } from '@/lib/types';
type CustomNodeData = {
blockType: string;
title: string;
inputSchema: Schema;
outputSchema: Schema;
inputSchema: ObjectSchema;
outputSchema: ObjectSchema;
hardcodedValues: { [key: string]: any };
setHardcodedValues: (values: { [key: string]: any }) => void;
connections: Array<{ source: string; sourceHandle: string; target: string; targetHandle: string }>;
@@ -37,15 +32,7 @@ type CustomNodeData = {
block_id: string;
};
type AvailableNode = {
id: string;
name: string;
description: string;
inputSchema: Schema;
outputSchema: Schema;
};
const Sidebar: React.FC<{isOpen: boolean, availableNodes: AvailableNode[], addNode: (id: string, name: string) => void}> =
const Sidebar: React.FC<{isOpen: boolean, availableNodes: Block[], addNode: (id: string, name: string) => void}> =
({isOpen, availableNodes, addNode}) => {
const [searchQuery, setSearchQuery] = useState('');
@@ -78,17 +65,17 @@ const Flow: React.FC = () => {
const [nodes, setNodes] = useState<Node<CustomNodeData>[]>([]);
const [edges, setEdges] = useState<Edge[]>([]);
const [nodeId, setNodeId] = useState<number>(1);
const [availableNodes, setAvailableNodes] = useState<AvailableNode[]>([]);
const [availableNodes, setAvailableNodes] = useState<Block[]>([]);
const [agentId, setAgentId] = useState<string | null>(null);
const [isSidebarOpen, setIsSidebarOpen] = useState(true);
const apiUrl = 'http://localhost:8000';
const api = new AutoGPTServerAPI(apiUrl);
useEffect(() => {
fetch(`${apiUrl}/blocks`)
.then(response => response.json())
.then(data => setAvailableNodes(data))
.catch(error => console.error('Error fetching available blocks:', error));
api.getBlocks()
.then(blocks => setAvailableNodes(blocks))
.catch();
}, []);
const nodeTypes: NodeTypes = useMemo(() => ({ custom: CustomNode }), []);
@@ -176,7 +163,7 @@ const Flow: React.FC = () => {
return {};
}
const getNestedData = (schema: Schema, values: { [key: string]: any }): { [key: string]: any } => {
const getNestedData = (schema: ObjectSchema, values: { [key: string]: any }): { [key: string]: any } => {
let inputData: { [key: string]: any } = {};
if (schema.properties) {
@@ -268,51 +255,19 @@ const Flow: React.FC = () => {
links: links // Ensure this field is included
};
console.log("Payload being sent to the API:", JSON.stringify(payload, null, 2));
const createResponse = await fetch(`${apiUrl}/graphs`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(payload),
});
if (!createResponse.ok) {
throw new Error(`HTTP error! Status: ${createResponse.status}`);
}
const createData = await createResponse.json();
const createData = await api.createFlow(payload);
const newAgentId = createData.id;
setAgentId(newAgentId);
console.log('Response from the API:', JSON.stringify(createData, null, 2));
const executeResponse = await fetch(`${apiUrl}/graphs/${newAgentId}/execute`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({}),
});
if (!executeResponse.ok) {
throw new Error(`HTTP error! Status: ${executeResponse.status}`);
}
const executeData = await executeResponse.json();
const executeData = await api.executeFlow(newAgentId);
const runId = executeData.id;
const pollExecution = async () => {
const response = await fetch(`${apiUrl}/graphs/${newAgentId}/executions/${runId}`);
if (!response.ok) {
throw new Error(`HTTP error! Status: ${response.status}`);
}
const data = await response.json();
const data = await api.getFlowExecutionInfo(newAgentId, runId);
updateNodesWithExecutionData(data);
if (data.every((node: any) => node.status === 'COMPLETED')) {
if (data.every((node) => node.status === 'COMPLETED')) {
console.log('All nodes completed execution');
} else {
setTimeout(pollExecution, 1000);

View File

@@ -0,0 +1,178 @@
import { XYPosition } from "reactflow";
import { ObjectSchema } from "./types";
/**
 * Typed client for the AutoGPT Server REST API.
 *
 * All methods resolve with the parsed JSON response body, and reject with an
 * `Error` on any non-2xx HTTP status (after logging the response) or on a
 * network failure (after logging the error).
 */
export default class AutoGPTServerAPI {
  private baseUrl: string;

  constructor(baseUrl: string = "http://localhost:8000") {
    this.baseUrl = baseUrl;
  }

  /** GET /blocks — fetch all available block definitions. */
  async getBlocks(): Promise<Block[]> {
    return this._get("/blocks");
  }

  /** GET /graphs — fetch the IDs of all stored flows. */
  async listFlowIDs(): Promise<string[]> {
    return this._get("/graphs");
  }

  /** GET /graphs/{id} — fetch a single flow by ID. */
  async getFlow(id: string): Promise<Flow> {
    return this._get(`/graphs/${id}`);
  }

  /** POST /graphs — store a new flow; returns the stored flow (incl. its ID). */
  async createFlow(flowCreateBody: FlowCreateBody): Promise<Flow> {
    console.debug("POST /graphs payload:", flowCreateBody);
    return this._request("POST", "/graphs", flowCreateBody);
  }

  /** POST /graphs/{id}/execute — start executing a flow; returns the run info. */
  async executeFlow(flowId: string): Promise<FlowExecuteResponse> {
    const path = `/graphs/${flowId}/execute`;
    console.debug(`POST ${path}`);
    return this._request("POST", path, {});
  }

  /** GET /graphs/{id}/executions/{runId} — fetch per-node results for a run. */
  async getFlowExecutionInfo(flowId: string, runId: string): Promise<NodeExecutionResult[]> {
    return this._get(`/graphs/${flowId}/executions/${runId}`);
  }

  /** Shorthand for a GET request (no body). */
  private async _get(path: string): Promise<any> {
    return this._request("GET", path);
  }

  /**
   * Perform a request against the API and return the parsed JSON body.
   * Non-OK responses are logged and converted into thrown `Error`s so callers
   * only have to handle one failure channel (the rejected promise).
   */
  private async _request(
    method: "GET" | "POST",
    path: string,
    payload?: { [key: string]: any },
  ): Promise<any> {
    try {
      const response = await fetch(
        this.baseUrl + path,
        method === "GET"
          ? undefined
          : {
              method,
              headers: { "Content-Type": "application/json" },
              body: JSON.stringify(payload ?? {}),
            },
      );
      if (!response.ok) {
        console.warn(`${method} ${path} returned non-OK response:`, response);
        throw new Error(`HTTP error ${response.status}!`);
      }
      return await response.json();
    } catch (error) {
      console.error(`Error on ${method} ${path}:`, error);
      throw error;
    }
  }
}
/* Mirror of autogpt_server/data/block.py:Block */
export type Block = {
  id: string;          // Identifier used to reference this block type (e.g. in Node.block_id)
  name: string;        // Human-readable block name, shown in the editor sidebar
  description: string;
  inputSchema: ObjectSchema;   // Schema describing the block's accepted inputs
  outputSchema: ObjectSchema;  // Schema describing the block's produced outputs
};
/* Mirror of autogpt_server/data/graph.py:Node */
export type Node = {
  id: string;
  block_id: string;  // ID of the Block this node is an instance of
  // Default/hardcoded input values, keyed by input name.
  // Typed as a plain keyed object (not `Map`): `response.json()` can only ever
  // produce plain objects, so a `Map` type here would never match the runtime value.
  input_default: { [key: string]: any };
  input_nodes: Array<{ name: string, node_id: string }>;   // Incoming connections: input name → upstream node
  output_nodes: Array<{ name: string, node_id: string }>;  // Outgoing connections: output name → downstream node
  metadata: {
    position: XYPosition;  // Canvas position used by the React Flow editor
    [key: string]: any;    // Any additional editor/server metadata
  };
};
/* Mirror of autogpt_server/data/graph.py:Link */
export type Link = {
  source_id: string;    // ID of the node the link originates from
  sink_id: string;      // ID of the node the link feeds into
  source_name: string;  // Name of the output handle on the source node
  sink_name: string;    // Name of the input handle on the sink node
}
/* Mirror of autogpt_server/data/graph.py:Graph */
export type Flow = {
  id: string;           // Server-assigned flow (graph) ID
  name: string;
  description: string;
  nodes: Array<Node>;   // The blocks placed in this flow
  links: Array<Link>;   // The connections between nodes
};
/**
 * Payload for POST /graphs: a full Flow whose `id` may be omitted
 * (the server assigns one).
 *
 * NOTE: the previous form `Flow | { id?: string }` was a bug — the union arm
 * `{ id?: string }` is satisfied by almost any object, so the type checked
 * nothing. The intersection below actually requires all Flow fields except `id`.
 */
export type FlowCreateBody = Omit<Flow, "id"> & {
  id?: string;
}
/* Derived from autogpt_server/executor/manager.py:ExecutionManager.add_execution */
export type FlowExecuteResponse = {
  /* ID of the initiated run */
  id: string;
  /* List of node executions started for this run (one entry per executed node) */
  executions: Array<{ id: string, node_id: string }>;
};
/* Mirror of autogpt_server/data/execution.py:ExecutionResult */
export type NodeExecutionResult = {
  graph_exec_id: string;  // ID of the overall graph execution (the run)
  node_exec_id: string;   // ID of this individual node execution
  node_id: string;        // ID of the node that was executed
  status: 'INCOMPLETE' | 'QUEUED' | 'RUNNING' | 'COMPLETED' | 'FAILED';
  // Plain keyed objects, not `Map`: `response.json()` only ever yields plain
  // objects, so the previous `Map<...>` types never matched the runtime values.
  input_data: { [key: string]: any };
  output_data: { [key: string]: any[] };
  // NOTE(review): over the wire these arrive as ISO timestamp strings; they are
  // only real `Date` objects if a caller converts them — confirm before
  // relying on Date methods on unconverted API responses.
  add_time: Date;
  queue_time?: Date;
  start_time?: Date;
  end_time?: Date;
};

View File

@@ -0,0 +1,6 @@
// Minimal JSON-Schema-style shape for an object definition, as used for block
// input/output schemas returned by the AutoGPT server.
export type ObjectSchema = {
  type: string;                          // JSON Schema `type`, e.g. "object"
  properties: { [key: string]: any };    // Per-property sub-schemas, keyed by property name
  additionalProperties?: { type: string };  // Schema applied to properties not listed above
  required?: string[];                   // Names of properties that must be present
};