Merge branch 'dev' of github.com:Significant-Gravitas/AutoGPT into zamilmajdy/fix-static-output-resolve

This commit is contained in:
Zamil Majdy
2025-03-07 15:52:38 +07:00
14 changed files with 613 additions and 21 deletions

View File

@@ -0,0 +1,5 @@
>>>>>>> links {'a41a5d9e-57e5-4da4-accb-a98c644b793f': ('output', 'a'), '5507cdb3-001b-447a-b654-e04494dba4bb': ('output', 'a'), '7cd7730f-97cf-45ba-812f-c9ac9e41d37b': ('output', 'a'), '86551f0a-75c9-4a7a-9695-97176dbe4e7d': ('output', 'b')} graph_eid 80d6ecb5-83a7-4194-a3cb-96cd8cf88473 output_time 2025-03-07 08:35:39.165000+00:00 latest_output {'a': 4}
>>>>>>> links {'2a24e1ed-e114-4c4a-b9ff-0367f4017c73': ('output', 'input')} graph_eid 80d6ecb5-83a7-4194-a3cb-96cd8cf88473 output_time 2025-03-07 08:35:39.190000+00:00 latest_output {'input': 5}
>>>>>>> links {'a41a5d9e-57e5-4da4-accb-a98c644b793f': ('output', 'a'), '5507cdb3-001b-447a-b654-e04494dba4bb': ('output', 'a'), '7cd7730f-97cf-45ba-812f-c9ac9e41d37b': ('output', 'a'), '86551f0a-75c9-4a7a-9695-97176dbe4e7d': ('output', 'b')} graph_eid 80d6ecb5-83a7-4194-a3cb-96cd8cf88473 output_time 2025-03-07 08:35:39.180000+00:00 latest_output {'a': 4}
>>>>>>> links {'a41a5d9e-57e5-4da4-accb-a98c644b793f': ('output', 'a'), '5507cdb3-001b-447a-b654-e04494dba4bb': ('output', 'a'), '7cd7730f-97cf-45ba-812f-c9ac9e41d37b': ('output', 'a'), '86551f0a-75c9-4a7a-9695-97176dbe4e7d': ('output', 'b')} graph_eid 80d6ecb5-83a7-4194-a3cb-96cd8cf88473 output_time 2025-03-07 08:35:39.341000+00:00 latest_output {'b': 5, 'a': 4}
>>>>>>> links {'a41a5d9e-57e5-4da4-accb-a98c644b793f': ('output', 'a'), '5507cdb3-001b-447a-b654-e04494dba4bb': ('output', 'a'), '7cd7730f-97cf-45ba-812f-c9ac9e41d37b': ('output', 'a'), '86551f0a-75c9-4a7a-9695-97176dbe4e7d': ('output', 'b')} graph_eid 80d6ecb5-83a7-4194-a3cb-96cd8cf88473 output_time 2025-03-07 08:35:39.184000+00:00 latest_output {'a': 4}

View File

@@ -21,6 +21,7 @@ import backend.server.routers.v1
import backend.server.v2.library.db
import backend.server.v2.library.model
import backend.server.v2.library.routes
import backend.server.v2.otto.routes
import backend.server.v2.postmark.postmark
import backend.server.v2.store.model
import backend.server.v2.store.routes
@@ -68,8 +69,7 @@ docs_url = (
app = fastapi.FastAPI(
title="AutoGPT Agent Server",
description=(
"This server is used to execute agents that are created by the "
"AutoGPT system."
"This server is used to execute agents that are created by the AutoGPT system."
),
summary="AutoGPT Agent Server",
version="0.1",
@@ -102,6 +102,10 @@ app.include_router(
app.include_router(
backend.server.v2.library.routes.router, tags=["v2"], prefix="/api/library"
)
app.include_router(
backend.server.v2.otto.routes.router, tags=["v2"], prefix="/api/otto"
)
app.include_router(
backend.server.v2.postmark.postmark.router,
tags=["v2", "email"],

View File

@@ -412,11 +412,8 @@ async def add_store_agent_to_library(
added_agent = await prisma.models.LibraryAgent.prisma().create(
data={
"userId": user_id,
"Agent": {
"connect": {
"graphVersionId": {"id": graph.id, "version": graph.version}
},
},
"agentId": graph.id,
"agentVersion": graph.version,
"isCreatedByUser": False,
},
include=library_agent_include(user_id),

View File

@@ -0,0 +1,34 @@
from typing import Any, Dict, Optional
from pydantic import BaseModel
class Document(BaseModel):
    """A source document cited in an Otto answer."""

    # Link to the source document
    url: str
    # Relevance of this document to the query, as scored by the Otto API
    relevance_score: float
class ApiResponse(BaseModel):
    """Response payload returned by the Otto API for a chat query."""

    # The assistant's answer text
    answer: str
    # Source documents backing the answer
    documents: list[Document]
    # Whether the Otto API handled the request successfully
    success: bool
class GraphData(BaseModel):
    """Serialized agent-graph structure sent to Otto as extra context.

    Nodes and edges are kept as plain dicts: they are pass-through payload
    for the Otto API, not validated models.
    """

    # Builtin ``dict``/``list`` generics for consistency with the rest of
    # this module (which already uses ``list[...]``).
    nodes: list[dict[str, Any]]
    edges: list[dict[str, Any]]
    graph_name: Optional[str] = None
    graph_description: Optional[str] = None
class Message(BaseModel):
    """A single past query/response exchange in the conversation history."""

    query: str
    response: str
class ChatRequest(BaseModel):
    """Inbound chat request that will be proxied to the Otto API."""

    # The user's current question
    query: str
    # Prior exchanges, oldest first
    conversation_history: list[Message]
    # Client-generated identifier for this message
    message_id: str
    # When True (and graph_id is set), attach the user's graph data
    include_graph_data: bool = False
    # Graph to include as context when include_graph_data is True
    graph_id: Optional[str] = None

View File

@@ -0,0 +1,30 @@
import logging
from autogpt_libs.auth.middleware import auth_middleware
from fastapi import APIRouter, Depends
from backend.server.utils import get_user_id
from backend.util.settings import Settings
from .models import ApiResponse, ChatRequest
from .service import OttoService
logger = logging.getLogger(__name__)
settings = Settings()
OTTO_API_URL = settings.config.otto_api_url
router = APIRouter()
@router.post(
    "/ask", response_model=ApiResponse, dependencies=[Depends(auth_middleware)]
)
async def proxy_otto_request(
    request: ChatRequest, user_id: str = Depends(get_user_id)
) -> ApiResponse:
    """
    Proxy requests to Otto API while adding necessary security headers and logging.
    Requires an authenticated user.
    """
    # Thin wrapper: all payload building and error mapping lives in OttoService.
    return await OttoService.ask(request, user_id)

View File

@@ -0,0 +1,122 @@
import logging
from typing import Optional
import aiohttp
from fastapi import HTTPException
from backend.data import graph as graph_db
from backend.data.block import get_block
from backend.util.settings import Settings
from .models import ApiResponse, ChatRequest, GraphData
logger = logging.getLogger(__name__)
settings = Settings()
OTTO_API_URL = settings.config.otto_api_url
class OttoService:
    """Async proxy to the external Otto API.

    Builds the request payload (optionally enriched with the user's graph
    data) and forwards it to ``OTTO_API_URL``, translating transport and
    upstream failures into ``HTTPException``s.
    """

    @staticmethod
    async def _fetch_graph_data(
        request: ChatRequest, user_id: str
    ) -> Optional[GraphData]:
        """Fetch and sanitize graph data if requested and available.

        Returns None when graph data was not requested, the graph does not
        exist, or any error occurs — graph data is best-effort context, so
        failures here must never break the chat request itself.
        """
        if not (request.include_graph_data and request.graph_id):
            return None
        try:
            graph = await graph_db.get_graph(request.graph_id, user_id=user_id)
            if not graph:
                return None

            nodes_data = []
            for node in graph.nodes:
                block = get_block(node.block_id)
                if not block:
                    # Skip nodes whose block is unknown/unregistered.
                    continue
                node_data = {
                    "id": node.id,
                    "block_id": node.block_id,
                    "block_name": block.name,
                    "block_type": (
                        block.block_type.value if hasattr(block, "block_type") else None
                    ),
                    "data": {
                        k: v
                        for k, v in (node.input_default or {}).items()
                        if k not in ["credentials"]  # Exclude sensitive data
                    },
                }
                nodes_data.append(node_data)

            # Create a GraphData object with the required fields
            return GraphData(
                nodes=nodes_data,
                edges=[],
                graph_name=graph.name,
                graph_description=graph.description,
            )
        except Exception as e:
            # Deliberate best-effort: log and fall back to no graph context.
            logger.error(f"Failed to fetch graph data: {str(e)}")
            return None

    @staticmethod
    async def ask(request: ChatRequest, user_id: str) -> ApiResponse:
        """
        Send request to Otto API and handle the response.

        Raises:
            HTTPException: with the upstream status on a non-200 Otto reply,
                503 on connection failure, 500 on any unexpected error.
        """
        try:
            async with aiohttp.ClientSession() as session:
                headers = {
                    "Content-Type": "application/json",
                    "Accept": "application/json",
                }

                # If graph data is requested, fetch it
                graph_data = await OttoService._fetch_graph_data(request, user_id)

                # Prepare the payload with optional graph data
                payload = {
                    "query": request.query,
                    "conversation_history": [
                        msg.model_dump() for msg in request.conversation_history
                    ],
                    "user_id": user_id,
                    "message_id": request.message_id,
                }
                if graph_data:
                    payload["graph_data"] = graph_data.model_dump()

                logger.info(f"Sending request to Otto API for user {user_id}")
                logger.debug(f"Request payload: {payload}")

                async with session.post(
                    OTTO_API_URL, json=payload, headers=headers
                ) as response:
                    if response.status != 200:
                        error_text = await response.text()
                        logger.error(f"Otto API error: {error_text}")
                        raise HTTPException(
                            status_code=response.status,
                            detail=f"Otto API request failed: {error_text}",
                        )
                    data = await response.json()
                    logger.info(
                        f"Successfully received response from Otto API for user {user_id}"
                    )
                    return ApiResponse(**data)
        except HTTPException:
            # Re-raise as-is: without this, the generic handler below would
            # catch the non-200 HTTPException raised above and mask the
            # upstream status code with a 500.
            raise
        except aiohttp.ClientError as e:
            logger.error(f"Connection error to Otto API: {str(e)}")
            raise HTTPException(
                status_code=503, detail="Failed to connect to Otto service"
            ) from e
        except Exception as e:
            logger.error(f"Unexpected error in Otto API proxy: {str(e)}")
            raise HTTPException(
                status_code=500, detail="Internal server error in Otto proxy"
            ) from e

View File

@@ -160,6 +160,11 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="The port for notification service daemon to run on",
)
otto_api_url: str = Field(
default="",
description="The URL for the Otto API service",
)
platform_base_url: str = Field(
default="",
description="Must be set so the application knows where it's hosted at. "

View File

@@ -0,0 +1,33 @@
"use server";
import { revalidatePath } from "next/cache";
import BackendAPI from "@/lib/autogpt-server-api/client";
import { OttoQuery, OttoResponse } from "@/lib/autogpt-server-api/types";
const api = new BackendAPI();
/**
 * Server action that forwards a chat query to the Otto backend.
 *
 * Builds an OttoQuery (stamping a unique `<timestamp>-web` message id),
 * sends it through the shared BackendAPI client, and revalidates the
 * /build route on success. Errors are logged and re-thrown to the caller.
 */
export async function askOtto(
  query: string,
  conversationHistory: { query: string; response: string }[],
  includeGraphData: boolean,
  graphId?: string,
): Promise<OttoResponse> {
  const request: OttoQuery = {
    query,
    conversation_history: conversationHistory,
    message_id: `${Date.now()}-web`,
    include_graph_data: includeGraphData,
    graph_id: graphId,
  };

  try {
    const result = await api.askOtto(request);
    revalidatePath("/build");
    return result;
  } catch (error) {
    console.error("Error in askOtto server action:", error);
    throw error;
  }
}

View File

@@ -14,6 +14,7 @@ import { Toaster } from "@/components/ui/toaster";
import { IconType } from "@/components/ui/icons";
import { Providers } from "@/app/providers";
import TallyPopupSimple from "@/components/TallyPopup";
import OttoChatWidget from "@/components/OttoChatWidget";
const inter = Inter({ subsets: ["latin"], variable: "--font-inter" });
@@ -116,6 +117,7 @@ export default async function RootLayout({
)}
<main className="w-full flex-grow">{children}</main>
<TallyPopupSimple />
<OttoChatWidget />
</div>
<Toaster />
</Providers>

View File

@@ -7,6 +7,7 @@ import {
GraphExecution,
GraphExecutionID,
GraphExecutionMeta,
GraphID,
GraphMeta,
LibraryAgent,
LibraryAgentID,
@@ -61,22 +62,46 @@ export default function AgentRunsPage(): React.ReactElement {
setSelectedSchedule(schedule);
}, []);
const [graphVersions, setGraphVersions] = useState<Record<number, GraphMeta>>(
{},
);
const getGraphVersion = useCallback(
async (graphID: GraphID, version: number) => {
if (graphVersions[version]) return graphVersions[version];
const graphVersion = await api.getGraph(graphID, version);
setGraphVersions((prev) => ({
...prev,
[version]: graphVersion,
}));
return graphVersion;
},
[api, graphVersions],
);
const fetchAgents = useCallback(() => {
api.getLibraryAgent(agentID).then((agent) => {
setAgent(agent);
api.getGraph(agent.agent_id).then(setGraph);
getGraphVersion(agent.agent_id, agent.agent_version).then(
(_graph) =>
(graph && graph.version == _graph.version) || setGraph(_graph),
);
api.getGraphExecutions(agent.agent_id).then((agentRuns) => {
const sortedRuns = agentRuns.toSorted(
(a, b) => b.started_at - a.started_at,
);
setAgentRuns(sortedRuns);
// Preload the corresponding graph versions
new Set(sortedRuns.map((run) => run.graph_version)).forEach((version) =>
getGraphVersion(agent.agent_id, version),
);
if (!selectedView.id && isFirstLoad && sortedRuns.length > 0) {
// only for first load or first execution
setIsFirstLoad(false);
selectView({ type: "run", id: sortedRuns[0].execution_id });
setSelectedRun(sortedRuns[0]);
}
});
});
@@ -85,7 +110,7 @@ export default function AgentRunsPage(): React.ReactElement {
.getGraphExecutionInfo(agent.agent_id, selectedView.id)
.then(setSelectedRun);
}
}, [api, agentID, selectedView, isFirstLoad]);
}, [api, agentID, getGraphVersion, graph, selectedView, isFirstLoad, agent]);
useEffect(() => {
fetchAgents();
@@ -95,17 +120,29 @@ export default function AgentRunsPage(): React.ReactElement {
useEffect(() => {
if (selectedView.type != "run" || !selectedView.id || !agent) return;
// pull partial data from "cache" while waiting for the rest to load
const newSelectedRun = agentRuns.find(
(run) => run.execution_id == selectedView.id,
);
if (selectedView.id !== selectedRun?.execution_id) {
setSelectedRun(
agentRuns.find((r) => r.execution_id == selectedView.id) ?? null,
);
}
// Pull partial data from "cache" while waiting for the rest to load
setSelectedRun(newSelectedRun ?? null);
api
.getGraphExecutionInfo(agent.agent_id, selectedView.id)
.then(setSelectedRun);
}, [api, selectedView, agentID]);
// Ensure corresponding graph version is available before rendering I/O
api
.getGraphExecutionInfo(agent.agent_id, selectedView.id)
.then(async (run) => {
await getGraphVersion(run.graph_id, run.graph_version);
setSelectedRun(run);
});
}
}, [
api,
selectedView,
agent,
agentRuns,
selectedRun?.execution_id,
getGraphVersion,
]);
const fetchSchedules = useCallback(async () => {
if (!agent) return;
@@ -205,7 +242,7 @@ export default function AgentRunsPage(): React.ReactElement {
{(selectedView.type == "run" && selectedView.id ? (
selectedRun && (
<AgentRunDetailsView
graph={graph}
graph={graphVersions[selectedRun.graph_version] ?? graph}
run={selectedRun}
agentActions={agentActions}
deleteRun={() => setConfirmingDeleteAgentRun(selectedRun)}

View File

@@ -0,0 +1,298 @@
"use client";
import React, { useEffect, useState, useRef } from "react";
import { useSearchParams, usePathname } from "next/navigation";
import { useToast } from "@/components/ui/use-toast";
import useAgentGraph from "../hooks/useAgentGraph";
import ReactMarkdown from "react-markdown";
import { GraphID } from "@/lib/autogpt-server-api/types";
import { askOtto } from "@/app/build/actions";
/** A single chat bubble: who sent it and its markdown/plain-text content. */
interface Message {
  type: "user" | "assistant";
  content: string;
}
const OttoChatWidget = () => {
const [isOpen, setIsOpen] = useState(false);
const [messages, setMessages] = useState<Message[]>([]);
const [inputValue, setInputValue] = useState("");
const [isProcessing, setIsProcessing] = useState(false);
const [includeGraphData, setIncludeGraphData] = useState(false);
const messagesEndRef = useRef<HTMLDivElement>(null);
const searchParams = useSearchParams();
const pathname = usePathname();
const flowID = searchParams.get("flowID");
const { nodes, edges } = useAgentGraph(
flowID ? (flowID as GraphID) : undefined,
);
const { toast } = useToast();
useEffect(() => {
// Add welcome message when component mounts
if (messages.length === 0) {
setMessages([
{
type: "assistant",
content: "Hello im Otto! Ask me anything about AutoGPT!",
},
]);
}
}, [messages.length]);
useEffect(() => {
// Scroll to bottom whenever messages change
messagesEndRef.current?.scrollIntoView({ behavior: "smooth" });
}, [messages]);
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!inputValue.trim() || isProcessing) return;
const userMessage = inputValue.trim();
setInputValue("");
setIsProcessing(true);
// Add user message to chat
setMessages((prev) => [...prev, { type: "user", content: userMessage }]);
try {
// Add temporary processing message
setMessages((prev) => [
...prev,
{ type: "assistant", content: "Processing your question..." },
]);
const conversationHistory = messages.reduce<
{ query: string; response: string }[]
>((acc, msg, i, arr) => {
if (
msg.type === "user" &&
i + 1 < arr.length &&
arr[i + 1].type === "assistant"
) {
acc.push({
query: msg.content,
response: arr[i + 1].content,
});
}
return acc;
}, []);
const data = await askOtto(
userMessage,
conversationHistory,
includeGraphData,
flowID || undefined,
);
// Remove processing message and add actual response
setMessages((prev) => [
...prev.slice(0, -1),
{ type: "assistant", content: data.answer },
]);
} catch (error) {
console.error("Error calling API:", error);
// Remove processing message and add error message
const errorMessage =
error instanceof Error && error.message === "Authentication required"
? "Please sign in to use the chat feature."
: "Sorry, there was an error processing your message. Please try again.";
setMessages((prev) => [
...prev.slice(0, -1),
{ type: "assistant", content: errorMessage },
]);
if (
error instanceof Error &&
error.message === "Authentication required"
) {
toast({
title: "Authentication Error",
description: "Please sign in to use the chat feature.",
variant: "destructive",
});
}
} finally {
setIsProcessing(false);
setIncludeGraphData(false);
}
};
// Don't render the chat widget if we're not on the build page or in local mode
if (process.env.NEXT_PUBLIC_BEHAVE_AS !== "CLOUD" || pathname !== "/build") {
return null;
}
if (!isOpen) {
return (
<div className="fixed bottom-4 right-4 z-50">
<button
onClick={() => setIsOpen(true)}
className="inline-flex h-14 w-14 items-center justify-center whitespace-nowrap rounded-2xl bg-[rgba(65,65,64,1)] text-neutral-50 shadow transition-colors hover:bg-neutral-900/90 focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-neutral-950 disabled:pointer-events-none disabled:opacity-50 dark:bg-neutral-50 dark:text-neutral-900 dark:hover:bg-neutral-50/90 dark:focus-visible:ring-neutral-300"
aria-label="Open chat widget"
>
<svg
viewBox="0 0 24 24"
className="h-6 w-6"
stroke="currentColor"
strokeWidth="2"
fill="none"
strokeLinecap="round"
strokeLinejoin="round"
>
<path d="M21 15a2 2 0 0 1-2 2H7l-4 4V5a2 2 0 0 1 2-2h14a2 2 0 0 1 2 2z" />
</svg>
</button>
</div>
);
}
return (
<div className="fixed bottom-4 right-4 z-50 flex h-[600px] w-[600px] flex-col rounded-lg border bg-background shadow-xl">
{/* Header */}
<div className="flex items-center justify-between border-b p-4">
<h2 className="font-semibold">Otto Assistant</h2>
<button
onClick={() => setIsOpen(false)}
className="text-muted-foreground transition-colors hover:text-foreground"
aria-label="Close chat"
>
<svg
viewBox="0 0 24 24"
className="h-5 w-5"
stroke="currentColor"
strokeWidth="2"
fill="none"
strokeLinecap="round"
strokeLinejoin="round"
>
<line x1="18" y1="6" x2="6" y2="18" />
<line x1="6" y1="6" x2="18" y2="18" />
</svg>
</button>
</div>
{/* Messages */}
<div className="flex-1 space-y-4 overflow-y-auto p-4">
{messages.map((message, index) => (
<div
key={index}
className={`flex ${message.type === "user" ? "justify-end" : "justify-start"}`}
>
<div
className={`max-w-[80%] rounded-lg p-3 ${
message.type === "user"
? "ml-4 bg-black text-white"
: "mr-4 bg-[#8b5cf6] text-white"
}`}
>
{message.type === "user" ? (
message.content
) : (
<ReactMarkdown
className="prose prose-sm dark:prose-invert max-w-none"
components={{
p: ({ children }) => (
<p className="mb-2 last:mb-0">{children}</p>
),
code(props) {
const { children, className, node, ...rest } = props;
const match = /language-(\w+)/.exec(className || "");
return match ? (
<pre className="overflow-x-auto rounded-md bg-muted-foreground/20 p-3">
<code className="font-mono text-sm" {...rest}>
{children}
</code>
</pre>
) : (
<code
className="rounded-md bg-muted-foreground/20 px-1 py-0.5 font-mono text-sm"
{...rest}
>
{children}
</code>
);
},
ul: ({ children }) => (
<ul className="mb-2 list-disc pl-4 last:mb-0">
{children}
</ul>
),
ol: ({ children }) => (
<ol className="mb-2 list-decimal pl-4 last:mb-0">
{children}
</ol>
),
li: ({ children }) => (
<li className="mb-1 last:mb-0">{children}</li>
),
}}
>
{message.content}
</ReactMarkdown>
)}
</div>
</div>
))}
<div ref={messagesEndRef} />
</div>
{/* Input */}
<form onSubmit={handleSubmit} className="border-t p-4">
<div className="flex flex-col gap-2">
<div className="flex gap-2">
<input
type="text"
value={inputValue}
onChange={(e) => setInputValue(e.target.value)}
placeholder="Type your message..."
className="flex-1 rounded-md border bg-background px-3 py-2 focus:outline-none focus:ring-2 focus:ring-primary"
disabled={isProcessing}
/>
<button
type="submit"
disabled={isProcessing}
className="rounded-md bg-primary px-4 py-2 text-primary-foreground transition-colors hover:bg-primary/90 disabled:opacity-50"
>
Send
</button>
</div>
{nodes && edges && (
<button
type="button"
onClick={() => {
setIncludeGraphData((prev) => !prev);
}}
className={`flex items-center gap-2 rounded border px-2 py-1.5 text-sm transition-all duration-200 ${
includeGraphData
? "border-primary/30 bg-primary/10 text-primary hover:shadow-[0_0_10px_3px_rgba(139,92,246,0.3)]"
: "border-transparent bg-muted text-muted-foreground hover:bg-muted/80 hover:shadow-[0_0_10px_3px_rgba(139,92,246,0.15)]"
}`}
>
<svg
viewBox="0 0 24 24"
className="h-4 w-4"
stroke="currentColor"
strokeWidth="2"
fill="none"
strokeLinecap="round"
strokeLinejoin="round"
>
<rect x="3" y="3" width="18" height="18" rx="2" ry="2" />
<circle cx="8.5" cy="8.5" r="1.5" />
<polyline points="21 15 16 10 5 21" />
</svg>
{includeGraphData
? "Graph data will be included"
: "Include graph data"}
</button>
)}
</div>
</form>
</div>
);
};
export default OttoChatWidget;

View File

@@ -56,7 +56,7 @@ const TallyPopupSimple = () => {
};
return (
<div className="fixed bottom-1 right-6 z-50 hidden select-none items-center gap-4 p-3 transition-all duration-300 ease-in-out md:flex">
<div className="fixed bottom-1 right-24 z-50 hidden select-none items-center gap-4 p-3 transition-all duration-300 ease-in-out md:flex">
{show_tutorial && (
<Button
variant="default"

View File

@@ -47,6 +47,8 @@ import {
TransactionHistory,
User,
UserPasswordCredentials,
OttoQuery,
OttoResponse,
UserOnboarding,
} from "./types";
import { createBrowserClient } from "@supabase/ssr";
@@ -614,6 +616,10 @@ export default class BackendAPI {
return this._request("GET", path, query);
}
  /** POST a chat query to the Otto proxy endpoint (`/otto/ask`). */
  async askOtto(query: OttoQuery): Promise<OttoResponse> {
    return this._request("POST", "/otto/ask", query);
  }
private async _uploadFile(path: string, file: File): Promise<string> {
// Get session with retry logic
let token = "no-token-found";

View File

@@ -749,3 +749,22 @@ export interface UserOnboarding {
export type Brand<T, Brand extends string> = T & {
readonly [B in Brand as `__${B}_brand`]: never;
};
/** A source document cited in an Otto answer. */
export interface OttoDocument {
  url: string;
  relevance_score: number;
}
/** Response payload returned by the Otto chat endpoint. */
export interface OttoResponse {
  answer: string;
  documents: OttoDocument[];
  success: boolean;
}
/** Request payload for the Otto chat endpoint. */
export interface OttoQuery {
  query: string;
  // Prior exchanges, oldest first
  conversation_history: { query: string; response: string }[];
  // Client-generated identifier for this message
  message_id: string;
  // When true (and graph_id is set), the backend attaches graph data
  include_graph_data: boolean;
  graph_id?: string;
}