Merge branch 'master' into ntindle/samples

This commit is contained in:
Nicholas Tindle
2024-08-14 13:29:39 -05:00
13 changed files with 107 additions and 35 deletions

View File

@@ -23,12 +23,16 @@ import { history } from "./history";
import NodeHandle from "./NodeHandle";
import { CustomEdgeData } from "./CustomEdge";
import { NodeGenericInputField } from "./node-input-components";
import SchemaTooltip from "./SchemaTooltip";
import { getPrimaryCategoryColor } from "@/lib/utils";
// Parsed form of an input key: the base property key plus an optional array index.
type ParsedKey = { key: string; index?: number };
export type CustomNodeData = {
blockType: string;
title: string;
description: string;
categories: string[];
inputSchema: BlockIORootSchema;
outputSchema: BlockIORootSchema;
hardcodedValues: { [key: string]: any };
@@ -281,9 +285,16 @@ const CustomNode: FC<NodeProps<CustomNodeData>> = ({ data, id }) => {
onMouseEnter={handleHovered}
onMouseLeave={handleMouseLeave}
>
<div className="mb-2 p-3 bg-gray-300/[.7] rounded-t-xl">
<div className="p-3 text-lg font-semibold font-roboto">
{beautifyString(data.blockType?.replace(/Block$/, "") || data.title)}
<div
className={`mb-2 p-3 ${getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
>
<div className="flex items-center justify-between">
<div className="p-3 text-lg font-semibold font-roboto">
{beautifyString(
data.blockType?.replace(/Block$/, "") || data.title,
)}
</div>
<SchemaTooltip description={data.description} />
</div>
<div className="flex gap-[5px] ">
{isHovered && (

View File

@@ -380,6 +380,8 @@ const FlowEditor: React.FC<{
data: {
blockType: nodeType,
title: `${nodeType} ${nodeId}`,
description: nodeSchema.description,
categories: nodeSchema.categories,
inputSchema: nodeSchema.inputSchema,
outputSchema: nodeSchema.outputSchema,
hardcodedValues: {},
@@ -459,6 +461,8 @@ const FlowEditor: React.FC<{
data: {
block_id: block.id,
blockType: block.name,
categories: block.categories,
description: block.description,
title: `${block.name} ${node.id}`,
inputSchema: block.inputSchema,
outputSchema: block.outputSchema,

View File

@@ -60,7 +60,7 @@ const NodeHandle: FC<HandleProps> = ({
{label}
</div>
</Handle>
<SchemaTooltip schema={schema} />
<SchemaTooltip description={schema.description} />
</div>
);
} else {

View File

@@ -4,12 +4,11 @@ import {
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types";
import { Info } from "lucide-react";
import ReactMarkdown from "react-markdown";
const SchemaTooltip: React.FC<{ schema: BlockIOSubSchema }> = ({ schema }) => {
if (!schema.description) return null;
const SchemaTooltip: React.FC<{ description?: string }> = ({ description }) => {
if (!description) return null;
return (
<TooltipProvider delayDuration={400}>
@@ -25,7 +24,7 @@ const SchemaTooltip: React.FC<{ schema: BlockIOSubSchema }> = ({ schema }) => {
),
}}
>
{schema.description}
{description}
</ReactMarkdown>
</TooltipContent>
</Tooltip>

View File

@@ -14,7 +14,8 @@ import {
import { Block } from "@/lib/autogpt-server-api";
import { PlusIcon } from "@radix-ui/react-icons";
import { IconToyBrick } from "@/components/ui/icons";
import SchemaTooltip from "@/components/SchemaTooltip";
import { getPrimaryCategoryColor } from "@/lib/utils";
interface BlocksControlProps {
blocks: Block[];
addBlock: (id: string, name: string) => void;
@@ -73,13 +74,17 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
<CardContent className="p-1">
<ScrollArea className="h-[60vh]">
{filteredBlocks.map((block) => (
<Card key={block.id} className="m-2">
<Card
key={block.id}
className={`m-2 ${getPrimaryCategoryColor(block.categories)}`}
>
<div className="flex items-center justify-between m-3">
<div className="flex-1 min-w-0 mr-2">
<span className="font-medium truncate block">
{beautifyString(block.name)}
</span>
</div>
<SchemaTooltip description={block.description} />
<div className="flex items-center gap-1 flex-shrink-0">
<Button
variant="ghost"

View File

@@ -1,8 +1,15 @@
/* Mirror of autogpt_server/data/block.py:Block */
// A block category: machine-readable name plus a human-readable description.
// Mirrors the shape produced by BlockCategory.dict() on the server
// (autogpt_server/data/block.py) — keep the two in sync.
export type Category = {
category: string;
description: string;
};
// Client-side mirror of a server block definition
// (see autogpt_server/data/block.py:Block — keep in sync).
export type Block = {
id: string;
name: string;
description: string;
categories: Category[];
inputSchema: BlockIORootSchema;
outputSchema: BlockIORootSchema;
};

View File

@@ -1,5 +1,6 @@
import { type ClassValue, clsx } from "clsx";
import { twMerge } from "tailwind-merge";
import { Category } from "./autogpt-server-api/types";
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
@@ -175,3 +176,21 @@ export function removeEmptyStringsAndNulls(obj: any): any {
}
return obj;
}
/**
 * Background tint per block category, keyed by the server-side
 * BlockCategory name. Each entry is a complete Tailwind class string
 * (never built by interpolation) so Tailwind's content scanner can
 * detect and generate the class.
 */
export const categoryColorMap: Record<string, string> = {
  AI: "bg-orange-300/[.7]",
  SOCIAL: "bg-yellow-300/[.7]",
  TEXT: "bg-green-300/[.7]",
  SEARCH: "bg-blue-300/[.7]",
  BASIC: "bg-purple-300/[.7]",
  INPUT: "bg-cyan-300/[.7]",
  // Fix: Tailwind's default palette has no "brown", so the previous
  // bg-brown-300/[.7] generated no CSS and OUTPUT blocks were left
  // untinted. "stone" is the closest built-in brownish neutral.
  OUTPUT: "bg-stone-300/[.7]",
  LOGIC: "bg-teal-300/[.7]",
};

/**
 * Returns the Tailwind background class for a block's primary (first)
 * category.
 *
 * Falls back to a neutral gray when the block has no categories or its
 * primary category has no entry in {@link categoryColorMap}.
 */
export function getPrimaryCategoryColor(categories: Category[]): string {
  const primary = categories[0];
  if (primary === undefined) {
    return "bg-gray-300/[.7]";
  }
  // ?? (not ||): map values are non-empty strings, so only a genuinely
  // missing key falls through to the default.
  return categoryColorMap[primary.category] ?? "bg-gray-300/[.7]";
}

View File

@@ -108,7 +108,7 @@ class AutoGPTAgentBlock(Block):
super().__init__(
id="d2e2ecd2-9ae6-422d-8dfe-ceca500ce6a6",
description="AutoGPT agent, it utilizes a Large Language Model and enabled components/tools to perform a task.",
categories={BlockCategory.LLM},
categories={BlockCategory.AI},
input_schema=AutoGPTAgentBlock.Input,
output_schema=AutoGPTAgentBlock.Output,
test_input={

View File

@@ -158,7 +158,6 @@ class ObjectLookupBase(Block, ABC, Generic[T]):
class ObjectLookupBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC})
@@ -167,18 +166,16 @@ class ObjectLookupBlock(ObjectLookupBase[Any]):
class InputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.INPUT_OUTPUT})
super().__init__(categories={BlockCategory.BASIC, BlockCategory.INPUT})
def block_id(self) -> str:
return "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b"
class OutputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.INPUT_OUTPUT})
super().__init__(categories={BlockCategory.BASIC, BlockCategory.OUTPUT})
def block_id(self) -> str:
return "363ae599-353e-4804-937e-b2ee3cef3da4"

View File

@@ -93,7 +93,7 @@ class ObjectLlmCallBlock(Block):
super().__init__(
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
description="Call a Large Language Model (LLM) to generate formatted object based on the given prompt.",
categories={BlockCategory.LLM},
categories={BlockCategory.AI},
input_schema=ObjectLlmCallBlock.Input,
output_schema=ObjectLlmCallBlock.Output,
test_input={
@@ -261,7 +261,7 @@ class TextLlmCallBlock(Block):
super().__init__(
id="1f292d4a-41a4-4977-9684-7c8d560b9f91",
description="Call a Large Language Model (LLM) to generate a string based on the given prompt.",
categories={BlockCategory.LLM},
categories={BlockCategory.AI},
input_schema=TextLlmCallBlock.Input,
output_schema=TextLlmCallBlock.Output,
test_input={"prompt": "User prompt"},
@@ -307,7 +307,7 @@ class TextSummarizerBlock(Block):
super().__init__(
id="c3d4e5f6-7g8h-9i0j-1k2l-m3n4o5p6q7r8",
description="Utilize a Large Language Model (LLM) to summarize a long text.",
categories={BlockCategory.LLM, BlockCategory.TEXT},
categories={BlockCategory.AI, BlockCategory.TEXT},
input_schema=TextSummarizerBlock.Input,
output_schema=TextSummarizerBlock.Output,
test_input={"text": "Lorem ipsum..." * 100},
@@ -450,7 +450,7 @@ class AdvancedLlmCallBlock(Block):
super().__init__(
id="c3d4e5f6-g7h8-i9j0-k1l2-m3n4o5p6q7r8",
description="Advanced LLM call that takes a list of messages and sends them to the language model.",
categories={BlockCategory.LLM},
categories={BlockCategory.AI},
input_schema=AdvancedLlmCallBlock.Input,
output_schema=AdvancedLlmCallBlock.Output,
test_input={
@@ -495,7 +495,9 @@ class AdvancedLlmCallBlock(Block):
elif provider == "anthropic":
client = anthropic.Anthropic(api_key=api_key)
response = client.messages.create(
model=model.value, max_tokens=max_tokens or 4096, messages=messages # type: ignore
model=model.value,
max_tokens=max_tokens or 4096,
messages=messages, # type: ignore
)
return response.content[0].text if response.content else ""
elif provider == "groq":
@@ -508,7 +510,9 @@ class AdvancedLlmCallBlock(Block):
return response.choices[0].message.content or ""
elif provider == "ollama":
response = ollama.chat(
model=model.value, messages=messages, stream=False # type: ignore
model=model.value,
messages=messages,
stream=False, # type: ignore
)
return response["message"]["content"]
else:

View File

@@ -115,8 +115,8 @@ class TextParserBlock(Block):
class TextFormatterBlock(Block):
class Input(BlockSchema):
texts: list[str] = Field(description="Texts (list) to format", default=[])
named_texts: dict[str, str] = Field(
texts: list[Any] = Field(description="Texts (list) to format", default=[])
named_texts: dict[str, Any] = Field(
description="Texts (dict) to format", default={}
)
format: str = Field(
@@ -150,16 +150,24 @@ class TextFormatterBlock(Block):
)
def run(self, input_data: Input) -> BlockOutput:
yield "output", input_data.format.format(
texts=input_data.texts,
**input_data.named_texts,
)
texts = [
text if isinstance(text, str) else json.dumps(text)
for text in input_data.texts
]
named_texts = {
key: value if isinstance(value, str) else json.dumps(value)
for key, value in input_data.named_texts.items()
}
yield "output", input_data.format.format(texts=texts, **named_texts)
class TextCombinerBlock(Block):
class Input(BlockSchema):
input1: str = Field(description="First text input", default="a")
input2: str = Field(description="Second text input", default="b")
input1: str = Field(description="First text input", default="")
input2: str = Field(description="Second text input", default="")
input3: str = Field(description="Second text input", default="")
input4: str = Field(description="Second text input", default="")
delimiter: str = Field(description="Delimiter to combine texts", default="")
class Output(BlockSchema):
output: str = Field(description="Combined text")
@@ -182,5 +190,14 @@ class TextCombinerBlock(Block):
)
def run(self, input_data: Input) -> BlockOutput:
combined_text = (input_data.input1 or "") + (input_data.input2 or "")
combined_text = input_data.delimiter.join(
text
for text in [
input_data.input1,
input_data.input2,
input_data.input3,
input_data.input4,
]
if text
)
yield "output", combined_text

View File

@@ -17,12 +17,14 @@ CompletedBlockOutput = dict[str, list[Any]] # Completed stream, collected as a
class BlockCategory(Enum):
LLM = "Block that leverages the Large Language Model to perform a task."
AI = "Block that leverages AI to perform a task."
SOCIAL = "Block that interacts with social media platforms."
TEXT = "Block that processes text data."
SEARCH = "Block that searches or extracts information from the internet."
BASIC = "Block that performs basic operations."
INPUT_OUTPUT = "Block that interacts with input/output of the graph."
INPUT = "Block that interacts with input of the graph."
OUTPUT = "Block that interacts with output of the graph."
LOGIC = "Programming logic to control the flow of your agent"
def dict(self) -> dict[str, str]:
return {"category": self.name, "description": self.value}
@@ -113,7 +115,6 @@ class EmptySchema(BlockSchema):
class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
def __init__(
self,
id: str = "",

View File

@@ -7,7 +7,7 @@ import prisma.types
from prisma.models import AgentGraph, AgentNode, AgentNodeLink
from pydantic import PrivateAttr
from autogpt_server.blocks.basic import InputBlock, OutputBlock
from autogpt_server.blocks.basic import InputBlock, OutputBlock, ValueBlock
from autogpt_server.data.block import BlockInput, get_block
from autogpt_server.data.db import BaseDbModel, transaction
from autogpt_server.data.user import DEFAULT_USER_ID
@@ -175,6 +175,11 @@ class Graph(GraphMeta):
)
node_map = {v.id: v for v in self.nodes}
def is_value_block(nid: str) -> bool:
bid = node_map[nid].block_id
b = get_block(bid)
return isinstance(b, ValueBlock)
def is_input_output_block(nid: str) -> bool:
bid = node_map[nid].block_id
b = get_block(bid)
@@ -217,6 +222,9 @@ class Graph(GraphMeta):
):
raise ValueError(f"{suffix}, Connecting nodes from different subgraph.")
if is_value_block(link.source_id):
link.is_static = True # Each value block output should be static.
# TODO: Add type compatibility check here.
@staticmethod