Merge branch 'master' into ntindle/samples

This commit is contained in:
Nicholas Tindle
2024-08-16 17:19:36 -07:00
committed by GitHub
18 changed files with 73 additions and 48 deletions

View File

@@ -14,6 +14,7 @@ import { FlowContext } from "./Flow";
export type CustomEdgeData = {
edgeColor: string;
sourcePos?: XYPosition;
isStatic?: boolean;
beadUp?: number;
beadDown?: number;
beadData?: any[];
@@ -57,7 +58,7 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
};
const animationDuration = 500; // Duration in milliseconds for bead to travel the curve
const beadDiameter = 10;
const beadDiameter = 12;
const deltaTime = 16;
function setTargetPositions(beads: Bead[]) {
@@ -67,7 +68,8 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
);
return beads.map((bead, index) => {
const targetPosition = distanceBetween * index + beadDiameter * 1.3;
const distanceFromEnd = beadDiameter * 1.35;
const targetPosition = distanceBetween * index + distanceFromEnd;
const t = getTForDistance(-targetPosition);
return {
@@ -86,6 +88,7 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
const beadUp = data?.beadUp!;
// Add beads
setBeads(({ beads, created, destroyed }) => {
const newBeads = [];
for (let i = 0; i < beadUp - created; i++) {
@@ -96,6 +99,7 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
return { beads: b, created: beadUp, destroyed };
});
// Remove beads if not animating
if (visualizeBeads !== "animate") {
setBeads(({ beads, created, destroyed }) => {
let destroyedCount = 0;
@@ -105,7 +109,8 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
.filter((bead, index) => {
const beadDown = data?.beadDown!;
const removeCount = beadDown - destroyed;
// Always remove one less bead for static edges, so one bead stays at the connection point
const removeCount = beadDown - destroyed - (data?.isStatic ? 1 : 0);
if (bead.t >= bead.targetT && index < removeCount) {
destroyedCount++;
return false;
@@ -122,6 +127,7 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
return;
}
// Animate and remove beads
const interval = setInterval(() => {
setBeads(({ beads, created, destroyed }) => {
let destroyedCount = 0;
@@ -142,7 +148,8 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
.filter((bead, index) => {
const beadDown = data?.beadDown!;
const removeCount = beadDown - destroyed;
// Always remove one less bead for static edges, so one bead stays at the connection point
const removeCount = beadDown - destroyed - (data?.isStatic ? 1 : 0);
if (bead.t >= bead.targetT && index < removeCount) {
destroyedCount++;
return false;
@@ -169,10 +176,11 @@ const CustomEdgeFC: FC<EdgeProps<CustomEdgeData>> = ({
path={svgPath}
markerEnd={markerEnd}
style={{
strokeWidth: isHovered ? 3 : 2,
strokeWidth: (isHovered ? 3 : 2) + (data?.isStatic ? 0.5 : 0),
stroke:
(data?.edgeColor ?? "#555555") +
(selected || isHovered ? "" : "80"),
strokeDasharray: data?.isStatic ? "5 3" : "0",
}}
/>
<path

View File

@@ -527,6 +527,7 @@ const FlowEditor: React.FC<{
getOutputType(link.source_id, link.source_name!),
),
sourcePos: getNode(link.source_id)?.position,
isStatic: link.is_static,
beadUp: 0,
beadDown: 0,
beadData: [],
@@ -825,6 +826,12 @@ const FlowEditor: React.FC<{
);
outputEdges.forEach((edge) => {
edge.data!.beadUp = (edge.data!.beadUp ?? 0) + 1;
// For static edges, beadDown is always one less than beadUp,
// because there's no queueing and one bead is always at the connection point
if (edge.data?.isStatic) {
edge.data!.beadDown = (edge.data!.beadUp ?? 0) - 1;
edge.data!.beadData! = edge.data!.beadData!.slice(0, -1);
}
// TODO(kcze): this assumes the output at key is always an array with one element
edge.data!.beadData = [
exec.output_data[key][0],
@@ -842,9 +849,11 @@ const FlowEditor: React.FC<{
);
inputEdges.forEach((edge) => {
// Skip decreasing bead count if edge doesn't match or if it's static
if (
edge.data!.beadData![edge.data!.beadData!.length - 1] !==
exec.input_data[key]
exec.input_data[key] ||
edge.data?.isStatic
) {
return;
}

View File

@@ -117,6 +117,7 @@ export type Link = {
sink_id: string;
source_name: string;
sink_name: string;
is_static: boolean;
};
export type LinkCreatable = Omit<Link, "id"> & {

View File

@@ -12,20 +12,7 @@ class ValueBlock(Block):
"""
This block allows you to provide a constant value as a block, in a stateless manner.
The common use-case is to simply pass the `input` data; it will `output` the same data.
But this will not retain the state, once it is executed, the output is consumed.
To retain the state, you can feed the `output` to the `data` input, so that the data
is retained in the block for the next execution. You can then trigger the block by
feeding the `input` pin with any data, and the block will produce value of `data`.
Ex:
<constant_data> <any_trigger>
|| ||
=====> `data` `input`
|| \\ //
|| ValueBlock
|| ||
========= `output`
The block output will be static; the output can be consumed multiple times.
"""
class Input(BlockSchema):
@@ -46,9 +33,7 @@ class ValueBlock(Block):
super().__init__(
id="1ff065e9-88e8-4358-9d82-8dc91f622ba9",
description="This block forwards the `input` pin to `output` pin. "
"If the `data` is provided, it will prioritize forwarding `data` "
"over `input`. By connecting the `output` pin to `data` pin, "
"you can retain a constant value for the next executions.",
"This block output will be static, the output can be consumed many times.",
categories={BlockCategory.BASIC},
input_schema=ValueBlock.Input,
output_schema=ValueBlock.Output,
@@ -60,6 +45,7 @@ class ValueBlock(Block):
("output", "Hello, World!"), # No data provided, so trigger is returned
("output", "Existing Data"), # Data is provided, so data is returned.
],
static_output=True,
)
def run(self, input_data: Input) -> BlockOutput:
@@ -167,7 +153,7 @@ class ObjectLookupBlock(ObjectLookupBase[Any]):
class InputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.INPUT})
super().__init__(categories={BlockCategory.INPUT, BlockCategory.BASIC})
def block_id(self) -> str:
return "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b"
@@ -175,7 +161,7 @@ class InputBlock(ObjectLookupBase[Any]):
class OutputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.OUTPUT})
super().__init__(categories={BlockCategory.OUTPUT, BlockCategory.BASIC})
def block_id(self) -> str:
return "363ae599-353e-4804-937e-b2ee3cef3da4"

View File

@@ -1,7 +1,7 @@
from enum import Enum
from typing import Any
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
@@ -56,6 +56,7 @@ class ConditionBlock(Block):
input_schema=ConditionBlock.Input,
output_schema=ConditionBlock.Output,
description="Handles conditional logic based on comparison operators",
categories={BlockCategory.LOGIC},
test_input={
"value1": 10,
"operator": ComparisonOperator.GREATER_THAN.value,

View File

@@ -4,7 +4,7 @@ import aiohttp
import discord
from pydantic import Field
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import BlockSecret, SecretField
@@ -31,6 +31,7 @@ class DiscordReaderBlock(Block):
id="d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t", # Unique ID for the node
input_schema=DiscordReaderBlock.Input, # Assign input schema
output_schema=DiscordReaderBlock.Output, # Assign output schema
categories={BlockCategory.SOCIAL},
test_input={"discord_bot_token": "test_token", "continuous_read": False},
test_output=[
(
@@ -147,6 +148,7 @@ class DiscordMessageSenderBlock(Block):
id="h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6", # Unique ID for the node
input_schema=DiscordMessageSenderBlock.Input, # Assign input schema
output_schema=DiscordMessageSenderBlock.Output, # Assign output schema
categories={BlockCategory.SOCIAL},
test_input={
"discord_bot_token": "YOUR_DISCORD_BOT_TOKEN",
"channel_name": "general",

View File

@@ -45,7 +45,7 @@ class SendEmailBlock(Block):
super().__init__(
id="a1234567-89ab-cdef-0123-456789abcdef",
description="This block sends an email using the provided SMTP credentials.",
categories={BlockCategory.TEXT},
categories={BlockCategory.OUTPUT},
input_schema=SendEmailBlock.Input,
output_schema=SendEmailBlock.Output,
test_input={

View File

@@ -32,7 +32,7 @@ class HttpRequestBlock(Block):
super().__init__(
id="6595ae1f-b924-42cb-9a41-551a0611c4b4",
description="This block makes an HTTP request to the given URL.",
categories={BlockCategory.BASIC},
categories={BlockCategory.INPUT},
input_schema=HttpRequestBlock.Input,
output_schema=HttpRequestBlock.Output,
)

View File

@@ -1,6 +1,6 @@
from typing import Any, List, Tuple
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
@@ -21,6 +21,7 @@ class ForEachBlock(Block):
id="f8e7d6c5-b4a3-2c1d-0e9f-8g7h6i5j4k3l",
input_schema=ForEachBlock.Input,
output_schema=ForEachBlock.Output,
categories={BlockCategory.LOGIC},
test_input={"items": [1, "two", {"three": 3}, [4, 5]]},
test_output=[
("item", (0, 1)),

View File

@@ -528,7 +528,7 @@ class AdvancedLlmCallBlock(Block):
elif provider == "ollama":
response = ollama.chat(
model=model.value,
messages=messages,
messages=messages, # type: ignore
stream=False, # type: ignore
)
return response["message"]["content"]

View File

@@ -2,7 +2,7 @@ import operator
from enum import Enum
from typing import Any
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
@@ -39,6 +39,7 @@ class MathsBlock(Block):
id="b1ab9b19-67a6-406d-abf5-2dba76d00c79",
input_schema=MathsBlock.Input,
output_schema=MathsBlock.Output,
categories={BlockCategory.LOGIC},
test_input={
"operation": Operation.ADD.value,
"a": 10.0,
@@ -97,6 +98,7 @@ class CounterBlock(Block):
id="3c9c2f42-b0c3-435f-ba35-05f7a25c772a",
input_schema=CounterBlock.Input,
output_schema=CounterBlock.Output,
categories={BlockCategory.LOGIC},
test_input={"collection": [1, 2, 3, 4, 5]},
test_output=[
("count", 5),

View File

@@ -2,7 +2,7 @@ from typing import List
import requests
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField
@@ -70,6 +70,7 @@ class CreateMediumPostBlock(Block):
id="3f7b2dcb-4a78-4e3f-b0f1-88132e1b89df",
input_schema=CreateMediumPostBlock.Input,
output_schema=CreateMediumPostBlock.Output,
categories={BlockCategory.SOCIAL},
test_input={
"author_id": "1234567890abcdef",
"title": "Test Post",

View File

@@ -5,7 +5,7 @@ from typing import Any
import feedparser
import pydantic
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
@@ -46,6 +46,7 @@ class RSSReaderBlock(Block):
id="c6731acb-4105-4zp1-bc9b-03d0036h370g",
input_schema=RSSReaderBlock.Input,
output_schema=RSSReaderBlock.Output,
categories={BlockCategory.OUTPUT},
test_input={
"rss_url": "https://example.com/rss",
"time_period": 10_000_000,

View File

@@ -1,10 +1,10 @@
import json
import re
from typing import Any
from pydantic import Field
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.util import json
class TextMatcherBlock(Block):

View File

@@ -3,7 +3,7 @@ from urllib.parse import parse_qs, urlparse
from youtube_transcript_api import YouTubeTranscriptApi
from youtube_transcript_api.formatters import TextFormatter
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
@@ -26,6 +26,7 @@ class YouTubeTranscriberBlock(Block):
id="f3a8f7e1-4b1d-4e5f-9f2a-7c3d5a2e6b4c",
input_schema=YouTubeTranscriberBlock.Input,
output_schema=YouTubeTranscriberBlock.Output,
categories={BlockCategory.SOCIAL},
test_input={"youtube_url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"},
test_output=[
("video_id", "dQw4w9WgXcQ"),

View File

@@ -127,6 +127,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
test_output: BlockData | list[BlockData] | None = None,
test_mock: dict[str, Any] | None = None,
disabled: bool = False,
static_output: bool = False,
):
"""
Initialize the block with the given schema.
@@ -143,6 +144,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
test_output: The list or single expected output if the test_input is run.
test_mock: function names on the block implementation to mock on test run.
disabled: If the block is disabled, it will not be available for execution.
static_output: Whether the output links of the block are static by default.
"""
self.id = id
self.input_schema = input_schema
@@ -154,6 +156,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
self.categories = categories or set()
self.contributors = contributors or set()
self.disabled = disabled
self.static_output = static_output
@abstractmethod
def run(self, input_data: BlockSchemaInputType) -> BlockOutput:
@@ -180,7 +183,10 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
"outputSchema": self.output_schema.jsonschema(),
"description": self.description,
"categories": [category.dict() for category in self.categories],
"contributors": [contributor.dict() for contributor in self.contributors],
"contributors": [
contributor.model_dump() for contributor in self.contributors
],
"staticOutput": self.static_output,
}
def execute(self, input_data: BlockInput) -> BlockOutput:

View File

@@ -7,8 +7,8 @@ import prisma.types
from prisma.models import AgentGraph, AgentNode, AgentNodeLink
from pydantic import PrivateAttr
from autogpt_server.blocks.basic import InputBlock, OutputBlock, ValueBlock
from autogpt_server.data.block import BlockInput, get_block
from autogpt_server.blocks.basic import InputBlock, OutputBlock
from autogpt_server.data.block import BlockInput, get_block, get_blocks
from autogpt_server.data.db import BaseDbModel, transaction
from autogpt_server.data.user import DEFAULT_USER_ID
from autogpt_server.util import json
@@ -175,10 +175,10 @@ class Graph(GraphMeta):
)
node_map = {v.id: v for v in self.nodes}
def is_value_block(nid: str) -> bool:
def is_static_output_block(nid: str) -> bool:
bid = node_map[nid].block_id
b = get_block(bid)
return isinstance(b, ValueBlock)
return b.static_output if b else False
def is_input_output_block(nid: str) -> bool:
bid = node_map[nid].block_id
@@ -201,19 +201,24 @@ class Graph(GraphMeta):
for i, (node_id, name) in enumerate([source, sink]):
node = node_map.get(node_id)
if not node:
raise ValueError(f"{suffix}, {node_id} is invalid node.")
raise ValueError(
f"{suffix}, {node_id} is invalid node id, available nodes: {node_map.keys()}"
)
block = get_block(node.block_id)
if not block:
raise ValueError(f"{suffix}, {node.block_id} is invalid block.")
blocks = {v.id: v.name for v in get_blocks().values()}
raise ValueError(
f"{suffix}, {node.block_id} is invalid block id, available blocks: {blocks}"
)
sanitized_name = sanitize(name)
if i == 0:
fields = block.output_schema.get_fields()
fields = f"Valid output fields: {block.output_schema.get_fields()}"
else:
fields = block.input_schema.get_fields()
fields = f"Valid input fields: {block.input_schema.get_fields()}"
if sanitized_name not in fields:
raise ValueError(f"{suffix}, `{name}` invalid, fields: {fields}")
raise ValueError(f"{suffix}, `{name}` invalid, {fields}")
if (
subgraph_map.get(link.source_id) != subgraph_map.get(link.sink_id)
@@ -222,7 +227,7 @@ class Graph(GraphMeta):
):
raise ValueError(f"{suffix}, Connecting nodes from different subgraph.")
if is_value_block(link.source_id):
if is_static_output_block(link.source_id):
link.is_static = True  # Output links of static-output blocks should be static.
# TODO: Add type compatibility check here.

View File

@@ -86,7 +86,8 @@ def execute_node(
return
# Execute the node
logger.warning(f"{prefix} execute with input:\n`{exec_data}`")
exec_data_str = str(exec_data).encode("utf-8").decode("unicode_escape")
logger.warning(f"{prefix} execute with input:\n`{exec_data_str}`")
update_execution(ExecutionStatus.RUNNING)
try: