Compare commits

...

10 Commits

Author SHA1 Message Date
SwiftyOS
f713c75464 update meme generator graph 2024-08-19 12:18:52 +02:00
SwiftyOS
a9b8ce13e9 Added example graph 2024-08-16 10:13:50 +02:00
SwiftyOS
c2cd5a4664 Add blocks 2024-08-16 10:12:42 +02:00
Reinier van der Leer
66dfeedcb1 terminate RSS block without delay if not running continuously 2024-08-14 11:45:54 +02:00
Reinier van der Leer
62107c0dd2 fix You Tube block spelling 2024-08-14 11:37:36 +02:00
Reinier van der Leer
ed3be26365 feat(server): Add Dall-E block 2024-08-14 11:37:20 +02:00
SwiftyOS
8d241acd6d added categories 2024-08-14 11:34:10 +02:00
SwiftyOS
8643f70cf2 Added returning as base64 string 2024-08-14 11:24:40 +02:00
SwiftyOS
42af69648d tts v1 working 2024-08-14 11:22:33 +02:00
SwiftyOS
b5b47146c6 added empty files 2024-08-14 11:09:47 +02:00
16 changed files with 593 additions and 12 deletions

View File

@@ -106,6 +106,8 @@ const exceptionMap: Record<string, string> = {
Url: "URL",
Http: "HTTP",
Json: "JSON",
"Dall E": "Dall-E",
"You Tube": "YouTube",
};
const applyExceptions = (str: string): string => {

View File

@@ -167,7 +167,7 @@ class ObjectLookupBlock(ObjectLookupBase[Any]):
class InputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.INPUT})
super().__init__(categories={BlockCategory.INPUT, BlockCategory.BASIC})
def block_id(self) -> str:
return "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b"
@@ -175,7 +175,7 @@ class InputBlock(ObjectLookupBase[Any]):
class OutputBlock(ObjectLookupBase[Any]):
def __init__(self):
super().__init__(categories={BlockCategory.BASIC, BlockCategory.OUTPUT})
super().__init__(categories={ BlockCategory.OUTPUT, BlockCategory.BASIC})
def block_id(self) -> str:
return "363ae599-353e-4804-937e-b2ee3cef3da4"

View File

@@ -1,7 +1,7 @@
from enum import Enum
from typing import Any
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import SchemaField
@@ -56,6 +56,7 @@ class ConditionBlock(Block):
input_schema=ConditionBlock.Input,
output_schema=ConditionBlock.Output,
description="Handles conditional logic based on comparison operators",
categories={BlockCategory.LOGIC},
test_input={
"value1": 10,
"operator": ComparisonOperator.GREATER_THAN.value,

View File

@@ -4,7 +4,7 @@ import aiohttp
import discord
from pydantic import Field
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import BlockSecret, SecretField
@@ -31,6 +31,7 @@ class DiscordReaderBlock(Block):
id="d3f4g5h6-1i2j-3k4l-5m6n-7o8p9q0r1s2t", # Unique ID for the node
input_schema=DiscordReaderBlock.Input, # Assign input schema
output_schema=DiscordReaderBlock.Output, # Assign output schema
categories={BlockCategory.SOCIAL},
test_input={"discord_bot_token": "test_token", "continuous_read": False},
test_output=[
(
@@ -147,6 +148,7 @@ class DiscordMessageSenderBlock(Block):
id="h1i2j3k4-5l6m-7n8o-9p0q-r1s2t3u4v5w6", # Unique ID for the node
input_schema=DiscordMessageSenderBlock.Input, # Assign input schema
output_schema=DiscordMessageSenderBlock.Output, # Assign output schema
categories={BlockCategory.SOCIAL},
test_input={
"discord_bot_token": "YOUR_DISCORD_BOT_TOKEN",
"channel_name": "general",

View File

@@ -45,7 +45,7 @@ class SendEmailBlock(Block):
super().__init__(
id="a1234567-89ab-cdef-0123-456789abcdef",
description="This block sends an email using the provided SMTP credentials.",
categories={BlockCategory.TEXT},
categories={BlockCategory.OUTPUT},
input_schema=SendEmailBlock.Input,
output_schema=SendEmailBlock.Output,
test_input={

View File

@@ -32,7 +32,7 @@ class HttpRequestBlock(Block):
super().__init__(
id="6595ae1f-b924-42cb-9a41-551a0611c4b4",
description="This block makes an HTTP request to the given URL.",
categories={BlockCategory.BASIC},
categories={BlockCategory.INPUT},
input_schema=HttpRequestBlock.Input,
output_schema=HttpRequestBlock.Output,
)

View File

@@ -0,0 +1,99 @@
import logging
from enum import Enum
from openai import OpenAI
from pydantic import HttpUrl
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField
logger = logging.getLogger(__name__)
class ImageSize(str, Enum):
    """Supported output resolutions for DALL-E image generation."""

    SMALL = "256x256"
    MEDIUM = "512x512"
    LARGE = "1024x1024"
class ImageQuality(str, Enum):
    """Rendering quality tiers accepted by the DALL-E API."""

    STANDARD = "standard"
    HD = "hd"
class ImagineWithDallEBlock(Block):
    """Generate one or more images from a text prompt via OpenAI's DALL-E API.

    Each generated image URL is yielded individually on the ``images`` output;
    failures are reported on the ``error`` output instead of raising.
    """

    class Input(BlockSchema):
        prompt: str = SchemaField(description="The prompt to generate the image from.")
        # NOTE(review): generate_image() hard-codes model="dall-e-3", which only
        # accepts n=1 — values up to the le=10 bound will be rejected by the
        # API. Confirm intended model/bounds.
        n: int = SchemaField(
            default=1, description="The number of images to generate.", ge=1, le=10
        )
        # NOTE(review): dall-e-3 does not support 256x256/512x512, so the
        # MEDIUM default may fail at the API — verify supported sizes.
        size: ImageSize = SchemaField(
            default=ImageSize.MEDIUM, description="The size of the generated image(s)."
        )
        quality: ImageQuality = SchemaField(
            default=ImageQuality.STANDARD,
            description="The quality of the generated image(s).",
        )
        api_key: BlockSecret = SecretField(
            value="", description="OpenAI API key for DALL-E."
        )

    class Output(BlockSchema):
        # Yielded once per generated image: a single URL string per yield.
        images: str = SchemaField(
            description="One or more URLs of generated images."
        )
        error: str = SchemaField(
            description="Error message if the image generation failed."
        )

    def __init__(self):
        super().__init__(
            id="7b6ce609-adac-4d27-81e9-6dd2f30d9977",
            description="Generate images using DALL-E based on a text prompt.",
            categories={BlockCategory.AI, BlockCategory.IMAGE},
            input_schema=ImagineWithDallEBlock.Input,
            output_schema=ImagineWithDallEBlock.Output,
            test_input={
                "prompt": "A futuristic city skyline at sunset",
                "n": 1,
                "size": ImageSize.MEDIUM,
                "quality": ImageQuality.STANDARD,
                "api_key": "test_api_key",
            },
            # Fixed: run() yields each URL as a plain string, so the expected
            # output is the single URL — not the list returned by the mock.
            test_output=("images", "https://example.com/generated_image.png"),
            test_mock={
                "generate_image": lambda *args, **kwargs: [
                    "https://example.com/generated_image.png"
                ]
            },
        )

    @staticmethod
    def generate_image(
        api_key: str, prompt: str, n: int, size: ImageSize, quality: ImageQuality
    ) -> list[str]:
        """Call the OpenAI images endpoint and return the generated image URLs."""
        response = OpenAI(api_key=api_key).images.generate(
            model="dall-e-3",
            prompt=prompt,
            n=n,
            size=size.value,
            quality=quality.value,
            response_format="url",
        )
        return [image.url for image in response.data]

    def run(self, input_data: Input) -> BlockOutput:
        """Generate images for the prompt, yielding one URL per image."""
        try:
            api_key = input_data.api_key.get_secret_value()
            image_urls = self.generate_image(
                api_key=api_key,
                prompt=input_data.prompt,
                n=input_data.n,
                size=input_data.size,
                quality=input_data.quality,
            )
            for url in image_urls:
                yield "images", str(url)
        except Exception as e:
            # logger.exception also records the traceback for debugging.
            logger.exception("Error generating DALL-E image: %s", e)
            yield "error", f"Error generating DALL-E image: {str(e)}"

View File

@@ -1,6 +1,6 @@
from typing import Any, List, Tuple
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import SchemaField
@@ -21,6 +21,7 @@ class ForEachBlock(Block):
id="f8e7d6c5-b4a3-2c1d-0e9f-8g7h6i5j4k3l",
input_schema=ForEachBlock.Input,
output_schema=ForEachBlock.Output,
categories={BlockCategory.LOGIC},
test_input={"items": [1, "two", {"three": 3}, [4, 5]]},
test_output=[
("item", (0, 1)),

View File

@@ -2,7 +2,7 @@ import operator
from enum import Enum
from typing import Any
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import SchemaField
@@ -39,6 +39,7 @@ class MathsBlock(Block):
id="b1ab9b19-67a6-406d-abf5-2dba76d00c79",
input_schema=MathsBlock.Input,
output_schema=MathsBlock.Output,
categories={BlockCategory.LOGIC},
test_input={
"operation": Operation.ADD.value,
"a": 10.0,
@@ -97,6 +98,7 @@ class CounterBlock(Block):
id="3c9c2f42-b0c3-435f-ba35-05f7a25c772a",
input_schema=CounterBlock.Input,
output_schema=CounterBlock.Output,
categories={BlockCategory.LOGIC},
test_input={"collection": [1, 2, 3, 4, 5]},
test_output=[
("count", 5),

View File

@@ -2,7 +2,7 @@ from typing import List
import requests
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField
@@ -70,6 +70,7 @@ class CreateMediumPostBlock(Block):
id="3f7b2dcb-4a78-4e3f-b0f1-88132e1b89df",
input_schema=CreateMediumPostBlock.Input,
output_schema=CreateMediumPostBlock.Output,
categories={BlockCategory.SOCIAL},
test_input={
"author_id": "1234567890abcdef",
"title": "Test Post",

View File

@@ -0,0 +1,48 @@
from pathlib import Path
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField
class FileWriterBlock(Block):
    """Write a text payload to a file at a caller-supplied path.

    Creates any missing parent directories, then yields a success status and
    the written path, or an ``error`` message if the write fails.
    """

    class Input(BlockSchema):
        text: str = SchemaField(
            description="The text content to write to the file.",
            placeholder="Hello, world!",
        )
        output_path: str = SchemaField(
            description="The path where the file should be written.",
            placeholder="/path/to/output/file.txt",
        )

    class Output(BlockSchema):
        status: str = SchemaField(description="Status of the file writing operation.")
        file_path: str = SchemaField(description="The path of the written file.")
        error: str = SchemaField(description="Error message if the operation failed.")

    def __init__(self):
        super().__init__(
            # NOTE(review): this id is referenced by the shipped example graph
            # JSON (block_id "6f7b2dcb-..."), so it must stay stable.
            id="6f7b2dcb-4a78-4e3f-b0f1-88132e1b89df",
            description="Writes the given text to a file at the specified output path.",
            categories={BlockCategory.OUTPUT},
            input_schema=FileWriterBlock.Input,
            output_schema=FileWriterBlock.Output,
            test_input={"text": "Hello, world!", "output_path": "/tmp/test_output.txt"},
            test_output=[("status", "success"), ("file_path", "/tmp/test_output.txt")],
        )

    def run(self, input_data: Input) -> BlockOutput:
        """Ensure the destination directory exists, write the text, yield results."""
        try:
            output_path = Path(input_data.output_path)
            # Ensure the destination directory exists before writing.
            output_path.parent.mkdir(parents=True, exist_ok=True)
            # pathlib opens/writes/closes in one call (text mode, default encoding).
            output_path.write_text(input_data.text)
            yield "status", "success"
            yield "file_path", str(output_path)
        except Exception as e:
            yield "error", f"Failed to write file: {str(e)}"

View File

@@ -5,7 +5,7 @@ from typing import Any
import feedparser
import pydantic
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import SchemaField
@@ -46,6 +46,7 @@ class RSSReaderBlock(Block):
id="c6731acb-4105-4zp1-bc9b-03d0036h370g",
input_schema=RSSReaderBlock.Input,
output_schema=RSSReaderBlock.Output,
categories={BlockCategory.OUTPUT},
test_input={
"rss_url": "https://example.com/rss",
"time_period": 10_000_000,
@@ -111,4 +112,5 @@ class RSSReaderBlock(Block):
),
)
time.sleep(input_data.polling_rate)
if keep_going:
time.sleep(input_data.polling_rate)

View File

@@ -0,0 +1,113 @@
from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from pathlib import Path
from openai import OpenAI
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField
import base64
import io
class TextToSpeechBlock(Block):
    """Convert text to speech using OpenAI's TTS API.

    Yields the audio as a base64 string plus size/estimated-duration metadata,
    and optionally writes the raw MP3 bytes to ``output_path``.
    """

    class Input(BlockSchema):
        api_key: BlockSecret = SecretField(
            key="openai_api_key",
            description="Your OpenAI API key",
            placeholder="Enter your OpenAI API key",
        )
        text: str = SchemaField(
            description="The text to convert to speech",
            placeholder="Enter the text you want to convert to speech",
        )
        voice: str = SchemaField(
            description="The voice to use for speech synthesis",
            placeholder="alloy",
            default="alloy",
        )
        model: str = SchemaField(
            default="tts-1",
            description="The TTS model to use",
            placeholder="tts-1",
        )
        output_path: str | None = SchemaField(
            description="The path where the output audio file will be saved (optional)",
            placeholder="/path/to/output/speech.mp3",
            default=None,
        )

    class Output(BlockSchema):
        file_path: str | None = SchemaField(description="The path of the generated audio file (if saved)")
        file_size: int = SchemaField(description="The size of the generated audio in bytes")
        duration: float = SchemaField(description="The duration of the generated audio in seconds")
        file_data: str = SchemaField(description="Base64 encoded string of the audio file")
        error: str = SchemaField(description="Error message if the TTS conversion failed")

    def __init__(self):
        super().__init__(
            # NOTE(review): not a valid UUID ('g'-'p' are not hex digits), but it
            # is referenced by the example graph JSON — do not change in isolation.
            id="1a2b3c4d-5e6f-7g8h-9i0j-1k2l3m4n5o6p",
            input_schema=TextToSpeechBlock.Input,
            output_schema=TextToSpeechBlock.Output,
            description="Uses OpenAI to convert the input string into audio, returning a base64 encoded string and optionally saving as an mp3",
            # Fixed: categories is a set in every other block — was a list here.
            categories={BlockCategory.AI, BlockCategory.OUTPUT},
            test_input={
                "api_key": "your_test_api_key",
                "text": "Hello, this is a test for text-to-speech conversion.",
                "voice": "alloy",
                "model": "tts-1",
                "output_path": "/tmp/test_speech.mp3",
            },
            # Fixed: expected outputs now follow run()'s yield order and match
            # the mocked audio (18 bytes; 8 words / 2.5 words-per-sec = 3.2 s;
            # base64 of b"Mock audio content").
            test_output=[
                ("file_size", 18),
                ("duration", 3.2),
                ("file_data", "TW9jayBhdWRpbyBjb250ZW50"),
                ("file_path", "/tmp/test_speech.mp3"),
            ],
            # Fixed: create_speech() returns raw bytes, so the mock must too;
            # returning the MockResponse object itself made len() fail in run().
            test_mock={
                "create_speech": lambda *args, **kwargs: MockResponse().content,
            },
        )

    def create_speech(self, api_key: str, text: str, voice: str, model: str) -> bytes:
        """Call the OpenAI TTS endpoint and return the raw audio bytes."""
        client = OpenAI(api_key=api_key)
        response = client.audio.speech.create(
            model=model,
            voice=voice,
            input=text
        )
        return response.content

    def run(self, input_data: Input) -> BlockOutput:
        try:
            audio_content = self.create_speech(
                api_key=input_data.api_key.get_secret_value(),
                text=input_data.text,
                voice=input_data.voice,
                model=input_data.model
            )
            file_size = len(audio_content)
            # Encode the audio content as base64 so it can travel as a string.
            file_data = base64.b64encode(audio_content).decode('utf-8')
            # Rough estimate from word count at ~150 words per minute
            # (2.5 words/second); the API does not report a real duration.
            estimated_duration = len(input_data.text.split()) / 2.5
            yield "file_size", file_size
            yield "duration", estimated_duration
            yield "file_data", file_data
            # Persist the raw bytes only when a destination was given.
            if input_data.output_path:
                output_file = Path(input_data.output_path)
                output_file.write_bytes(audio_content)
                yield "file_path", str(output_file)
            else:
                yield "file_path", None
        except Exception as e:
            yield "error", f"Error occurred during text-to-speech conversion: {str(e)}"
class MockResponse:
    """Stand-in for the OpenAI speech response used by the block's test mock."""

    @property
    def content(self) -> bytes:
        """Return a fixed fake audio payload."""
        return b"Mock audio content"

View File

@@ -3,7 +3,7 @@ from urllib.parse import parse_qs, urlparse
from youtube_transcript_api import YouTubeTranscriptApi
from youtube_transcript_api.formatters import TextFormatter
from autogpt_server.data.block import Block, BlockOutput, BlockSchema
from autogpt_server.data.block import Block, BlockOutput, BlockSchema, BlockCategory
from autogpt_server.data.model import SchemaField
@@ -26,6 +26,8 @@ class YouTubeTranscriberBlock(Block):
id="f3a8f7e1-4b1d-4e5f-9f2a-7c3d5a2e6b4c",
input_schema=YouTubeTranscriberBlock.Input,
output_schema=YouTubeTranscriberBlock.Output,
categories={BlockCategory.SOCIAL},
test_input={"youtube_url": "https://www.youtube.com/watch?v=dQw4w9WgXcQ"},
test_output=[
("video_id", "dQw4w9WgXcQ"),

View File

@@ -21,6 +21,7 @@ class BlockCategory(Enum):
SOCIAL = "Block that interacts with social media platforms."
TEXT = "Block that processes text data."
SEARCH = "Block that searches or extracts information from the internet."
IMAGE = "Block that processes images."
BASIC = "Block that performs basic operations."
INPUT = "Block that interacts with input of the graph."
OUTPUT = "Block that interacts with output of the graph."

View File

@@ -0,0 +1,307 @@
{
"id": "f362298f-6636-4b94-8b4d-2b79d0c45428",
"version": 40,
"is_active": true,
"is_template": false,
"name": "Meme Generator with Commentary",
"description": "Generates Memes from the news",
"nodes": [
{
"id": "bc14925e-f7cb-422c-8e15-399d016b17b2",
"block_id": "6f7b2dcb-4a78-4e3f-b0f1-88132e1b89df",
"input_default": {},
"metadata": {
"position": {
"x": 4930.195978249109,
"y": 105.20275048645499
}
}
},
{
"id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"block_id": "31d1064e-7446-4693-a7d4-65e5ca1180d1",
"input_default": {
"key": "description"
},
"metadata": {
"position": {
"x": 1433.4252367500378,
"y": 563.7670429532704
}
}
},
{
"id": "3a8873bb-e81d-4054-8877-929198357e5f",
"block_id": "31d1064e-7446-4693-a7d4-65e5ca1180d1",
"input_default": {
"key": "title",
"value": "title"
},
"metadata": {
"position": {
"x": 1418.935138220137,
"y": 42.55214291836248
}
}
},
{
"id": "e4fa3d36-4345-457e-810e-9f587729d0b3",
"block_id": "b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",
"input_default": {
"key": "title"
},
"metadata": {
"position": {
"x": 610.7649785037777,
"y": -2.0286062852246687
}
}
},
{
"id": "76051f31-965b-4e3e-9fa9-ce66c6862e4a",
"block_id": "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
"input_default": {
"format": "/Users/swifty/dev/agpt/rnd/memes/{title}.mp3"
},
"metadata": {
"position": {
"x": 4190.605357982845,
"y": 1514.1680512375478
}
}
},
{
"id": "d7752b9d-b333-428c-900a-e62b6a2b6b1d",
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
"input_default": {
"model": "gpt-4o",
"sys_prompt": "You are the world's best and funniest meme generator."
},
"metadata": {
"position": {
"x": 2755.051802984057,
"y": 294.9091570008079
}
}
},
{
"id": "4a9de344-fcdf-4fb4-a67c-1c3d6cc05a66",
"block_id": "7b6ce609-adac-4d27-81e9-6dd2f30d9977",
"input_default": {
"size": "1024x1024",
"quality": "standard"
},
"metadata": {
"position": {
"x": 3533.035030217305,
"y": 92.80142769284907
}
}
},
{
"id": "929d9760-d548-47ed-ab2a-c7fb9777c87a",
"block_id": "c6731acb-4105-4zp1-bc9b-03d0036h370g",
"input_default": {
"rss_url": "http://feeds.bbci.co.uk/news/world/europe/rss.xml",
"polling_rate": 0,
"run_continuously": false
},
"metadata": {
"position": {
"x": -34.91733516974466,
"y": 284.663331876495
}
}
},
{
"id": "63cc6a3c-0e36-4688-8002-2ee8adae6b05",
"block_id": "1f292d4a-41a4-4977-9684-7c8d560b9f91",
"input_default": {
"model": "gpt-4o",
"sys_prompt": "Write a sarcastic commentary on the meme"
},
"metadata": {
"position": {
"x": 3501.7447339413593,
"y": 990.7894479350489
}
}
},
{
"id": "1f50e732-0d73-4ecf-9c22-58f065843e9e",
"block_id": "b2g2c3d4-5e6f-7g8h-9i0j-k1l2m3n4o5p6",
"input_default": {
"key": "description"
},
"metadata": {
"position": {
"x": 694.853411307452,
"y": 497.9306050315936
}
}
},
{
"id": "1c58f9dd-cf68-4f89-a256-1178adbd6c63",
"block_id": "1a2b3c4d-5e6f-7g8h-9i0j-1k2l3m4n5o6p",
"input_default": {
"output_path": "/Users/swifty/dev/agpt/rnd/meme.mp3"
},
"metadata": {
"position": {
"x": 5066.380304706531,
"y": 1084.7406735150867
}
}
},
{
"id": "799d5b19-a728-4514-bf36-3bd78fb29770",
"block_id": "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
"input_default": {
"format": "/Users/swifty/dev/agpt/rnd/memes/{title}.txt"
},
"metadata": {
"position": {
"x": 4120.862846371426,
"y": 541.5138784374701
}
}
},
{
"id": "9d0c5f17-724d-4107-b2f2-74129e2b161a",
"block_id": "db7d8f02-2f44-4c55-ab7a-eae0941f0c30",
"input_default": {
"texts": [],
"format": "Generate an amazing meme image description for the following news story. It needs to be funny and sarcastic \n\n{title}\n----\n{description}"
},
"metadata": {
"position": {
"x": 2136.907585373278,
"y": 104.14179049299264
}
}
}
],
"links": [
{
"id": "f100f115-af6d-44be-984a-f4d010eff841",
"source_id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"sink_id": "76051f31-965b-4e3e-9fa9-ce66c6862e4a",
"source_name": "updated_dictionary",
"sink_name": "named_texts",
"is_static": false
},
{
"id": "5a273a27-11af-4e2c-904d-be4340f42bfd",
"source_id": "63cc6a3c-0e36-4688-8002-2ee8adae6b05",
"sink_id": "1c58f9dd-cf68-4f89-a256-1178adbd6c63",
"source_name": "response",
"sink_name": "text",
"is_static": false
},
{
"id": "0ad4d5a8-0309-43d0-a088-7b9431ff99a0",
"source_id": "799d5b19-a728-4514-bf36-3bd78fb29770",
"sink_id": "bc14925e-f7cb-422c-8e15-399d016b17b2",
"source_name": "output",
"sink_name": "output_path",
"is_static": false
},
{
"id": "094f1c54-2d75-45b6-b2fd-f6c426434508",
"source_id": "d7752b9d-b333-428c-900a-e62b6a2b6b1d",
"sink_id": "63cc6a3c-0e36-4688-8002-2ee8adae6b05",
"source_name": "response",
"sink_name": "prompt",
"is_static": false
},
{
"id": "04f03306-2b67-43d1-9a2a-22be79905a41",
"source_id": "76051f31-965b-4e3e-9fa9-ce66c6862e4a",
"sink_id": "1c58f9dd-cf68-4f89-a256-1178adbd6c63",
"source_name": "output",
"sink_name": "output_path",
"is_static": false
},
{
"id": "9d0917e3-528b-497b-a835-4dd8778ffeaa",
"source_id": "929d9760-d548-47ed-ab2a-c7fb9777c87a",
"sink_id": "1f50e732-0d73-4ecf-9c22-58f065843e9e",
"source_name": "entry",
"sink_name": "input",
"is_static": false
},
{
"id": "40459b90-07c2-4ded-be9c-c443d16b2ba8",
"source_id": "9d0c5f17-724d-4107-b2f2-74129e2b161a",
"sink_id": "d7752b9d-b333-428c-900a-e62b6a2b6b1d",
"source_name": "output",
"sink_name": "prompt",
"is_static": false
},
{
"id": "0197c90e-eec9-43be-acc1-d2381a73f588",
"source_id": "1f50e732-0d73-4ecf-9c22-58f065843e9e",
"sink_id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"source_name": "output",
"sink_name": "value",
"is_static": false
},
{
"id": "df585695-80c2-4c45-b0c9-878d9f1a4177",
"source_id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"sink_id": "9d0c5f17-724d-4107-b2f2-74129e2b161a",
"source_name": "updated_dictionary",
"sink_name": "named_texts",
"is_static": false
},
{
"id": "1a618f97-9210-46c5-a983-24a1113f1b5d",
"source_id": "3a8873bb-e81d-4054-8877-929198357e5f",
"sink_id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"source_name": "updated_dictionary",
"sink_name": "dictionary",
"is_static": false
},
{
"id": "d382acd2-cc10-45cc-90d8-2037613e3c32",
"source_id": "929d9760-d548-47ed-ab2a-c7fb9777c87a",
"sink_id": "e4fa3d36-4345-457e-810e-9f587729d0b3",
"source_name": "entry",
"sink_name": "input",
"is_static": false
},
{
"id": "3f9be8af-01ac-4438-babe-92a29218e9a7",
"source_id": "a690170a-af0f-458e-8049-f5b8c33743e5",
"sink_id": "799d5b19-a728-4514-bf36-3bd78fb29770",
"source_name": "updated_dictionary",
"sink_name": "named_texts",
"is_static": false
},
{
"id": "db3785ef-9b57-45a6-a9d9-904f46b127ae",
"source_id": "e4fa3d36-4345-457e-810e-9f587729d0b3",
"sink_id": "3a8873bb-e81d-4054-8877-929198357e5f",
"source_name": "output",
"sink_name": "value",
"is_static": false
},
{
"id": "06dee8cb-b087-4ff5-9eb9-5981cb0d4bb7",
"source_id": "d7752b9d-b333-428c-900a-e62b6a2b6b1d",
"sink_id": "4a9de344-fcdf-4fb4-a67c-1c3d6cc05a66",
"source_name": "response",
"sink_name": "prompt",
"is_static": false
},
{
"id": "b47076c3-0b2e-4884-93ba-c39b042c4b55",
"source_id": "4a9de344-fcdf-4fb4-a67c-1c3d6cc05a66",
"sink_id": "bc14925e-f7cb-422c-8e15-399d016b17b2",
"source_name": "images",
"sink_name": "text",
"is_static": false
}
],
"subgraphs": {}
}