Compare commits

..

3 Commits

Author SHA1 Message Date
Zamil Majdy
7474cf355a feat(blocks): add plural outputs to blocks yielding singular values in loops
Add missing plural output versions to blocks that yield individual items
in loops but don't provide the complete collection:

- GetRedditPostsBlock: add `posts` output for all Reddit posts
- ReadRSSFeedBlock: add `entries` output for all RSS entries
- AddMemoryBlock: add `results` output for all memory operation results

This allows users to access both individual items (for iteration) and the
complete collection (for aggregate operations) from the same block execution.
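A minimal sketch of the resulting pattern (hypothetical block body; `fetch_reddit_posts` and the exact output names are illustrative assumptions, not the committed code):

async def run(self, input_data: Input, **kwargs) -> BlockOutput:
    posts = await fetch_reddit_posts(input_data.subreddit)  # assumed helper
    for post in posts:
        yield "post", post  # singular output: one downstream run per item
    yield "posts", posts  # plural output: the complete collection at once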

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-07-03 12:52:33 -07:00
Zamil Majdy
05d4cd3c6c Update autogpt_platform/backend/backend/blocks/github/pull_requests.py
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2025-07-04 02:38:48 +07:00
Toran Bruce Richards
8f317db046 feat(platform/backend): yield full lists in GitHub blocks 2025-07-03 20:31:00 +01:00
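The test_output reorderings in the GitHub block diffs below mirror the new yield order: each singular item first, then the full list. A hedged sketch of that order (assuming the async-generator convention these files use; `list_tags` is an illustrative helper, not the committed code):

async def run(self, input_data: Input, **kwargs) -> BlockOutput:
    tags = await list_tags(input_data.repo_url)  # assumed helper
    for tag in tags:
        yield "tag", tag  # one output per tag, as before
    yield "tags", tags  # then the complete list once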
63 changed files with 2734 additions and 4512 deletions

View File

@@ -22,7 +22,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
cache-key: ${{ steps.cache-key.outputs.key }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -115,7 +115,7 @@ jobs:
needs: setup
# Only run on dev branch pushes or PRs targeting dev
if: github.ref == 'refs/heads/dev' || github.base_ref == 'dev'
steps:
- name: Checkout repository
uses: actions/checkout@v4
@@ -212,8 +212,6 @@ jobs:
- name: Run Playwright tests
run: pnpm test:no-build --project=${{ matrix.browser }}
env:
BROWSER_TYPE: ${{ matrix.browser }}
- name: Print Final Docker Compose logs
if: always()

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -177,7 +177,7 @@ files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
{file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"},
]
markers = {main = "python_version < \"3.11\"", dev = "python_full_version < \"3.11.3\""}
markers = {main = "python_version == \"3.10\"", dev = "python_full_version < \"3.11.3\""}
[[package]]
name = "attrs"
@@ -390,7 +390,7 @@ description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version < \"3.11\""
markers = "python_version == \"3.10\""
files = [
{file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
{file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
@@ -1667,30 +1667,30 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.12.2"
version = "0.11.10"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be"},
{file = "ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e"},
{file = "ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342"},
{file = "ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a"},
{file = "ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639"},
{file = "ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12"},
{file = "ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e"},
{file = "ruff-0.11.10-py3-none-linux_armv6l.whl", hash = "sha256:859a7bfa7bc8888abbea31ef8a2b411714e6a80f0d173c2a82f9041ed6b50f58"},
{file = "ruff-0.11.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:968220a57e09ea5e4fd48ed1c646419961a0570727c7e069842edd018ee8afed"},
{file = "ruff-0.11.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:1067245bad978e7aa7b22f67113ecc6eb241dca0d9b696144256c3a879663bca"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4854fd09c7aed5b1590e996a81aeff0c9ff51378b084eb5a0b9cd9518e6cff2"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b4564e9f99168c0f9195a0fd5fa5928004b33b377137f978055e40008a082c5"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b6a9cc5b62c03cc1fea0044ed8576379dbaf751d5503d718c973d5418483641"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:607ecbb6f03e44c9e0a93aedacb17b4eb4f3563d00e8b474298a201622677947"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7b3a522fa389402cd2137df9ddefe848f727250535c70dafa840badffb56b7a4"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f071b0deed7e9245d5820dac235cbdd4ef99d7b12ff04c330a241ad3534319f"},
{file = "ruff-0.11.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a60e3a0a617eafba1f2e4186d827759d65348fa53708ca547e384db28406a0b"},
{file = "ruff-0.11.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:da8ec977eaa4b7bf75470fb575bea2cb41a0e07c7ea9d5a0a97d13dbca697bf2"},
{file = "ruff-0.11.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ddf8967e08227d1bd95cc0851ef80d2ad9c7c0c5aab1eba31db49cf0a7b99523"},
{file = "ruff-0.11.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5a94acf798a82db188f6f36575d80609072b032105d114b0f98661e1679c9125"},
{file = "ruff-0.11.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3afead355f1d16d95630df28d4ba17fb2cb9c8dfac8d21ced14984121f639bad"},
{file = "ruff-0.11.10-py3-none-win32.whl", hash = "sha256:dc061a98d32a97211af7e7f3fa1d4ca2fcf919fb96c28f39551f35fc55bdbc19"},
{file = "ruff-0.11.10-py3-none-win_amd64.whl", hash = "sha256:5cc725fbb4d25b0f185cb42df07ab6b76c4489b4bfb740a175f3a59c70e8a224"},
{file = "ruff-0.11.10-py3-none-win_arm64.whl", hash = "sha256:ef69637b35fb8b210743926778d0e45e1bffa850a7c61e428c6b971549b5f5d1"},
{file = "ruff-0.11.10.tar.gz", hash = "sha256:d522fb204b4959909ecac47da02830daec102eeb100fb50ea9554818d47a5fa6"},
]
[[package]]
@@ -1823,7 +1823,7 @@ description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version < \"3.11\""
markers = "python_version == \"3.10\""
files = [
{file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
{file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
@@ -2176,4 +2176,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "574057127b05f28c2ae39f7b11aa0d7c52f857655e9223e23a27c9989b2ac10f"
content-hash = "d92143928a88ca3a56ac200c335910eafac938940022fed8bd0d17c95040b54f"

View File

@@ -23,7 +23,7 @@ uvicorn = "^0.34.3"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.1"
ruff = "^0.12.2"
ruff = "^0.11.10"
[build-system]
requires = ["poetry-core"]

View File

@@ -1,9 +1,12 @@
import enum
from typing import Any
from typing import Any, List
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema, BlockType
from backend.data.model import SchemaField
from backend.util import json
from backend.util.file import store_media_file
from backend.util.mock import MockObject
from backend.util.prompt import estimate_token_count_str
from backend.util.type import MediaFileType, convert
@@ -118,6 +121,266 @@ class PrintToConsoleBlock(Block):
yield "status", "printed"
class FindInDictionaryBlock(Block):
class Input(BlockSchema):
input: Any = SchemaField(description="Dictionary to lookup from")
key: str | int = SchemaField(description="Key to lookup in the dictionary")
class Output(BlockSchema):
output: Any = SchemaField(description="Value found for the given key")
missing: Any = SchemaField(
description="Value of the input that missing the key"
)
def __init__(self):
super().__init__(
id="0e50422c-6dee-4145-83d6-3a5a392f65de",
description="Lookup the given key in the input dictionary/object/list and return the value.",
input_schema=FindInDictionaryBlock.Input,
output_schema=FindInDictionaryBlock.Output,
test_input=[
{"input": {"apple": 1, "banana": 2, "cherry": 3}, "key": "banana"},
{"input": {"x": 10, "y": 20, "z": 30}, "key": "w"},
{"input": [1, 2, 3], "key": 1},
{"input": [1, 2, 3], "key": 3},
{"input": MockObject(value="!!", key="key"), "key": "key"},
{"input": [{"k1": "v1"}, {"k2": "v2"}, {"k1": "v3"}], "key": "k1"},
],
test_output=[
("output", 2),
("missing", {"x": 10, "y": 20, "z": 30}),
("output", 2),
("missing", [1, 2, 3]),
("output", "key"),
("output", ["v1", "v3"]),
],
categories={BlockCategory.BASIC},
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
obj = input_data.input
key = input_data.key
if isinstance(obj, str):
obj = json.loads(obj)
if isinstance(obj, dict) and key in obj:
yield "output", obj[key]
elif isinstance(obj, list) and isinstance(key, int) and 0 <= key < len(obj):
yield "output", obj[key]
elif isinstance(obj, list) and isinstance(key, str):
if len(obj) == 0:
yield "output", []
elif isinstance(obj[0], dict) and key in obj[0]:
yield "output", [item[key] for item in obj if key in item]
else:
yield "output", [getattr(val, key) for val in obj if hasattr(val, key)]
elif isinstance(obj, object) and isinstance(key, str) and hasattr(obj, key):
yield "output", getattr(obj, key)
else:
yield "missing", input_data.input
class AddToDictionaryBlock(Block):
class Input(BlockSchema):
dictionary: dict[Any, Any] = SchemaField(
default_factory=dict,
description="The dictionary to add the entry to. If not provided, a new dictionary will be created.",
)
key: str = SchemaField(
default="",
description="The key for the new entry.",
placeholder="new_key",
advanced=False,
)
value: Any = SchemaField(
default=None,
description="The value for the new entry.",
placeholder="new_value",
advanced=False,
)
entries: dict[Any, Any] = SchemaField(
default_factory=dict,
description="The entries to add to the dictionary. This is the batch version of the `key` and `value` fields.",
advanced=True,
)
class Output(BlockSchema):
updated_dictionary: dict = SchemaField(
description="The dictionary with the new entry added."
)
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="31d1064e-7446-4693-a7d4-65e5ca1180d1",
description="Adds a new key-value pair to a dictionary. If no dictionary is provided, a new one is created.",
categories={BlockCategory.BASIC},
input_schema=AddToDictionaryBlock.Input,
output_schema=AddToDictionaryBlock.Output,
test_input=[
{
"dictionary": {"existing_key": "existing_value"},
"key": "new_key",
"value": "new_value",
},
{"key": "first_key", "value": "first_value"},
{
"dictionary": {"existing_key": "existing_value"},
"entries": {"new_key": "new_value", "first_key": "first_value"},
},
],
test_output=[
(
"updated_dictionary",
{"existing_key": "existing_value", "new_key": "new_value"},
),
("updated_dictionary", {"first_key": "first_value"}),
(
"updated_dictionary",
{
"existing_key": "existing_value",
"new_key": "new_value",
"first_key": "first_value",
},
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
updated_dict = input_data.dictionary.copy()
if input_data.value is not None and input_data.key:
updated_dict[input_data.key] = input_data.value
for key, value in input_data.entries.items():
updated_dict[key] = value
yield "updated_dictionary", updated_dict
class AddToListBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(
default_factory=list,
advanced=False,
description="The list to add the entry to. If not provided, a new list will be created.",
)
entry: Any = SchemaField(
description="The entry to add to the list. Can be of any type (string, int, dict, etc.).",
advanced=False,
default=None,
)
entries: List[Any] = SchemaField(
default_factory=lambda: list(),
description="The entries to add to the list. This is the batch version of the `entry` field.",
advanced=True,
)
position: int | None = SchemaField(
default=None,
description="The position to insert the new entry. If not provided, the entry will be appended to the end of the list.",
)
class Output(BlockSchema):
updated_list: List[Any] = SchemaField(
description="The list with the new entry added."
)
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="aeb08fc1-2fc1-4141-bc8e-f758f183a822",
description="Adds a new entry to a list. The entry can be of any type. If no list is provided, a new one is created.",
categories={BlockCategory.BASIC},
input_schema=AddToListBlock.Input,
output_schema=AddToListBlock.Output,
test_input=[
{
"list": [1, "string", {"existing_key": "existing_value"}],
"entry": {"new_key": "new_value"},
"position": 1,
},
{"entry": "first_entry"},
{"list": ["a", "b", "c"], "entry": "d"},
{
"entry": "e",
"entries": ["f", "g"],
"list": ["a", "b"],
"position": 1,
},
],
test_output=[
(
"updated_list",
[
1,
{"new_key": "new_value"},
"string",
{"existing_key": "existing_value"},
],
),
("updated_list", ["first_entry"]),
("updated_list", ["a", "b", "c", "d"]),
("updated_list", ["a", "f", "g", "e", "b"]),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
entries_added = input_data.entries.copy()
if input_data.entry:
entries_added.append(input_data.entry)
updated_list = input_data.list.copy()
if (pos := input_data.position) is not None:
updated_list = updated_list[:pos] + entries_added + updated_list[pos:]
else:
updated_list += entries_added
yield "updated_list", updated_list
class FindInListBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to search in.")
value: Any = SchemaField(description="The value to search for.")
class Output(BlockSchema):
index: int = SchemaField(description="The index of the value in the list.")
found: bool = SchemaField(
description="Whether the value was found in the list."
)
not_found_value: Any = SchemaField(
description="The value that was not found in the list."
)
def __init__(self):
super().__init__(
id="5e2c6d0a-1e37-489f-b1d0-8e1812b23333",
description="Finds the index of the value in the list.",
categories={BlockCategory.BASIC},
input_schema=FindInListBlock.Input,
output_schema=FindInListBlock.Output,
test_input=[
{"list": [1, 2, 3, 4, 5], "value": 3},
{"list": [1, 2, 3, 4, 5], "value": 6},
],
test_output=[
("index", 2),
("found", True),
("found", False),
("not_found_value", 6),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
yield "index", input_data.list.index(input_data.value)
yield "found", True
except ValueError:
yield "found", False
yield "not_found_value", input_data.value
class NoteBlock(Block):
class Input(BlockSchema):
text: str = SchemaField(description="The text to display in the sticky note.")
@@ -143,6 +406,133 @@ class NoteBlock(Block):
yield "output", input_data.text
class CreateDictionaryBlock(Block):
class Input(BlockSchema):
values: dict[str, Any] = SchemaField(
description="Key-value pairs to create the dictionary with",
placeholder="e.g., {'name': 'Alice', 'age': 25}",
)
class Output(BlockSchema):
dictionary: dict[str, Any] = SchemaField(
description="The created dictionary containing the specified key-value pairs"
)
error: str = SchemaField(
description="Error message if dictionary creation failed"
)
def __init__(self):
super().__init__(
id="b924ddf4-de4f-4b56-9a85-358930dcbc91",
description="Creates a dictionary with the specified key-value pairs. Use this when you know all the values you want to add upfront.",
categories={BlockCategory.DATA},
input_schema=CreateDictionaryBlock.Input,
output_schema=CreateDictionaryBlock.Output,
test_input=[
{
"values": {"name": "Alice", "age": 25, "city": "New York"},
},
{
"values": {"numbers": [1, 2, 3], "active": True, "score": 95.5},
},
],
test_output=[
(
"dictionary",
{"name": "Alice", "age": 25, "city": "New York"},
),
(
"dictionary",
{"numbers": [1, 2, 3], "active": True, "score": 95.5},
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
# The values are already validated by Pydantic schema
yield "dictionary", input_data.values
except Exception as e:
yield "error", f"Failed to create dictionary: {str(e)}"
class CreateListBlock(Block):
class Input(BlockSchema):
values: List[Any] = SchemaField(
description="A list of values to be combined into a new list.",
placeholder="e.g., ['Alice', 25, True]",
)
max_size: int | None = SchemaField(
default=None,
description="Maximum size of the list. If provided, the list will be yielded in chunks of this size.",
advanced=True,
)
max_tokens: int | None = SchemaField(
default=None,
description="Maximum tokens for the list. If provided, the list will be yielded in chunks that fit within this token limit.",
advanced=True,
)
class Output(BlockSchema):
list: List[Any] = SchemaField(
description="The created list containing the specified values."
)
error: str = SchemaField(description="Error message if list creation failed.")
def __init__(self):
super().__init__(
id="a912d5c7-6e00-4542-b2a9-8034136930e4",
description="Creates a list with the specified values. Use this when you know all the values you want to add upfront. This block can also yield the list in batches based on a maximum size or token limit.",
categories={BlockCategory.DATA},
input_schema=CreateListBlock.Input,
output_schema=CreateListBlock.Output,
test_input=[
{
"values": ["Alice", 25, True],
},
{
"values": [1, 2, 3, "four", {"key": "value"}],
},
],
test_output=[
(
"list",
["Alice", 25, True],
),
(
"list",
[1, 2, 3, "four", {"key": "value"}],
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
chunk = []
cur_tokens, max_tokens = 0, input_data.max_tokens
cur_size, max_size = 0, input_data.max_size
for value in input_data.values:
if max_tokens:
tokens = estimate_token_count_str(value)
else:
tokens = 0
# Check if adding this value would exceed either limit
if (max_tokens and (cur_tokens + tokens > max_tokens)) or (
max_size and (cur_size + 1 > max_size)
):
yield "list", chunk
chunk = [value]
cur_size, cur_tokens = 1, tokens
else:
chunk.append(value)
cur_size, cur_tokens = cur_size + 1, cur_tokens + tokens
# Yield final chunk if any
if chunk or not input_data.values:
yield "list", chunk
class TypeOptions(enum.Enum):
STRING = "string"
NUMBER = "number"
@@ -160,7 +550,6 @@ class UniversalTypeConverterBlock(Block):
class Output(BlockSchema):
value: Any = SchemaField(description="The converted value.")
error: str = SchemaField(description="Error message if conversion failed.")
def __init__(self):
super().__init__(

View File

@@ -1,683 +0,0 @@
from typing import Any, List
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField
from backend.util.json import loads
from backend.util.mock import MockObject
from backend.util.prompt import estimate_token_count_str
# =============================================================================
# Dictionary Manipulation Blocks
# =============================================================================
class CreateDictionaryBlock(Block):
class Input(BlockSchema):
values: dict[str, Any] = SchemaField(
description="Key-value pairs to create the dictionary with",
placeholder="e.g., {'name': 'Alice', 'age': 25}",
)
class Output(BlockSchema):
dictionary: dict[str, Any] = SchemaField(
description="The created dictionary containing the specified key-value pairs"
)
error: str = SchemaField(
description="Error message if dictionary creation failed"
)
def __init__(self):
super().__init__(
id="b924ddf4-de4f-4b56-9a85-358930dcbc91",
description="Creates a dictionary with the specified key-value pairs. Use this when you know all the values you want to add upfront.",
categories={BlockCategory.DATA},
input_schema=CreateDictionaryBlock.Input,
output_schema=CreateDictionaryBlock.Output,
test_input=[
{
"values": {"name": "Alice", "age": 25, "city": "New York"},
},
{
"values": {"numbers": [1, 2, 3], "active": True, "score": 95.5},
},
],
test_output=[
(
"dictionary",
{"name": "Alice", "age": 25, "city": "New York"},
),
(
"dictionary",
{"numbers": [1, 2, 3], "active": True, "score": 95.5},
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
# The values are already validated by Pydantic schema
yield "dictionary", input_data.values
except Exception as e:
yield "error", f"Failed to create dictionary: {str(e)}"
class AddToDictionaryBlock(Block):
class Input(BlockSchema):
dictionary: dict[Any, Any] = SchemaField(
default_factory=dict,
description="The dictionary to add the entry to. If not provided, a new dictionary will be created.",
)
key: str = SchemaField(
default="",
description="The key for the new entry.",
placeholder="new_key",
advanced=False,
)
value: Any = SchemaField(
default=None,
description="The value for the new entry.",
placeholder="new_value",
advanced=False,
)
entries: dict[Any, Any] = SchemaField(
default_factory=dict,
description="The entries to add to the dictionary. This is the batch version of the `key` and `value` fields.",
advanced=True,
)
class Output(BlockSchema):
updated_dictionary: dict = SchemaField(
description="The dictionary with the new entry added."
)
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="31d1064e-7446-4693-a7d4-65e5ca1180d1",
description="Adds a new key-value pair to a dictionary. If no dictionary is provided, a new one is created.",
categories={BlockCategory.BASIC},
input_schema=AddToDictionaryBlock.Input,
output_schema=AddToDictionaryBlock.Output,
test_input=[
{
"dictionary": {"existing_key": "existing_value"},
"key": "new_key",
"value": "new_value",
},
{"key": "first_key", "value": "first_value"},
{
"dictionary": {"existing_key": "existing_value"},
"entries": {"new_key": "new_value", "first_key": "first_value"},
},
],
test_output=[
(
"updated_dictionary",
{"existing_key": "existing_value", "new_key": "new_value"},
),
("updated_dictionary", {"first_key": "first_value"}),
(
"updated_dictionary",
{
"existing_key": "existing_value",
"new_key": "new_value",
"first_key": "first_value",
},
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
updated_dict = input_data.dictionary.copy()
if input_data.value is not None and input_data.key:
updated_dict[input_data.key] = input_data.value
for key, value in input_data.entries.items():
updated_dict[key] = value
yield "updated_dictionary", updated_dict
class FindInDictionaryBlock(Block):
class Input(BlockSchema):
input: Any = SchemaField(description="Dictionary to lookup from")
key: str | int = SchemaField(description="Key to lookup in the dictionary")
class Output(BlockSchema):
output: Any = SchemaField(description="Value found for the given key")
missing: Any = SchemaField(
description="Value of the input that missing the key"
)
def __init__(self):
super().__init__(
id="0e50422c-6dee-4145-83d6-3a5a392f65de",
description="Lookup the given key in the input dictionary/object/list and return the value.",
input_schema=FindInDictionaryBlock.Input,
output_schema=FindInDictionaryBlock.Output,
test_input=[
{"input": {"apple": 1, "banana": 2, "cherry": 3}, "key": "banana"},
{"input": {"x": 10, "y": 20, "z": 30}, "key": "w"},
{"input": [1, 2, 3], "key": 1},
{"input": [1, 2, 3], "key": 3},
{"input": MockObject(value="!!", key="key"), "key": "key"},
{"input": [{"k1": "v1"}, {"k2": "v2"}, {"k1": "v3"}], "key": "k1"},
],
test_output=[
("output", 2),
("missing", {"x": 10, "y": 20, "z": 30}),
("output", 2),
("missing", [1, 2, 3]),
("output", "key"),
("output", ["v1", "v3"]),
],
categories={BlockCategory.BASIC},
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
obj = input_data.input
key = input_data.key
if isinstance(obj, str):
obj = loads(obj)
if isinstance(obj, dict) and key in obj:
yield "output", obj[key]
elif isinstance(obj, list) and isinstance(key, int) and 0 <= key < len(obj):
yield "output", obj[key]
elif isinstance(obj, list) and isinstance(key, str):
if len(obj) == 0:
yield "output", []
elif isinstance(obj[0], dict) and key in obj[0]:
yield "output", [item[key] for item in obj if key in item]
else:
yield "output", [getattr(val, key) for val in obj if hasattr(val, key)]
elif isinstance(obj, object) and isinstance(key, str) and hasattr(obj, key):
yield "output", getattr(obj, key)
else:
yield "missing", input_data.input
class RemoveFromDictionaryBlock(Block):
class Input(BlockSchema):
dictionary: dict[Any, Any] = SchemaField(
description="The dictionary to modify."
)
key: str | int = SchemaField(description="Key to remove from the dictionary.")
return_value: bool = SchemaField(
default=False, description="Whether to return the removed value."
)
class Output(BlockSchema):
updated_dictionary: dict[Any, Any] = SchemaField(
description="The dictionary after removal."
)
removed_value: Any = SchemaField(description="The removed value if requested.")
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="46afe2ea-c613-43f8-95ff-6692c3ef6876",
description="Removes a key-value pair from a dictionary.",
categories={BlockCategory.BASIC},
input_schema=RemoveFromDictionaryBlock.Input,
output_schema=RemoveFromDictionaryBlock.Output,
test_input=[
{
"dictionary": {"a": 1, "b": 2, "c": 3},
"key": "b",
"return_value": True,
},
{"dictionary": {"x": "hello", "y": "world"}, "key": "x"},
],
test_output=[
("updated_dictionary", {"a": 1, "c": 3}),
("removed_value", 2),
("updated_dictionary", {"y": "world"}),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
updated_dict = input_data.dictionary.copy()
try:
removed_value = updated_dict.pop(input_data.key)
yield "updated_dictionary", updated_dict
if input_data.return_value:
yield "removed_value", removed_value
except KeyError:
yield "error", f"Key '{input_data.key}' not found in dictionary"
class ReplaceDictionaryValueBlock(Block):
class Input(BlockSchema):
dictionary: dict[Any, Any] = SchemaField(
description="The dictionary to modify."
)
key: str | int = SchemaField(description="Key to replace the value for.")
value: Any = SchemaField(description="The new value for the given key.")
class Output(BlockSchema):
updated_dictionary: dict[Any, Any] = SchemaField(
description="The dictionary after replacement."
)
old_value: Any = SchemaField(description="The value that was replaced.")
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="27e31876-18b6-44f3-ab97-f6226d8b3889",
description="Replaces the value for a specified key in a dictionary.",
categories={BlockCategory.BASIC},
input_schema=ReplaceDictionaryValueBlock.Input,
output_schema=ReplaceDictionaryValueBlock.Output,
test_input=[
{"dictionary": {"a": 1, "b": 2, "c": 3}, "key": "b", "value": 99},
{
"dictionary": {"x": "hello", "y": "world"},
"key": "y",
"value": "universe",
},
],
test_output=[
("updated_dictionary", {"a": 1, "b": 99, "c": 3}),
("old_value", 2),
("updated_dictionary", {"x": "hello", "y": "universe"}),
("old_value", "world"),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
updated_dict = input_data.dictionary.copy()
try:
old_value = updated_dict[input_data.key]
updated_dict[input_data.key] = input_data.value
yield "updated_dictionary", updated_dict
yield "old_value", old_value
except KeyError:
yield "error", f"Key '{input_data.key}' not found in dictionary"
class DictionaryIsEmptyBlock(Block):
class Input(BlockSchema):
dictionary: dict[Any, Any] = SchemaField(description="The dictionary to check.")
class Output(BlockSchema):
is_empty: bool = SchemaField(description="True if the dictionary is empty.")
def __init__(self):
super().__init__(
id="a3cf3f64-6bb9-4cc6-9900-608a0b3359b0",
description="Checks if a dictionary is empty.",
categories={BlockCategory.BASIC},
input_schema=DictionaryIsEmptyBlock.Input,
output_schema=DictionaryIsEmptyBlock.Output,
test_input=[{"dictionary": {}}, {"dictionary": {"a": 1}}],
test_output=[("is_empty", True), ("is_empty", False)],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
yield "is_empty", len(input_data.dictionary) == 0
# =============================================================================
# List Manipulation Blocks
# =============================================================================
class CreateListBlock(Block):
class Input(BlockSchema):
values: List[Any] = SchemaField(
description="A list of values to be combined into a new list.",
placeholder="e.g., ['Alice', 25, True]",
)
max_size: int | None = SchemaField(
default=None,
description="Maximum size of the list. If provided, the list will be yielded in chunks of this size.",
advanced=True,
)
max_tokens: int | None = SchemaField(
default=None,
description="Maximum tokens for the list. If provided, the list will be yielded in chunks that fit within this token limit.",
advanced=True,
)
class Output(BlockSchema):
list: List[Any] = SchemaField(
description="The created list containing the specified values."
)
error: str = SchemaField(description="Error message if list creation failed.")
def __init__(self):
super().__init__(
id="a912d5c7-6e00-4542-b2a9-8034136930e4",
description="Creates a list with the specified values. Use this when you know all the values you want to add upfront. This block can also yield the list in batches based on a maximum size or token limit.",
categories={BlockCategory.DATA},
input_schema=CreateListBlock.Input,
output_schema=CreateListBlock.Output,
test_input=[
{
"values": ["Alice", 25, True],
},
{
"values": [1, 2, 3, "four", {"key": "value"}],
},
],
test_output=[
(
"list",
["Alice", 25, True],
),
(
"list",
[1, 2, 3, "four", {"key": "value"}],
),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
chunk = []
cur_tokens, max_tokens = 0, input_data.max_tokens
cur_size, max_size = 0, input_data.max_size
for value in input_data.values:
if max_tokens:
tokens = estimate_token_count_str(value)
else:
tokens = 0
# Check if adding this value would exceed either limit
if (max_tokens and (cur_tokens + tokens > max_tokens)) or (
max_size and (cur_size + 1 > max_size)
):
yield "list", chunk
chunk = [value]
cur_size, cur_tokens = 1, tokens
else:
chunk.append(value)
cur_size, cur_tokens = cur_size + 1, cur_tokens + tokens
# Yield final chunk if any
if chunk or not input_data.values:
yield "list", chunk
class AddToListBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(
default_factory=list,
advanced=False,
description="The list to add the entry to. If not provided, a new list will be created.",
)
entry: Any = SchemaField(
description="The entry to add to the list. Can be of any type (string, int, dict, etc.).",
advanced=False,
default=None,
)
entries: List[Any] = SchemaField(
default_factory=lambda: list(),
description="The entries to add to the list. This is the batch version of the `entry` field.",
advanced=True,
)
position: int | None = SchemaField(
default=None,
description="The position to insert the new entry. If not provided, the entry will be appended to the end of the list.",
)
class Output(BlockSchema):
updated_list: List[Any] = SchemaField(
description="The list with the new entry added."
)
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="aeb08fc1-2fc1-4141-bc8e-f758f183a822",
description="Adds a new entry to a list. The entry can be of any type. If no list is provided, a new one is created.",
categories={BlockCategory.BASIC},
input_schema=AddToListBlock.Input,
output_schema=AddToListBlock.Output,
test_input=[
{
"list": [1, "string", {"existing_key": "existing_value"}],
"entry": {"new_key": "new_value"},
"position": 1,
},
{"entry": "first_entry"},
{"list": ["a", "b", "c"], "entry": "d"},
{
"entry": "e",
"entries": ["f", "g"],
"list": ["a", "b"],
"position": 1,
},
],
test_output=[
(
"updated_list",
[
1,
{"new_key": "new_value"},
"string",
{"existing_key": "existing_value"},
],
),
("updated_list", ["first_entry"]),
("updated_list", ["a", "b", "c", "d"]),
("updated_list", ["a", "f", "g", "e", "b"]),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
entries_added = input_data.entries.copy()
if input_data.entry:
entries_added.append(input_data.entry)
updated_list = input_data.list.copy()
if (pos := input_data.position) is not None:
updated_list = updated_list[:pos] + entries_added + updated_list[pos:]
else:
updated_list += entries_added
yield "updated_list", updated_list
class FindInListBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to search in.")
value: Any = SchemaField(description="The value to search for.")
class Output(BlockSchema):
index: int = SchemaField(description="The index of the value in the list.")
found: bool = SchemaField(
description="Whether the value was found in the list."
)
not_found_value: Any = SchemaField(
description="The value that was not found in the list."
)
def __init__(self):
super().__init__(
id="5e2c6d0a-1e37-489f-b1d0-8e1812b23333",
description="Finds the index of the value in the list.",
categories={BlockCategory.BASIC},
input_schema=FindInListBlock.Input,
output_schema=FindInListBlock.Output,
test_input=[
{"list": [1, 2, 3, 4, 5], "value": 3},
{"list": [1, 2, 3, 4, 5], "value": 6},
],
test_output=[
("index", 2),
("found", True),
("found", False),
("not_found_value", 6),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
yield "index", input_data.list.index(input_data.value)
yield "found", True
except ValueError:
yield "found", False
yield "not_found_value", input_data.value
class GetListItemBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to get the item from.")
index: int = SchemaField(
description="The 0-based index of the item (supports negative indices)."
)
class Output(BlockSchema):
item: Any = SchemaField(description="The item at the specified index.")
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="262ca24c-1025-43cf-a578-534e23234e97",
description="Returns the element at the given index.",
categories={BlockCategory.BASIC},
input_schema=GetListItemBlock.Input,
output_schema=GetListItemBlock.Output,
test_input=[
{"list": [1, 2, 3], "index": 1},
{"list": [1, 2, 3], "index": -1},
],
test_output=[
("item", 2),
("item", 3),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
try:
yield "item", input_data.list[input_data.index]
except IndexError:
yield "error", "Index out of range"
class RemoveFromListBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to modify.")
value: Any = SchemaField(
default=None, description="Value to remove from the list."
)
index: int | None = SchemaField(
default=None,
description="Index of the item to pop (supports negative indices).",
)
return_item: bool = SchemaField(
default=False, description="Whether to return the removed item."
)
class Output(BlockSchema):
updated_list: List[Any] = SchemaField(description="The list after removal.")
removed_item: Any = SchemaField(description="The removed item if requested.")
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="d93c5a93-ac7e-41c1-ae5c-ef67e6e9b826",
description="Removes an item from a list by value or index.",
categories={BlockCategory.BASIC},
input_schema=RemoveFromListBlock.Input,
output_schema=RemoveFromListBlock.Output,
test_input=[
{"list": [1, 2, 3], "index": 1, "return_item": True},
{"list": ["a", "b", "c"], "value": "b"},
],
test_output=[
("updated_list", [1, 3]),
("removed_item", 2),
("updated_list", ["a", "c"]),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
lst = input_data.list.copy()
removed = None
try:
if input_data.index is not None:
removed = lst.pop(input_data.index)
elif input_data.value is not None:
lst.remove(input_data.value)
removed = input_data.value
else:
raise ValueError("No index or value provided for removal")
except (IndexError, ValueError):
yield "error", "Index or value not found"
return
yield "updated_list", lst
if input_data.return_item:
yield "removed_item", removed
class ReplaceListItemBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to modify.")
index: int = SchemaField(
description="Index of the item to replace (supports negative indices)."
)
value: Any = SchemaField(description="The new value for the given index.")
class Output(BlockSchema):
updated_list: List[Any] = SchemaField(description="The list after replacement.")
old_item: Any = SchemaField(description="The item that was replaced.")
error: str = SchemaField(description="Error message if the operation failed.")
def __init__(self):
super().__init__(
id="fbf62922-bea1-4a3d-8bac-23587f810b38",
description="Replaces an item at the specified index.",
categories={BlockCategory.BASIC},
input_schema=ReplaceListItemBlock.Input,
output_schema=ReplaceListItemBlock.Output,
test_input=[
{"list": [1, 2, 3], "index": 1, "value": 99},
{"list": ["a", "b"], "index": -1, "value": "c"},
],
test_output=[
("updated_list", [1, 99, 3]),
("old_item", 2),
("updated_list", ["a", "c"]),
("old_item", "b"),
],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
lst = input_data.list.copy()
try:
old = lst[input_data.index]
lst[input_data.index] = input_data.value
except IndexError:
yield "error", "Index out of range"
return
yield "updated_list", lst
yield "old_item", old
class ListIsEmptyBlock(Block):
class Input(BlockSchema):
list: List[Any] = SchemaField(description="The list to check.")
class Output(BlockSchema):
is_empty: bool = SchemaField(description="True if the list is empty.")
def __init__(self):
super().__init__(
id="896ed73b-27d0-41be-813c-c1c1dc856c03",
description="Checks if a list is empty.",
categories={BlockCategory.BASIC},
input_schema=ListIsEmptyBlock.Input,
output_schema=ListIsEmptyBlock.Output,
test_input=[{"list": []}, {"list": [1]}],
test_output=[("is_empty", True), ("is_empty", False)],
)
async def run(self, input_data: Input, **kwargs) -> BlockOutput:
yield "is_empty", len(input_data.list) == 0

View File

@@ -516,6 +516,13 @@ class GithubListIssuesBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"issue",
{
"title": "Issue 1",
"url": "https://github.com/owner/repo/issues/1",
},
),
(
"issues",
[
@@ -525,13 +532,6 @@ class GithubListIssuesBlock(Block):
}
],
),
(
"issue",
{
"title": "Issue 1",
"url": "https://github.com/owner/repo/issues/1",
},
),
],
test_mock={
"list_issues": lambda *args, **kwargs: [

View File

@@ -34,9 +34,7 @@ class GithubListPullRequestsBlock(Block):
pull_requests: list[PRItem] = SchemaField(
description="List of pull requests with their title and URL"
)
error: str = SchemaField(
description="Error message if listing pull requests failed"
)
error: str = SchemaField(description="Error message if listing pull requests failed")
def __init__(self):
super().__init__(
@@ -51,6 +49,13 @@ class GithubListPullRequestsBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"pull_request",
{
"title": "Pull request 1",
"url": "https://github.com/owner/repo/pull/1",
},
),
(
"pull_requests",
[
@@ -60,13 +65,6 @@ class GithubListPullRequestsBlock(Block):
}
],
),
(
"pull_request",
{
"title": "Pull request 1",
"url": "https://github.com/owner/repo/pull/1",
},
),
],
test_mock={
"list_prs": lambda *args, **kwargs: [
@@ -495,6 +493,13 @@ class GithubListPRReviewersBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"reviewer",
{
"username": "reviewer1",
"url": "https://github.com/reviewer1",
},
),
(
"reviewers",
[
@@ -504,13 +509,6 @@ class GithubListPRReviewersBlock(Block):
}
],
),
(
"reviewer",
{
"username": "reviewer1",
"url": "https://github.com/reviewer1",
},
),
],
test_mock={
"list_reviewers": lambda *args, **kwargs: [

View File

@@ -49,6 +49,13 @@ class GithubListTagsBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"tag",
{
"name": "v1.0.0",
"url": "https://github.com/owner/repo/tree/v1.0.0",
},
),
(
"tags",
[
@@ -58,13 +65,6 @@ class GithubListTagsBlock(Block):
}
],
),
(
"tag",
{
"name": "v1.0.0",
"url": "https://github.com/owner/repo/tree/v1.0.0",
},
),
],
test_mock={
"list_tags": lambda *args, **kwargs: [
@@ -145,6 +145,13 @@ class GithubListBranchesBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"branch",
{
"name": "main",
"url": "https://github.com/owner/repo/tree/main",
},
),
(
"branches",
[
@@ -154,13 +161,6 @@ class GithubListBranchesBlock(Block):
}
],
),
(
"branch",
{
"name": "main",
"url": "https://github.com/owner/repo/tree/main",
},
),
],
test_mock={
"list_branches": lambda *args, **kwargs: [
@@ -246,6 +246,13 @@ class GithubListDiscussionsBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"discussion",
{
"title": "Discussion 1",
"url": "https://github.com/owner/repo/discussions/1",
},
),
(
"discussions",
[
@@ -255,13 +262,6 @@ class GithubListDiscussionsBlock(Block):
}
],
),
(
"discussion",
{
"title": "Discussion 1",
"url": "https://github.com/owner/repo/discussions/1",
},
),
],
test_mock={
"list_discussions": lambda *args, **kwargs: [
@@ -357,6 +357,13 @@ class GithubListReleasesBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"release",
{
"name": "v1.0.0",
"url": "https://github.com/owner/repo/releases/tag/v1.0.0",
},
),
(
"releases",
[
@@ -366,13 +373,6 @@ class GithubListReleasesBlock(Block):
}
],
),
(
"release",
{
"name": "v1.0.0",
"url": "https://github.com/owner/repo/releases/tag/v1.0.0",
},
),
],
test_mock={
"list_releases": lambda *args, **kwargs: [
@@ -1113,6 +1113,13 @@ class GithubListStargazersBlock(Block):
},
test_credentials=TEST_CREDENTIALS,
test_output=[
(
"stargazer",
{
"username": "octocat",
"url": "https://github.com/octocat",
},
),
(
"stargazers",
[
@@ -1122,13 +1129,6 @@ class GithubListStargazersBlock(Block):
}
],
),
(
"stargazer",
{
"username": "octocat",
"url": "https://github.com/octocat",
},
),
],
test_mock={
"list_stargazers": lambda *args, **kwargs: [

View File

@@ -1,6 +1,5 @@
import asyncio
from enum import Enum
from typing import Any
from google.oauth2.credentials import Credentials
from googleapiclient.discovery import build
@@ -42,43 +41,6 @@ def parse_a1_notation(a1: str) -> tuple[str | None, str]:
return None, a1
def extract_spreadsheet_id(spreadsheet_id_or_url: str) -> str:
"""Extract spreadsheet ID from either a direct ID or a Google Sheets URL.
Examples
--------
>>> extract_spreadsheet_id("1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms")
"1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms"
>>> extract_spreadsheet_id("https://docs.google.com/spreadsheets/d/1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms/edit")
"1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms"
"""
if "/spreadsheets/d/" in spreadsheet_id_or_url:
# Extract ID from URL: https://docs.google.com/spreadsheets/d/{ID}/edit...
parts = spreadsheet_id_or_url.split("/d/")[1].split("/")[0]
return parts
return spreadsheet_id_or_url
def format_sheet_name(sheet_name: str) -> str:
"""Format sheet name for Google Sheets API, adding quotes if needed.
Examples
--------
>>> format_sheet_name("Sheet1")
"Sheet1"
>>> format_sheet_name("Non-matching Leads")
"'Non-matching Leads'"
"""
# If sheet name contains spaces, special characters, or starts with a digit, wrap in quotes
if (
" " in sheet_name
or any(char in sheet_name for char in "!@#$%^&*()+-=[]{}|;:,.<>?")
or (sheet_name and sheet_name[0].isdigit())
):
return f"'{sheet_name}'"
return sheet_name
def _first_sheet_meta(service, spreadsheet_id: str) -> tuple[str, int]:
"""Return *(title, sheetId)* for the first sheet in *spreadsheet_id*."""
@@ -91,27 +53,10 @@ def _first_sheet_meta(service, spreadsheet_id: str) -> tuple[str, int]:
return first["title"], first["sheetId"]
def get_all_sheet_names(service, spreadsheet_id: str) -> list[str]:
"""Get all sheet names in the spreadsheet."""
meta = service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
return [
sheet.get("properties", {}).get("title", "") for sheet in meta.get("sheets", [])
]
def resolve_sheet_name(service, spreadsheet_id: str, sheet_name: str | None) -> str:
"""Resolve *sheet_name*, falling back to the workbook's first sheet if empty.
"""Resolve *sheet_name*, falling back to the workbook's first sheet if empty."""
Validates that the sheet exists in the spreadsheet and provides helpful error info.
"""
if sheet_name:
# Validate that the sheet exists
all_sheets = get_all_sheet_names(service, spreadsheet_id)
if sheet_name not in all_sheets:
raise ValueError(
f'Sheet "{sheet_name}" not found in spreadsheet. '
f"Available sheets: {all_sheets}"
)
return sheet_name
title, _ = _first_sheet_meta(service, spreadsheet_id)
return title
@@ -127,35 +72,6 @@ def sheet_id_by_name(service, spreadsheet_id: str, sheet_name: str) -> int | Non
return None
def _convert_dicts_to_rows(
data: list[dict[str, Any]], headers: list[str]
) -> list[list[str]]:
"""Convert list of dictionaries to list of rows using the specified header order.
Args:
data: List of dictionaries to convert
headers: List of column headers to use for ordering
Returns:
List of rows where each row is a list of string values in header order
"""
if not data:
return []
if not headers:
raise ValueError("Headers are required when using list[dict] format")
rows = []
for item in data:
row = []
for header in headers:
value = item.get(header, "")
row.append(str(value) if value is not None else "")
rows.append(row)
return rows
def _build_sheets_service(credentials: GoogleCredentials):
settings = Settings()
creds = Credentials(
@@ -183,16 +99,6 @@ class SheetOperation(str, Enum):
COPY = "copy"
class ValueInputOption(str, Enum):
RAW = "RAW"
USER_ENTERED = "USER_ENTERED"
class InsertDataOption(str, Enum):
OVERWRITE = "OVERWRITE"
INSERT_ROWS = "INSERT_ROWS"
class BatchOperationType(str, Enum):
UPDATE = "update"
CLEAR = "clear"
@@ -214,8 +120,7 @@ class GoogleSheetsReadBlock(Block):
["https://www.googleapis.com/auth/spreadsheets.readonly"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to read from",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to read from",
)
range: str = SchemaField(
description="The A1 notation of the range to read",
@@ -264,9 +169,8 @@ class GoogleSheetsReadBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
data = await asyncio.to_thread(
self._read_sheet, service, spreadsheet_id, input_data.range
self._read_sheet, service, input_data.spreadsheet_id, input_data.range
)
yield "result", data
@@ -282,8 +186,7 @@ class GoogleSheetsWriteBlock(Block):
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to write to",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to write to",
)
range: str = SchemaField(
description="The A1 notation of the range to write",
@@ -337,11 +240,10 @@ class GoogleSheetsWriteBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._write_sheet,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.range,
input_data.values,
)
@@ -370,41 +272,12 @@ class GoogleSheetsAppendBlock(Block):
credentials: GoogleCredentialsInput = GoogleCredentialsField(
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="Spreadsheet ID or URL",
title="Spreadsheet ID or URL",
)
spreadsheet_id: str = SchemaField(description="Spreadsheet ID")
sheet_name: str = SchemaField(
description="Optional sheet to append to (defaults to first sheet)",
default="",
)
values: list[list[str]] = SchemaField(
description="Rows to append as list of rows (list[list[str]])",
default=[],
)
dict_values: list[dict[str, Any]] = SchemaField(
description="Rows to append as list of dictionaries (list[dict])",
default=[],
)
headers: list[str] = SchemaField(
description="Column headers to use for ordering dict values (required when dict_values is provided)",
default=[],
)
range: str = SchemaField(
description="Range to append to (e.g. 'A:A' for column A only, 'A:C' for columns A-C, or leave empty for unlimited columns). When empty, data will span as many columns as needed.",
default="",
advanced=True,
)
value_input_option: ValueInputOption = SchemaField(
description="How input data should be interpreted",
default=ValueInputOption.USER_ENTERED,
advanced=True,
)
insert_data_option: InsertDataOption = SchemaField(
description="How new data should be inserted",
default=InsertDataOption.INSERT_ROWS,
advanced=True,
)
values: list[list[str]] = SchemaField(description="Rows to append")
class Output(BlockSchema):
result: dict = SchemaField(description="Append API response")
@@ -413,7 +286,7 @@ class GoogleSheetsAppendBlock(Block):
def __init__(self):
super().__init__(
id="531d50c0-d6b9-4cf9-a013-7bf783d313c7",
description="Append data to a Google Sheet. Use 'values' for list of rows (list[list[str]]) or 'dict_values' with 'headers' for list of dictionaries (list[dict]). Data is added to the next empty row without overwriting existing content. Leave range empty for unlimited columns, or specify range like 'A:A' to constrain to specific columns.",
description="Append data to a Google Sheet (sheet optional)",
categories={BlockCategory.DATA},
input_schema=GoogleSheetsAppendBlock.Input,
output_schema=GoogleSheetsAppendBlock.Output,
@@ -440,34 +313,12 @@ class GoogleSheetsAppendBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
# Determine which values to use and convert if needed
processed_values: list[list[str]]
# Validate that only one format is provided
if input_data.values and input_data.dict_values:
raise ValueError("Provide either 'values' or 'dict_values', not both")
if input_data.dict_values:
if not input_data.headers:
raise ValueError("Headers are required when using dict_values")
processed_values = _convert_dicts_to_rows(
input_data.dict_values, input_data.headers
)
elif input_data.values:
processed_values = input_data.values
else:
raise ValueError("Either 'values' or 'dict_values' must be provided")
result = await asyncio.to_thread(
self._append_sheet,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.sheet_name,
processed_values,
input_data.range,
input_data.value_input_option,
input_data.insert_data_option,
input_data.values,
)
yield "result", result
@@ -477,28 +328,17 @@ class GoogleSheetsAppendBlock(Block):
spreadsheet_id: str,
sheet_name: str,
values: list[list[str]],
range: str,
value_input_option: ValueInputOption,
insert_data_option: InsertDataOption,
) -> dict:
target_sheet = resolve_sheet_name(service, spreadsheet_id, sheet_name)
formatted_sheet = format_sheet_name(target_sheet)
# If no range specified, use A1 to let Google Sheets find the next empty row with unlimited columns
# If range specified, use it to constrain columns (e.g., A:A for column A only)
if range:
append_range = f"{formatted_sheet}!{range}"
else:
# Use A1 as starting point for unlimited columns - Google Sheets will find next empty row
append_range = f"{formatted_sheet}!A1"
body = {"values": values}
return (
service.spreadsheets()
.values()
.append(
spreadsheetId=spreadsheet_id,
range=append_range,
valueInputOption=value_input_option.value,
insertDataOption=insert_data_option.value,
range=f"{target_sheet}!A:A",
valueInputOption="USER_ENTERED",
insertDataOption="INSERT_ROWS",
body=body,
)
.execute()
@@ -511,8 +351,7 @@ class GoogleSheetsClearBlock(Block):
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to clear",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to clear",
)
range: str = SchemaField(
description="The A1 notation of the range to clear",
@@ -554,11 +393,10 @@ class GoogleSheetsClearBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._clear_range,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.range,
)
yield "result", result
@@ -579,8 +417,7 @@ class GoogleSheetsMetadataBlock(Block):
["https://www.googleapis.com/auth/spreadsheets.readonly"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to get metadata for",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to get metadata for",
)
class Output(BlockSchema):
@@ -625,11 +462,10 @@ class GoogleSheetsMetadataBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._get_metadata,
service,
spreadsheet_id,
input_data.spreadsheet_id,
)
yield "result", result
@@ -659,10 +495,7 @@ class GoogleSheetsManageSheetBlock(Block):
credentials: GoogleCredentialsInput = GoogleCredentialsField(
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="Spreadsheet ID or URL",
title="Spreadsheet ID or URL",
)
spreadsheet_id: str = SchemaField(description="Spreadsheet ID")
operation: SheetOperation = SchemaField(description="Operation to perform")
sheet_name: str = SchemaField(
description="Target sheet name (defaults to first sheet for delete)",
@@ -707,11 +540,10 @@ class GoogleSheetsManageSheetBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._manage_sheet,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.operation,
input_data.sheet_name,
input_data.source_sheet_id,
@@ -768,8 +600,7 @@ class GoogleSheetsBatchOperationsBlock(Block):
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to perform batch operations on",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to perform batch operations on",
)
operations: list[BatchOperation] = SchemaField(
description="List of operations to perform",
@@ -823,11 +654,10 @@ class GoogleSheetsBatchOperationsBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._batch_operations,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.operations,
)
yield "result", result
@@ -885,8 +715,7 @@ class GoogleSheetsFindReplaceBlock(Block):
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to perform find/replace on",
title="Spreadsheet ID or URL",
description="The ID of the spreadsheet to perform find/replace on",
)
find_text: str = SchemaField(
description="The text to find",
@@ -944,11 +773,10 @@ class GoogleSheetsFindReplaceBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._find_replace,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.find_text,
input_data.replace_text,
input_data.sheet_id,
@@ -989,283 +817,12 @@ class GoogleSheetsFindReplaceBlock(Block):
return result
class GoogleSheetsFindBlock(Block):
class Input(BlockSchema):
credentials: GoogleCredentialsInput = GoogleCredentialsField(
["https://www.googleapis.com/auth/spreadsheets.readonly"]
)
spreadsheet_id: str = SchemaField(
description="The ID or URL of the spreadsheet to search in",
title="Spreadsheet ID or URL",
)
find_text: str = SchemaField(
description="The text to find",
)
sheet_id: int = SchemaField(
description="The ID of the specific sheet to search (optional, searches all sheets if not provided)",
default=-1,
)
match_case: bool = SchemaField(
description="Whether to match case",
default=False,
)
match_entire_cell: bool = SchemaField(
description="Whether to match entire cell",
default=False,
)
find_all: bool = SchemaField(
description="Whether to find all occurrences (true) or just the first one (false)",
default=True,
)
range: str = SchemaField(
description="The A1 notation range to search in (optional, searches entire sheet if not provided)",
default="",
advanced=True,
)
class Output(BlockSchema):
result: dict = SchemaField(
description="The result of the find operation including locations and count",
)
locations: list[dict] = SchemaField(
description="List of cell locations where the text was found",
)
count: int = SchemaField(
description="Number of occurrences found",
)
error: str = SchemaField(
description="Error message if any",
)
def __init__(self):
super().__init__(
id="0f4ecc72-b958-47b2-b65e-76d6d26b9b27",
description="Find text in a Google Sheets spreadsheet. Returns locations and count of occurrences. Can find all occurrences or just the first one.",
categories={BlockCategory.DATA},
input_schema=GoogleSheetsFindBlock.Input,
output_schema=GoogleSheetsFindBlock.Output,
disabled=GOOGLE_SHEETS_DISABLED,
test_input={
"spreadsheet_id": "1BxiMVs0XRA5nFMdKvBdBZjgmUUqptlbs74OgvE2upms",
"find_text": "search_value",
"match_case": False,
"match_entire_cell": False,
"find_all": True,
"range": "Sheet1!A1:C10",
"credentials": TEST_CREDENTIALS_INPUT,
},
test_credentials=TEST_CREDENTIALS,
test_output=[
("count", 3),
(
"locations",
[
{"sheet": "Sheet1", "row": 2, "column": 1, "address": "A2"},
{"sheet": "Sheet1", "row": 5, "column": 3, "address": "C5"},
{"sheet": "Sheet2", "row": 1, "column": 2, "address": "B1"},
],
),
("result", {"success": True}),
],
test_mock={
"_find_text": lambda *args, **kwargs: {
"locations": [
{"sheet": "Sheet1", "row": 2, "column": 1, "address": "A2"},
{"sheet": "Sheet1", "row": 5, "column": 3, "address": "C5"},
{"sheet": "Sheet2", "row": 1, "column": 2, "address": "B1"},
],
"count": 3,
},
},
)
async def run(
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._find_text,
service,
spreadsheet_id,
input_data.find_text,
input_data.sheet_id,
input_data.match_case,
input_data.match_entire_cell,
input_data.find_all,
input_data.range,
)
yield "count", result["count"]
yield "locations", result["locations"]
yield "result", {"success": True}
def _find_text(
self,
service,
spreadsheet_id: str,
find_text: str,
sheet_id: int,
match_case: bool,
match_entire_cell: bool,
find_all: bool,
range: str,
) -> dict:
# Unfortunately, Google Sheets API doesn't have a dedicated "find-only" operation
# that returns cell locations. The findReplace operation only returns a count.
# So we need to search through the values manually to get location details.
locations = []
search_range = range if range else None
if not search_range:
# If no range specified, search entire spreadsheet
meta = service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
sheets = meta.get("sheets", [])
# Filter to specific sheet if provided
if sheet_id >= 0:
sheets = [
s
for s in sheets
if s.get("properties", {}).get("sheetId") == sheet_id
]
# Search each sheet
for sheet in sheets:
sheet_name = sheet.get("properties", {}).get("title", "")
sheet_range = f"'{sheet_name}'"
self._search_range(
service,
spreadsheet_id,
sheet_range,
sheet_name,
find_text,
match_case,
match_entire_cell,
find_all,
locations,
)
if not find_all and locations:
break
else:
# Search specific range
sheet_name, cell_range = parse_a1_notation(search_range)
if not sheet_name:
# Get first sheet name if not specified
meta = (
service.spreadsheets().get(spreadsheetId=spreadsheet_id).execute()
)
sheet_name = (
meta.get("sheets", [{}])[0]
.get("properties", {})
.get("title", "Sheet1")
)
search_range = f"'{sheet_name}'!{search_range}"
self._search_range(
service,
spreadsheet_id,
search_range,
sheet_name,
find_text,
match_case,
match_entire_cell,
find_all,
locations,
)
return {"locations": locations, "count": len(locations)}
def _search_range(
self,
service,
spreadsheet_id: str,
range_name: str,
sheet_name: str,
find_text: str,
match_case: bool,
match_entire_cell: bool,
find_all: bool,
locations: list,
):
"""Search within a specific range and add results to locations list."""
values_result = (
service.spreadsheets()
.values()
.get(spreadsheetId=spreadsheet_id, range=range_name)
.execute()
)
values = values_result.get("values", [])
# Parse range to get starting position
_, cell_range = parse_a1_notation(range_name)
start_col = 0
start_row = 0
if cell_range and ":" in cell_range:
start_cell = cell_range.split(":")[0]
# Parse A1 notation (e.g., "B3" -> col=1, row=2)
col_part = ""
row_part = ""
for char in start_cell:
if char.isalpha():
col_part += char
elif char.isdigit():
row_part += char
if col_part:
    # Base-26 conversion so multi-letter columns ("AA", "AB", ...) parse too;
    # ord() on a multi-character string would raise TypeError.
    start_col = 0
    for char in col_part.upper():
        start_col = start_col * 26 + (ord(char) - ord("A") + 1)
    start_col -= 1
if row_part:
start_row = int(row_part) - 1
# Search through values
for row_idx, row in enumerate(values):
for col_idx, cell_value in enumerate(row):
if cell_value is None:
continue
cell_str = str(cell_value)
# Apply search criteria
search_text = find_text if match_case else find_text.lower()
cell_text = cell_str if match_case else cell_str.lower()
found = False
if match_entire_cell:
found = cell_text == search_text
else:
found = search_text in cell_text
if found:
# Calculate actual spreadsheet position
actual_row = start_row + row_idx + 1
actual_col = start_col + col_idx + 1
# Convert the 0-based column index back to letters (handles AA, AB, ...).
col_index = start_col + col_idx
col_letter = ""
while col_index >= 0:
    col_letter = chr(ord("A") + col_index % 26) + col_letter
    col_index = col_index // 26 - 1
address = f"{col_letter}{actual_row}"
location = {
"sheet": sheet_name,
"row": actual_row,
"column": actual_col,
"address": address,
"value": cell_str,
}
locations.append(location)
# Stop after first match if find_all is False
if not find_all:
return
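# Quick sanity sketch for the A1 column arithmetic above (hypothetical helper,
# not part of the block). parse_a1_notation (assumed) splits "Sheet1!A1:C10"
# into ("Sheet1", "A1:C10"); column letters map to 0-based indices base-26:
def _col_to_index(letters: str) -> int:
    index = 0
    for char in letters.upper():
        index = index * 26 + (ord(char) - ord("A") + 1)
    return index - 1

assert _col_to_index("A") == 0
assert _col_to_index("Z") == 25
assert _col_to_index("AA") == 26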
class GoogleSheetsFormatBlock(Block):
class Input(BlockSchema):
credentials: GoogleCredentialsInput = GoogleCredentialsField(
["https://www.googleapis.com/auth/spreadsheets"]
)
spreadsheet_id: str = SchemaField(
description="Spreadsheet ID or URL",
title="Spreadsheet ID or URL",
)
spreadsheet_id: str = SchemaField(description="Spreadsheet ID")
range: str = SchemaField(description="A1 notation sheet optional")
background_color: dict = SchemaField(default={})
text_color: dict = SchemaField(default={})
@@ -1301,11 +858,10 @@ class GoogleSheetsFormatBlock(Block):
self, input_data: Input, *, credentials: GoogleCredentials, **kwargs
) -> BlockOutput:
service = _build_sheets_service(credentials)
spreadsheet_id = extract_spreadsheet_id(input_data.spreadsheet_id)
result = await asyncio.to_thread(
self._format_cells,
service,
spreadsheet_id,
input_data.spreadsheet_id,
input_data.range,
input_data.background_color,
input_data.text_color,

View File

@@ -127,9 +127,6 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
"perplexity/llama-3.1-sonar-large-128k-online"
)
PERPLEXITY_SONAR = "perplexity/sonar"
PERPLEXITY_SONAR_PRO = "perplexity/sonar-pro"
PERPLEXITY_SONAR_DEEP_RESEARCH = "perplexity/sonar-deep-research"
QWEN_QWQ_32B_PREVIEW = "qwen/qwq-32b-preview"
NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B = "nousresearch/hermes-3-llama-3.1-405b"
NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B = "nousresearch/hermes-3-llama-3.1-70b"
@@ -232,13 +229,6 @@ MODEL_METADATA = {
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
"open_router", 127072, 127072
),
LlmModel.PERPLEXITY_SONAR: ModelMetadata("open_router", 127000, 127000),
LlmModel.PERPLEXITY_SONAR_PRO: ModelMetadata("open_router", 200000, 8000),
LlmModel.PERPLEXITY_SONAR_DEEP_RESEARCH: ModelMetadata(
"open_router",
128000,
128000,
),
LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 32768, 32768),
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata(
"open_router", 131000, 4096
@@ -283,7 +273,6 @@ class LLMResponse(BaseModel):
tool_calls: Optional[List[ToolContentBlock]] | None
prompt_tokens: int
completion_tokens: int
reasoning: Optional[str] = None
def convert_openai_tool_fmt_to_anthropic(
@@ -318,46 +307,6 @@ def convert_openai_tool_fmt_to_anthropic(
return anthropic_tools
def extract_openai_reasoning(response) -> str | None:
"""Extract reasoning from OpenAI-compatible response if available."""
"""Note: This will likely not working since the reasoning is not present in another Response API"""
reasoning = None
choice = response.choices[0]
if hasattr(choice, "reasoning") and getattr(choice, "reasoning", None):
reasoning = str(getattr(choice, "reasoning"))
elif hasattr(response, "reasoning") and getattr(response, "reasoning", None):
reasoning = str(getattr(response, "reasoning"))
elif hasattr(choice.message, "reasoning") and getattr(
choice.message, "reasoning", None
):
reasoning = str(getattr(choice.message, "reasoning"))
return reasoning
def extract_openai_tool_calls(response) -> list[ToolContentBlock] | None:
"""Extract tool calls from OpenAI-compatible response."""
if response.choices[0].message.tool_calls:
return [
ToolContentBlock(
id=tool.id,
type=tool.type,
function=ToolCall(
name=tool.function.name,
arguments=tool.function.arguments,
),
)
for tool in response.choices[0].message.tool_calls
]
return None
def get_parallel_tool_calls_param(llm_model: LlmModel, parallel_tool_calls):
"""Get the appropriate parallel_tool_calls parameter for OpenAI-compatible APIs."""
if llm_model.startswith("o") or parallel_tool_calls is None:
return openai.NOT_GIVEN
return parallel_tool_calls
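# Illustration of the helper above (enum member names are assumptions for the
# sketch): o-series reasoning models reject parallel_tool_calls, so it is
# withheld; for other models the caller's value passes through unchanged.
#
#   get_parallel_tool_calls_param(LlmModel.O3_MINI, True)  # -> openai.NOT_GIVEN
#   get_parallel_tool_calls_param(LlmModel.GPT_4O, True)   # -> True
#   get_parallel_tool_calls_param(LlmModel.GPT_4O, None)   # -> openai.NOT_GIVEN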
async def llm_call(
credentials: APIKeyCredentials,
llm_model: LlmModel,
@@ -411,9 +360,8 @@ async def llm_call(
oai_client = openai.AsyncOpenAI(api_key=credentials.api_key.get_secret_value())
response_format = None
parallel_tool_calls = get_parallel_tool_calls_param(
llm_model, parallel_tool_calls
)
if llm_model.startswith("o") or parallel_tool_calls is None:
parallel_tool_calls = openai.NOT_GIVEN
if json_format:
response_format = {"type": "json_object"}
@@ -427,8 +375,20 @@ async def llm_call(
parallel_tool_calls=parallel_tool_calls,
)
tool_calls = extract_openai_tool_calls(response)
reasoning = extract_openai_reasoning(response)
if response.choices[0].message.tool_calls:
tool_calls = [
ToolContentBlock(
id=tool.id,
type=tool.type,
function=ToolCall(
name=tool.function.name,
arguments=tool.function.arguments,
),
)
for tool in response.choices[0].message.tool_calls
]
else:
tool_calls = None
return LLMResponse(
raw_response=response.choices[0].message,
@@ -437,7 +397,6 @@ async def llm_call(
tool_calls=tool_calls,
prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
completion_tokens=response.usage.completion_tokens if response.usage else 0,
reasoning=reasoning,
)
elif provider == "anthropic":
@@ -499,12 +458,6 @@ async def llm_call(
f"Tool use stop reason but no tool calls found in content. {resp}"
)
reasoning = None
for content_block in resp.content:
if hasattr(content_block, "type") and content_block.type == "thinking":
reasoning = content_block.thinking
break
return LLMResponse(
raw_response=resp,
prompt=prompt,
@@ -516,7 +469,6 @@ async def llm_call(
tool_calls=tool_calls,
prompt_tokens=resp.usage.input_tokens,
completion_tokens=resp.usage.output_tokens,
reasoning=reasoning,
)
except anthropic.APIError as e:
error_message = f"Anthropic API error: {str(e)}"
@@ -541,7 +493,6 @@ async def llm_call(
tool_calls=None,
prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
completion_tokens=response.usage.completion_tokens if response.usage else 0,
reasoning=None,
)
elif provider == "ollama":
if tools:
@@ -563,7 +514,6 @@ async def llm_call(
tool_calls=None,
prompt_tokens=response.get("prompt_eval_count") or 0,
completion_tokens=response.get("eval_count") or 0,
reasoning=None,
)
elif provider == "open_router":
tools_param = tools if tools else openai.NOT_GIVEN
@@ -572,10 +522,6 @@ async def llm_call(
api_key=credentials.api_key.get_secret_value(),
)
parallel_tool_calls_param = get_parallel_tool_calls_param(
llm_model, parallel_tool_calls
)
response = await client.chat.completions.create(
extra_headers={
"HTTP-Referer": "https://agpt.co",
@@ -585,7 +531,6 @@ async def llm_call(
messages=prompt, # type: ignore
max_tokens=max_tokens,
tools=tools_param, # type: ignore
parallel_tool_calls=parallel_tool_calls_param,
)
# If there's no response, raise an error
@@ -595,8 +540,19 @@ async def llm_call(
else:
raise ValueError("No response from OpenRouter.")
tool_calls = extract_openai_tool_calls(response)
reasoning = extract_openai_reasoning(response)
if response.choices[0].message.tool_calls:
tool_calls = [
ToolContentBlock(
id=tool.id,
type=tool.type,
function=ToolCall(
name=tool.function.name, arguments=tool.function.arguments
),
)
for tool in response.choices[0].message.tool_calls
]
else:
tool_calls = None
return LLMResponse(
raw_response=response.choices[0].message,
@@ -605,7 +561,6 @@ async def llm_call(
tool_calls=tool_calls,
prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
completion_tokens=response.usage.completion_tokens if response.usage else 0,
reasoning=reasoning,
)
elif provider == "llama_api":
tools_param = tools if tools else openai.NOT_GIVEN
@@ -614,10 +569,6 @@ async def llm_call(
api_key=credentials.api_key.get_secret_value(),
)
parallel_tool_calls_param = get_parallel_tool_calls_param(
llm_model, parallel_tool_calls
)
response = await client.chat.completions.create(
extra_headers={
"HTTP-Referer": "https://agpt.co",
@@ -627,7 +578,9 @@ async def llm_call(
messages=prompt, # type: ignore
max_tokens=max_tokens,
tools=tools_param, # type: ignore
parallel_tool_calls=parallel_tool_calls_param,
parallel_tool_calls=(
openai.NOT_GIVEN if parallel_tool_calls is None else parallel_tool_calls
),
)
# If there's no response, raise an error
@@ -637,8 +590,19 @@ async def llm_call(
else:
raise ValueError("No response from Llama API.")
tool_calls = extract_openai_tool_calls(response)
reasoning = extract_openai_reasoning(response)
if response.choices[0].message.tool_calls:
tool_calls = [
ToolContentBlock(
id=tool.id,
type=tool.type,
function=ToolCall(
name=tool.function.name, arguments=tool.function.arguments
),
)
for tool in response.choices[0].message.tool_calls
]
else:
tool_calls = None
return LLMResponse(
raw_response=response.choices[0].message,
@@ -647,7 +611,6 @@ async def llm_call(
tool_calls=tool_calls,
prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
completion_tokens=response.usage.completion_tokens if response.usage else 0,
reasoning=reasoning,
)
elif provider == "aiml_api":
client = openai.OpenAI(
@@ -671,7 +634,6 @@ async def llm_call(
completion_tokens=(
completion.usage.completion_tokens if completion.usage else 0
),
reasoning=None,
)
else:
raise ValueError(f"Unsupported LLM provider: {provider}")
@@ -785,7 +747,6 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
tool_calls=None,
prompt_tokens=0,
completion_tokens=0,
reasoning=None,
)
},
)

View File

@@ -128,7 +128,6 @@ class RetrieveInformationBlock(Block):
("value", {"theme": "light", "language": "en"}),
],
test_mock={"_retrieve_data": lambda *args, **kwargs: None},
static_output=True,
)
async def run(

View File

@@ -117,18 +117,6 @@ class GetRedditPostsBlock(Block):
"post_limit": 2,
},
test_output=[
(
"post",
RedditPost(
id="id1", subreddit="subreddit", title="title1", body="body1"
),
),
(
"post",
RedditPost(
id="id2", subreddit="subreddit", title="title2", body="body2"
),
),
(
"posts",
[
@@ -146,6 +134,18 @@ class GetRedditPostsBlock(Block):
),
],
),
(
"post",
RedditPost(
id="id1", subreddit="subreddit", title="title1", body="body1"
),
),
(
"post",
RedditPost(
id="id2", subreddit="subreddit", title="title2", body="body2"
),
),
],
test_mock={
"get_posts": lambda input_data, credentials: [

View File

@@ -56,17 +56,6 @@ class ReadRSSFeedBlock(Block):
"run_continuously": False,
},
test_output=[
(
"entry",
RSSEntry(
title="Example RSS Item",
link="https://example.com/article",
description="This is an example RSS item description.",
pub_date=datetime(2023, 6, 23, 12, 30, 0, tzinfo=timezone.utc),
author="John Doe",
categories=["Technology", "News"],
),
),
(
"entries",
[
@@ -82,6 +71,17 @@ class ReadRSSFeedBlock(Block):
),
],
),
(
"entry",
RSSEntry(
title="Example RSS Item",
link="https://example.com/article",
description="This is an example RSS item description.",
pub_date=datetime(2023, 6, 23, 12, 30, 0, tzinfo=timezone.utc),
author="John Doe",
categories=["Technology", "News"],
),
),
],
test_mock={
"parse_feed": lambda *args, **kwargs: {

View File

@@ -452,33 +452,28 @@ class SmartDecisionMakerBlock(Block):
if pending_tool_calls and input_data.last_tool_output is None:
raise ValueError(f"Tool call requires an output for {pending_tool_calls}")
# Only assign the last tool output to the first pending tool call
tool_output = []
if pending_tool_calls and input_data.last_tool_output is not None:
# Get the first pending tool call ID
first_call_id = next(iter(pending_tool_calls.keys()))
tool_output.append(
_create_tool_response(first_call_id, input_data.last_tool_output)
# Prefill all missing tool calls with the last tool output.
# TODO: we need a better way to handle this.
tool_output = [
_create_tool_response(pending_call_id, input_data.last_tool_output)
for pending_call_id, count in pending_tool_calls.items()
for _ in range(count)
]
# If the SDM block only calls 1 tool at a time, this should not happen.
if len(tool_output) > 1:
logger.warning(
f"[SmartDecisionMakerBlock-node_exec_id={node_exec_id}] "
f"Multiple pending tool calls are prefilled using a single output. "
f"Execution may not be accurate."
)
# Add tool output to prompt right away
prompt.extend(tool_output)
# Check if there are still pending tool calls after handling the first one
remaining_pending_calls = get_pending_tool_calls(prompt)
# If there are still pending tool calls, yield the conversation and return early
if remaining_pending_calls:
yield "conversations", prompt
return
# Fallback on adding tool output in the conversation history as user prompt.
elif input_data.last_tool_output:
logger.error(
if len(tool_output) == 0 and input_data.last_tool_output:
logger.warning(
f"[SmartDecisionMakerBlock-node_exec_id={node_exec_id}] "
f"No pending tool calls found. This may indicate an issue with the "
f"conversation history, or the tool giving response more than once."
f"This should not happen! Please check the conversation history for any inconsistencies."
f"conversation history, or an LLM calling two tools at the same time."
)
tool_output.append(
{
@@ -486,7 +481,8 @@ class SmartDecisionMakerBlock(Block):
"content": f"Last tool output: {json.dumps(input_data.last_tool_output)}",
}
)
prompt.extend(tool_output)
prompt.extend(tool_output)
if input_data.multiple_tool_calls:
input_data.sys_prompt += "\nYou can call a tool, or several different tools, multiple times in a single response."
else:
@@ -554,11 +550,5 @@ class SmartDecisionMakerBlock(Block):
else:
yield f"tools_^_{tool_name}_~_{arg_name}", None
# Add reasoning to conversation history if available
if response.reasoning:
prompt.append(
{"role": "assistant", "content": f"[Reasoning]: {response.reasoning}"}
)
prompt.append(response.raw_response)
yield "conversations", prompt
response.prompt.append(response.raw_response)
yield "conversations", response.prompt

View File

@@ -85,9 +85,6 @@ MODEL_COST: dict[LlmModel, int] = {
LlmModel.EVA_QWEN_2_5_32B: 1,
LlmModel.DEEPSEEK_CHAT: 2,
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: 1,
LlmModel.PERPLEXITY_SONAR: 1,
LlmModel.PERPLEXITY_SONAR_PRO: 5,
LlmModel.PERPLEXITY_SONAR_DEEP_RESEARCH: 10,
LlmModel.QWEN_QWQ_32B_PREVIEW: 2,
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: 1,
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: 1,

View File

@@ -389,10 +389,8 @@ class GraphModel(Graph):
# Reassign Link IDs
for link in graph.links:
if link.source_id in id_map:
link.source_id = id_map[link.source_id]
if link.sink_id in id_map:
link.sink_id = id_map[link.sink_id]
link.source_id = id_map[link.source_id]
link.sink_id = id_map[link.sink_id]
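# The unconditional remap above assumes id_map covers every node id before
# links are rewritten — e.g. (hypothetical sketch of that precondition):
#
#   id_map = {node.id: str(uuid.uuid4()) for node in graph.nodes}
#
# With the `in id_map` guards removed, a link endpoint missing from id_map
# raises KeyError instead of being silently left unmapped.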
# Reassign User IDs for agent blocks
for node in graph.nodes:

View File

@@ -7,8 +7,7 @@ from prisma.models import User
import backend.server.v2.library.model
import backend.server.v2.store.model
from backend.blocks.basic import StoreValueBlock
from backend.blocks.data_manipulation import FindInDictionaryBlock
from backend.blocks.basic import FindInDictionaryBlock, StoreValueBlock
from backend.blocks.io import AgentInputBlock
from backend.blocks.maths import CalculatorBlock, Operation
from backend.data import execution, graph

View File

@@ -669,14 +669,17 @@ async def execute_graph(
)
async def stop_graph_run(
graph_id: str, graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> execution_db.GraphExecutionMeta | None:
) -> execution_db.GraphExecutionMeta:
res = await _stop_graph_run(
user_id=user_id,
graph_id=graph_id,
graph_exec_id=graph_exec_id,
)
if not res:
return None
raise HTTPException(
status_code=HTTP_404_NOT_FOUND,
detail=f"Graph execution #{graph_exec_id} not found.",
)
return res[0]

View File

@@ -170,14 +170,7 @@ async def get_library_agent(id: str, user_id: str) -> library_model.LibraryAgent
if not library_agent:
raise NotFoundError(f"Library agent #{id} not found")
return library_model.LibraryAgent.from_db(
library_agent,
sub_graphs=(
await graph_db.get_sub_graphs(library_agent.AgentGraph)
if library_agent.AgentGraph
else None
),
)
return library_model.LibraryAgent.from_db(library_agent)
except prisma.errors.PrismaError as e:
logger.error(f"Database error fetching library agent: {e}")

View File

@@ -51,7 +51,7 @@ class LibraryAgent(pydantic.BaseModel):
description: str
input_schema: dict[str, Any] # Should be BlockIOObjectSubSchema in frontend
credentials_input_schema: dict[str, Any] | None = pydantic.Field(
credentials_input_schema: dict[str, Any] = pydantic.Field(
description="Input schema for credentials required by the agent",
)
@@ -70,10 +70,7 @@ class LibraryAgent(pydantic.BaseModel):
is_latest_version: bool
@staticmethod
def from_db(
agent: prisma.models.LibraryAgent,
sub_graphs: Optional[list[prisma.models.AgentGraph]] = None,
) -> "LibraryAgent":
def from_db(agent: prisma.models.LibraryAgent) -> "LibraryAgent":
"""
Factory method that constructs a LibraryAgent from a Prisma LibraryAgent
model instance.
@@ -81,7 +78,7 @@ class LibraryAgent(pydantic.BaseModel):
if not agent.AgentGraph:
raise ValueError("Associated Agent record is required.")
graph = graph_model.GraphModel.from_db(agent.AgentGraph, sub_graphs=sub_graphs)
graph = graph_model.GraphModel.from_db(agent.AgentGraph)
agent_updated_at = agent.AgentGraph.updatedAt
lib_agent_updated_at = agent.updatedAt
@@ -126,9 +123,7 @@ class LibraryAgent(pydantic.BaseModel):
name=graph.name,
description=graph.description,
input_schema=graph.input_schema,
credentials_input_schema=(
graph.credentials_input_schema if sub_graphs else None
),
credentials_input_schema=graph.credentials_input_schema,
has_external_trigger=graph.has_webhook_trigger,
trigger_setup_info=(
LibraryAgentTriggerInfo(

View File

@@ -12,7 +12,7 @@ from backend.util import json
def _tok_len(text: str, enc) -> int:
"""True token length of *text* in tokenizer *enc* (no wrapper cost)."""
return len(enc.encode(str(text)))
return len(enc.encode(text))
def _msg_tokens(msg: dict, enc) -> int:
@@ -29,7 +29,7 @@ def _truncate_middle_tokens(text: str, enc, max_tok: int) -> str:
Return *text* shortened to ≈max_tok tokens by keeping the head & tail
and inserting an ellipsis token in the middle.
"""
ids = enc.encode(str(text))
ids = enc.encode(text)
if len(ids) <= max_tok:
return text # nothing to do
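# A minimal sketch (assumption — not the module's actual continuation) of the
# head-and-tail join once len(ids) > max_tok: split the budget between the
# start and end of the text, with an encoded ellipsis in between.
ellipsis_ids = enc.encode(" … ")
keep = max(max_tok - len(ellipsis_ids), 2)
head, tail = keep // 2, keep - keep // 2
return enc.decode(ids[:head] + ellipsis_ids + ids[-tail:])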

View File

@@ -1,6 +1,5 @@
import json
import types
from typing import Any, Type, TypeVar, Union, cast, get_args, get_origin, overload
from typing import Any, Type, TypeVar, cast, get_args, get_origin
from prisma import Json as PrismaJson
@@ -105,37 +104,9 @@ def __convert_bool(value: Any) -> bool:
return bool(value)
def _try_convert(value: Any, target_type: Any, raise_on_mismatch: bool) -> Any:
def _try_convert(value: Any, target_type: Type, raise_on_mismatch: bool) -> Any:
origin = get_origin(target_type)
args = get_args(target_type)
# Handle Union types (including Optional which is Union[T, None])
if origin is Union or origin is types.UnionType:
# Handle None values for Optional types
if value is None:
if type(None) in args:
return None
elif raise_on_mismatch:
raise TypeError(f"Value {value} is not of expected type {target_type}")
else:
return value
# Try to convert to each type in the union, excluding None
non_none_types = [arg for arg in args if arg is not type(None)]
# Try each type in the union, using the original raise_on_mismatch behavior
for arg_type in non_none_types:
try:
return _try_convert(value, arg_type, raise_on_mismatch)
except (TypeError, ValueError, ConversionError):
continue
# If no conversion succeeded
if raise_on_mismatch:
raise TypeError(f"Value {value} is not of expected type {target_type}")
else:
return value
if origin is None:
origin = target_type
if origin not in [list, dict, tuple, str, set, int, float, bool]:
@@ -218,19 +189,11 @@ def type_match(value: Any, target_type: Type[T]) -> T:
return cast(T, _try_convert(value, target_type, raise_on_mismatch=True))
@overload
def convert(value: Any, target_type: Type[T]) -> T: ...
@overload
def convert(value: Any, target_type: Any) -> Any: ...
def convert(value: Any, target_type: Any) -> Any:
def convert(value: Any, target_type: Type[T]) -> T:
try:
if isinstance(value, PrismaJson):
value = value.data
return _try_convert(value, target_type, raise_on_mismatch=False)
return cast(T, _try_convert(value, target_type, raise_on_mismatch=False))
except Exception as e:
raise ConversionError(f"Failed to convert {value} to {target_type}") from e
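# Usage sketch for the Union/Optional path above; expected values are taken
# from the removed tests further down:
#
#   convert("7", Optional[int])   == 7
#   convert("7", int | None)      == 7
#   convert(None, Optional[int])  is None   # NoneType is among the union args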
@@ -240,7 +203,6 @@ class FormattedStringType(str):
@classmethod
def __get_pydantic_core_schema__(cls, source_type, handler):
_ = source_type # unused parameter required by pydantic
return handler(str)
@classmethod

View File

@@ -1,5 +1,3 @@
from typing import List, Optional
from backend.util.type import convert
@@ -7,8 +5,6 @@ def test_type_conversion():
assert convert(5.5, int) == 5
assert convert("5.5", int) == 5
assert convert([1, 2, 3], int) == 3
assert convert("7", Optional[int]) == 7
assert convert("7", int | None) == 7
assert convert("5.5", float) == 5.5
assert convert(5, float) == 5.0
@@ -29,6 +25,8 @@ def test_type_conversion():
assert convert([1, 2, 3], dict) == {0: 1, 1: 2, 2: 3}
assert convert((1, 2, 3), dict) == {0: 1, 1: 2, 2: 3}
from typing import List
assert convert("5", List[int]) == [5]
assert convert("[5,4,2]", List[int]) == [5, 4, 2]
assert convert([5, 4, 2], List[str]) == ["5", "4", "2"]

View File

@@ -31,18 +31,18 @@ files = [
[[package]]
name = "aiodns"
version = "3.5.0"
version = "3.4.0"
description = "Simple DNS resolver for asyncio"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "aiodns-3.5.0-py3-none-any.whl", hash = "sha256:6d0404f7d5215849233f6ee44854f2bb2481adf71b336b2279016ea5990ca5c5"},
{file = "aiodns-3.5.0.tar.gz", hash = "sha256:11264edbab51896ecf546c18eb0dd56dff0428c6aa6d2cd87e643e07300eb310"},
{file = "aiodns-3.4.0-py3-none-any.whl", hash = "sha256:4da2b25f7475343f3afbb363a2bfe46afa544f2b318acb9a945065e622f4ed24"},
{file = "aiodns-3.4.0.tar.gz", hash = "sha256:24b0ae58410530367f21234d0c848e4de52c1f16fbddc111726a4ab536ec1b2f"},
]
[package.dependencies]
pycares = ">=4.9.0"
pycares = ">=4.0.0"
[[package]]
name = "aiofiles"
@@ -222,14 +222,14 @@ files = [
[[package]]
name = "anthropic"
version = "0.57.1"
version = "0.51.0"
description = "The official Python library for the anthropic API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "anthropic-0.57.1-py3-none-any.whl", hash = "sha256:33afc1f395af207d07ff1bffc0a3d1caac53c371793792569c5d2f09283ea306"},
{file = "anthropic-0.57.1.tar.gz", hash = "sha256:7815dd92245a70d21f65f356f33fc80c5072eada87fb49437767ea2918b2c4b0"},
{file = "anthropic-0.51.0-py3-none-any.whl", hash = "sha256:b8b47d482c9aa1f81b923555cebb687c2730309a20d01be554730c8302e0f62a"},
{file = "anthropic-0.51.0.tar.gz", hash = "sha256:6f824451277992af079554430d5b2c8ff5bc059cc2c968cdc3f06824437da201"},
]
[package.dependencies]
@@ -242,7 +242,6 @@ sniffio = "*"
typing-extensions = ">=4.10,<5"
[package.extras]
aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.6)"]
bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"]
vertex = ["google-auth[requests] (>=2,<3)"]
@@ -1006,14 +1005,14 @@ pgp = ["gpg"]
[[package]]
name = "e2b"
version = "1.5.4"
version = "1.5.0"
description = "E2B SDK that give agents cloud environments"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "e2b-1.5.4-py3-none-any.whl", hash = "sha256:9c8d22f9203311dff890e037823596daaba3d793300238117f2efc5426888f2c"},
{file = "e2b-1.5.4.tar.gz", hash = "sha256:49f1c115d0198244beef5854d19cc857fda9382e205f137b98d3dae0e7e0b2d2"},
{file = "e2b-1.5.0-py3-none-any.whl", hash = "sha256:875a843d1d314a9945e24bfb78c9b1b5cac7e2ecb1e799664d827a26a0b2276a"},
{file = "e2b-1.5.0.tar.gz", hash = "sha256:905730eea5c07f271d073d4b5d2a9ef44c8ac04b9b146a99fa0235db77bf6854"},
]
[package.dependencies]
@@ -1027,19 +1026,19 @@ typing-extensions = ">=4.1.0"
[[package]]
name = "e2b-code-interpreter"
version = "1.5.2"
version = "1.5.0"
description = "E2B Code Interpreter - Stateful code execution"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "e2b_code_interpreter-1.5.2-py3-none-any.whl", hash = "sha256:5c3188d8f25226b28fef4b255447cc6a4c36afb748bdd5180b45be486d5169f3"},
{file = "e2b_code_interpreter-1.5.2.tar.gz", hash = "sha256:3bd6ea70596290e85aaf0a2f19f28bf37a5e73d13086f5e6a0080bb591c5a547"},
{file = "e2b_code_interpreter-1.5.0-py3-none-any.whl", hash = "sha256:299f5641a3754264a07f8edc3cccb744d6b009f10dc9285789a9352e24989a9b"},
{file = "e2b_code_interpreter-1.5.0.tar.gz", hash = "sha256:cd6028b6f20c4231e88a002de86484b9d4a99ea588b5be183b9ec7189a0f3cf6"},
]
[package.dependencies]
attrs = ">=21.3.0"
e2b = ">=1.5.4,<2.0.0"
e2b = ">=1.4.0,<2.0.0"
httpx = ">=0.20.0,<1.0.0"
[[package]]
@@ -1110,14 +1109,14 @@ typing-extensions = "*"
[[package]]
name = "fastapi"
version = "0.115.14"
version = "0.115.12"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "fastapi-0.115.14-py3-none-any.whl", hash = "sha256:6c0c8bf9420bd58f565e585036d971872472b4f7d3f6c73b698e10cffdefb3ca"},
{file = "fastapi-0.115.14.tar.gz", hash = "sha256:b1de15cdc1c499a4da47914db35d0e4ef8f1ce62b624e94e0e5824421df99739"},
{file = "fastapi-0.115.12-py3-none-any.whl", hash = "sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"},
{file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"},
]
[package.dependencies]
@@ -1193,20 +1192,20 @@ packaging = ">=20"
[[package]]
name = "flake8"
version = "7.3.0"
version = "7.2.0"
description = "the modular source code checker: pep8 pyflakes and co"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "flake8-7.3.0-py2.py3-none-any.whl", hash = "sha256:b9696257b9ce8beb888cdbe31cf885c90d31928fe202be0889a7cdafad32f01e"},
{file = "flake8-7.3.0.tar.gz", hash = "sha256:fe044858146b9fc69b551a4b490d69cf960fcb78ad1edcb84e7fbb1b4a8e3872"},
{file = "flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343"},
{file = "flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"},
]
[package.dependencies]
mccabe = ">=0.7.0,<0.8.0"
pycodestyle = ">=2.14.0,<2.15.0"
pyflakes = ">=3.4.0,<3.5.0"
pycodestyle = ">=2.13.0,<2.14.0"
pyflakes = ">=3.3.0,<3.4.0"
[[package]]
name = "frozenlist"
@@ -1357,14 +1356,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.0)"]
[[package]]
name = "google-api-python-client"
version = "2.176.0"
version = "2.170.0"
description = "Google API Client Library for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_api_python_client-2.176.0-py3-none-any.whl", hash = "sha256:e22239797f1d085341e12cd924591fc65c56d08e0af02549d7606092e6296510"},
{file = "google_api_python_client-2.176.0.tar.gz", hash = "sha256:2b451cdd7fd10faeb5dd20f7d992f185e1e8f4124c35f2cdcc77c843139a4cf1"},
{file = "google_api_python_client-2.170.0-py3-none-any.whl", hash = "sha256:7bf518a0527ad23322f070fa69f4f24053170d5c766821dc970ff0571ec22748"},
{file = "google_api_python_client-2.170.0.tar.gz", hash = "sha256:75f3a1856f11418ea3723214e0abc59d9b217fd7ed43dcf743aab7f06ab9e2b1"},
]
[package.dependencies]
@@ -1517,27 +1516,27 @@ protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4
[[package]]
name = "google-cloud-storage"
version = "3.2.0"
version = "3.1.0"
description = "Google Cloud Storage API client library"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "google_cloud_storage-3.2.0-py3-none-any.whl", hash = "sha256:ff7a9a49666954a7c3d1598291220c72d3b9e49d9dfcf9dfaecb301fc4fb0b24"},
{file = "google_cloud_storage-3.2.0.tar.gz", hash = "sha256:decca843076036f45633198c125d1861ffbf47ebf5c0e3b98dcb9b2db155896c"},
{file = "google_cloud_storage-3.1.0-py2.py3-none-any.whl", hash = "sha256:eaf36966b68660a9633f03b067e4a10ce09f1377cae3ff9f2c699f69a81c66c6"},
{file = "google_cloud_storage-3.1.0.tar.gz", hash = "sha256:944273179897c7c8a07ee15f2e6466a02da0c7c4b9ecceac2a26017cb2972049"},
]
[package.dependencies]
google-api-core = ">=2.15.0,<3.0.0"
google-auth = ">=2.26.1,<3.0.0"
google-cloud-core = ">=2.4.2,<3.0.0"
google-crc32c = ">=1.1.3,<2.0.0"
google-resumable-media = ">=2.7.2,<3.0.0"
requests = ">=2.22.0,<3.0.0"
google-api-core = ">=2.15.0,<3.0.0dev"
google-auth = ">=2.26.1,<3.0dev"
google-cloud-core = ">=2.4.2,<3.0dev"
google-crc32c = ">=1.0,<2.0dev"
google-resumable-media = ">=2.7.2"
requests = ">=2.18.0,<3.0.0dev"
[package.extras]
protobuf = ["protobuf (>=3.20.2,<7.0.0)"]
tracing = ["opentelemetry-api (>=1.1.0,<2.0.0)"]
protobuf = ["protobuf (<6.0.0dev)"]
tracing = ["opentelemetry-api (>=1.1.0)"]
[[package]]
name = "google-crc32c"
@@ -1745,14 +1744,14 @@ test = ["objgraph", "psutil"]
[[package]]
name = "groq"
version = "0.29.0"
version = "0.24.0"
description = "The official Python library for the groq API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "groq-0.29.0-py3-none-any.whl", hash = "sha256:03515ec46be1ef1feef0cd9d876b6f30a39ee2742e76516153d84acd7c97f23a"},
{file = "groq-0.29.0.tar.gz", hash = "sha256:109dc4d696c05d44e4c2cd157652c4c6600c3e96f093f6e158facb5691e37847"},
{file = "groq-0.24.0-py3-none-any.whl", hash = "sha256:0020e6b0b2b267263c9eb7c318deef13c12f399c6525734200b11d777b00088e"},
{file = "groq-0.24.0.tar.gz", hash = "sha256:e821559de8a77fb81d2585b3faec80ff923d6d64fd52339b33f6c94997d6f7f5"},
]
[package.dependencies]
@@ -1763,9 +1762,6 @@ pydantic = ">=1.9.0,<3"
sniffio = "*"
typing-extensions = ">=4.10,<5"
[package.extras]
aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.6)"]
[[package]]
name = "grpc-google-iam-v1"
version = "0.14.2"
@@ -2552,14 +2548,14 @@ files = [
[[package]]
name = "mem0ai"
version = "0.1.114"
version = "0.1.102"
description = "Long-term memory for AI Agents"
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "mem0ai-0.1.114-py3-none-any.whl", hash = "sha256:dfb7f0079ee282f5d9782e220f6f09707bcf5e107925d1901dbca30d8dd83f9b"},
{file = "mem0ai-0.1.114.tar.gz", hash = "sha256:b27886132eaec78544e8b8b54f0b14a36728f3c99da54cb7cb417150e2fad7e1"},
{file = "mem0ai-0.1.102-py3-none-any.whl", hash = "sha256:1401ccfd2369e2182ce78abb61b817e739fe49508b5a8ad98abcd4f8ad4db0b4"},
{file = "mem0ai-0.1.102.tar.gz", hash = "sha256:7358dba4fbe954b9c3f33204c14df7babaf9067e2eb48241d89a32e6bc774988"},
]
[package.dependencies]
@@ -2572,11 +2568,8 @@ sqlalchemy = ">=2.0.31"
[package.extras]
dev = ["isort (>=5.13.2)", "pytest (>=8.2.2)", "ruff (>=0.6.5)"]
extras = ["boto3 (>=1.34.0)", "elasticsearch (>=8.0.0)", "langchain-community (>=0.0.0)", "langchain-memgraph (>=0.1.0)", "opensearch-py (>=2.0.0)", "sentence-transformers (>=5.0.0)"]
graph = ["langchain-aws (>=0.2.23)", "langchain-neo4j (>=0.4.0)", "neo4j (>=5.23.1)", "rank-bm25 (>=0.2.2)"]
llms = ["google-genai (>=1.0.0)", "google-generativeai (>=0.3.0)", "groq (>=0.3.0)", "litellm (>=0.1.0)", "ollama (>=0.1.0)", "together (>=0.2.10)", "vertexai (>=0.1.0)"]
graph = ["langchain-neo4j (>=0.4.0)", "neo4j (>=5.23.1)", "rank-bm25 (>=0.2.2)"]
test = ["pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "pytest-mock (>=3.14.0)"]
vector-stores = ["azure-search-documents (>=11.4.0b8)", "chromadb (>=0.4.24)", "faiss-cpu (>=1.7.4)", "pinecone (<=7.3.0)", "pinecone-text (>=0.10.0)", "pymochow (>=2.2.9)", "pymongo (>=4.13.2)", "upstash-vector (>=0.1.0)", "vecs (>=0.4.0)", "weaviate-client (>=4.4.0)"]
[[package]]
name = "more-itertools"
@@ -2915,14 +2908,14 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
[[package]]
name = "ollama"
version = "0.5.1"
version = "0.4.9"
description = "The official Python client for Ollama."
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "ollama-0.5.1-py3-none-any.whl", hash = "sha256:4c8839f35bc173c7057b1eb2cbe7f498c1a7e134eafc9192824c8aecb3617506"},
{file = "ollama-0.5.1.tar.gz", hash = "sha256:5a799e4dc4e7af638b11e3ae588ab17623ee019e496caaf4323efbaa8feeff93"},
{file = "ollama-0.4.9-py3-none-any.whl", hash = "sha256:18c8c85358c54d7f73d6a66cda495b0e3ba99fdb88f824ae470d740fbb211a50"},
{file = "ollama-0.4.9.tar.gz", hash = "sha256:5266d4d29b5089a01489872b8e8f980f018bccbdd1082b3903448af1d5615ce7"},
]
[package.dependencies]
@@ -2931,14 +2924,14 @@ pydantic = ">=2.9"
[[package]]
name = "openai"
version = "1.93.2"
version = "1.82.1"
description = "The official Python library for the openai API"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "openai-1.93.2-py3-none-any.whl", hash = "sha256:5adbbebd48eae160e6d68efc4c0a4f7cb1318a44c62d9fc626cec229f418eab4"},
{file = "openai-1.93.2.tar.gz", hash = "sha256:4a7312b426b5e4c98b78dfa1148b5683371882de3ad3d5f7c8e0c74f3cc90778"},
{file = "openai-1.82.1-py3-none-any.whl", hash = "sha256:334eb5006edf59aa464c9e932b9d137468d810b2659e5daea9b3a8c39d052395"},
{file = "openai-1.82.1.tar.gz", hash = "sha256:ffc529680018e0417acac85f926f92aa0bbcbc26e82e2621087303c66bc7f95d"},
]
[package.dependencies]
@@ -2952,7 +2945,6 @@ tqdm = ">4"
typing-extensions = ">=4.11,<5"
[package.extras]
aiohttp = ["aiohttp", "httpx-aiohttp (>=0.1.6)"]
datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"]
realtime = ["websockets (>=13,<16)"]
voice-helpers = ["numpy (>=2.0.2)", "sounddevice (>=0.5.1)"]
@@ -3267,14 +3259,14 @@ testing = ["coverage", "pytest", "pytest-benchmark"]
[[package]]
name = "poethepoet"
version = "0.36.0"
description = "A task runner that works well with poetry and uv."
version = "0.34.0"
description = "A task runner that works well with poetry."
optional = false
python-versions = ">=3.9"
groups = ["dev"]
files = [
{file = "poethepoet-0.36.0-py3-none-any.whl", hash = "sha256:693e3c1eae9f6731d3613c3c0c40f747d3c5c68a375beda42e590a63c5623308"},
{file = "poethepoet-0.36.0.tar.gz", hash = "sha256:2217b49cb4e4c64af0b42ff8c4814b17f02e107d38bc461542517348ede25663"},
{file = "poethepoet-0.34.0-py3-none-any.whl", hash = "sha256:c472d6f0fdb341b48d346f4ccd49779840c15b30dfd6bc6347a80d6274b5e34e"},
{file = "poethepoet-0.34.0.tar.gz", hash = "sha256:86203acce555bbfe45cb6ccac61ba8b16a5784264484195874da457ddabf5850"},
]
[package.dependencies]
@@ -3500,14 +3492,14 @@ tqdm = "*"
[[package]]
name = "prometheus-client"
version = "0.22.1"
version = "0.21.1"
description = "Python client for the Prometheus monitoring system."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "prometheus_client-0.22.1-py3-none-any.whl", hash = "sha256:cca895342e308174341b2cbf99a56bef291fbc0ef7b9e5412a0f26d653ba7094"},
{file = "prometheus_client-0.22.1.tar.gz", hash = "sha256:190f1331e783cf21eb60bca559354e0a4d4378facecf78f5428c39b675d20d28"},
{file = "prometheus_client-0.21.1-py3-none-any.whl", hash = "sha256:594b45c410d6f4f8888940fe80b5cc2521b305a1fafe1c58609ef715a001f301"},
{file = "prometheus_client-0.21.1.tar.gz", hash = "sha256:252505a722ac04b0456be05c05f75f45d760c2911ffc45f2a06bcaed9f3ae3fb"},
]
[package.extras]
@@ -3791,88 +3783,83 @@ pyasn1 = ">=0.6.1,<0.7.0"
[[package]]
name = "pycares"
version = "4.9.0"
version = "4.8.0"
description = "Python interface for c-ares"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pycares-4.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b8bd9a3ee6e9bc990e1933dc7e7e2f44d4184f49a90fa444297ac12ab6c0c84"},
{file = "pycares-4.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:417a5c20861f35977240ad4961479a6778125bcac21eb2ad1c3aad47e2ff7fab"},
{file = "pycares-4.9.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab290faa4ea53ce53e3ceea1b3a42822daffce2d260005533293a52525076750"},
{file = "pycares-4.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b1df81193084c9717734e4615e8c5074b9852478c9007d1a8bb242f7f580e67"},
{file = "pycares-4.9.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:20c7a6af0c2ccd17cc5a70d76e299a90e7ebd6c4d8a3d7fff5ae533339f61431"},
{file = "pycares-4.9.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:370f41442a5b034aebdb2719b04ee04d3e805454a20d3f64f688c1c49f9137c3"},
{file = "pycares-4.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:340e4a3bbfd14d73c01ec0793a321b8a4a93f64c508225883291078b7ee17ac8"},
{file = "pycares-4.9.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f0ec94785856ea4f5556aa18f4c027361ba4b26cb36c4ad97d2105ef4eec68ba"},
{file = "pycares-4.9.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd6b7e23a4a9e2039b5d67dfa0499d2d5f114667dc13fb5d7d03eed230c7ac4f"},
{file = "pycares-4.9.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:490c978b0be9d35a253a5e31dd598f6d66b453625f0eb7dc2d81b22b8c3bb3f4"},
{file = "pycares-4.9.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e433faaf07f44e44f1a1b839fee847480fe3db9431509dafc9f16d618d491d0f"},
{file = "pycares-4.9.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cf6d8851a06b79d10089962c9dadcb34dad00bf027af000f7102297a54aaff2e"},
{file = "pycares-4.9.0-cp310-cp310-win32.whl", hash = "sha256:4f803e7d66ac7d8342998b8b07393788991353a46b05bbaad0b253d6f3484ea8"},
{file = "pycares-4.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:8e17bd32267e3870855de3baed7d0efa6337344d68f44853fd9195c919f39400"},
{file = "pycares-4.9.0-cp310-cp310-win_arm64.whl", hash = "sha256:6b74f75d8e430f9bb11a1cc99b2e328eed74b17d8d4b476de09126f38d419eb9"},
{file = "pycares-4.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:16a97ee83ec60d35c7f716f117719932c27d428b1bb56b242ba1c4aa55521747"},
{file = "pycares-4.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:78748521423a211ce699a50c27cc5c19e98b7db610ccea98daad652ace373990"},
{file = "pycares-4.9.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8818b2c7a57d9d6d41e8b64d9ff87992b8ea2522fc0799686725228bc3cff6c5"},
{file = "pycares-4.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96df8990f16013ca5194d6ece19dddb4ef9cd7c3efaab9f196ec3ccd44b40f8d"},
{file = "pycares-4.9.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61af86fd58b8326e723b0d20fb96b56acaec2261c3a7c9a1c29d0a79659d613a"},
{file = "pycares-4.9.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ec72edb276bda559813cc807bc47b423d409ffab2402417a5381077e9c2c6be1"},
{file = "pycares-4.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:832fb122c7376c76cab62f8862fa5e398b9575fb7c9ff6bc9811086441ee64ca"},
{file = "pycares-4.9.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdcfaef24f771a471671470ccfd676c0366ab6b0616fd8217b8f356c40a02b83"},
{file = "pycares-4.9.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:52cb056d06ff55d78a8665b97ae948abaaba2ca200ca59b10346d4526bce1e7d"},
{file = "pycares-4.9.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:54985ed3f2e8a87315269f24cb73441622857a7830adfc3a27c675a94c3261c1"},
{file = "pycares-4.9.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:08048e223615d4aef3dac81fe0ea18fb18d6fc97881f1eb5be95bb1379969b8d"},
{file = "pycares-4.9.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cc60037421ce05a409484287b2cd428e1363cca73c999b5f119936bb8f255208"},
{file = "pycares-4.9.0-cp311-cp311-win32.whl", hash = "sha256:62b86895b60cfb91befb3086caa0792b53f949231c6c0c3053c7dfee3f1386ab"},
{file = "pycares-4.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:7046b3c80954beaabf2db52b09c3d6fe85f6c4646af973e61be79d1c51589932"},
{file = "pycares-4.9.0-cp311-cp311-win_arm64.whl", hash = "sha256:fcbda3fdf44e94d3962ca74e6ba3dc18c0d7029106f030d61c04c0876f319403"},
{file = "pycares-4.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d68ca2da1001aeccdc81c4a2fb1f1f6cfdafd3d00e44e7c1ed93e3e05437f666"},
{file = "pycares-4.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4f0c8fa5a384d79551a27eafa39eed29529e66ba8fa795ee432ab88d050432a3"},
{file = "pycares-4.9.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eb8c428cf3b9c6ff9c641ba50ab6357b4480cd737498733e6169b0ac8a1a89b"},
{file = "pycares-4.9.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6845bd4a43abf6dab7fedbf024ef458ac3750a25b25076ea9913e5ac5fec4548"},
{file = "pycares-4.9.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e28f4acc3b97e46610cf164665ebf914f709daea6ced0ca4358ce55bc1c3d6b"},
{file = "pycares-4.9.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9464a39861840ce35a79352c34d653a9db44f9333af7c9feddb97998d3e00c07"},
{file = "pycares-4.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0611c1bd46d1fc6bdd9305b8850eb84c77df485769f72c574ed7b8389dfbee2"},
{file = "pycares-4.9.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4fb5a38a51d03b75ac4320357e632c2e72e03fdeb13263ee333a40621415fdc"},
{file = "pycares-4.9.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:df5edae05fb3e1370ab7639e67e8891fdaa9026cb10f05dbd57893713f7a9cfe"},
{file = "pycares-4.9.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:397123ea53d261007bb0aa7e767ef238778f45026db40bed8196436da2cc73de"},
{file = "pycares-4.9.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bb0d874d0b131b29894fd8a0f842be91ac21d50f90ec04cff4bb3f598464b523"},
{file = "pycares-4.9.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:497cc03a61ec1585eb17d2cb086a29a6a67d24babf1e9be519b47222916a3b06"},
{file = "pycares-4.9.0-cp312-cp312-win32.whl", hash = "sha256:b46e46313fdb5e82da15478652aac0fd15e1c9f33e08153bad845aa4007d6f84"},
{file = "pycares-4.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:12547a06445777091605a7581da15a0da158058beb8a05a3ebbf7301fd1f58d4"},
{file = "pycares-4.9.0-cp312-cp312-win_arm64.whl", hash = "sha256:f1e10bf1e8eb80b08e5c828627dba1ebc4acd54803bd0a27d92b9063b6aa99d8"},
{file = "pycares-4.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:574d815112a95ab09d75d0a9dc7dea737c06985e3125cf31c32ba6a3ed6ca006"},
{file = "pycares-4.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50e5ab06361d59625a27a7ad93d27e067dc7c9f6aa529a07d691eb17f3b43605"},
{file = "pycares-4.9.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:785f5fd11ff40237d9bc8afa441551bb449e2812c74334d1d10859569e07515c"},
{file = "pycares-4.9.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e194a500e403eba89b91fb863c917495c5b3dfcd1ce0ee8dc3a6f99a1360e2fc"},
{file = "pycares-4.9.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:112dd49cdec4e6150a8d95b197e8b6b7b4468a3170b30738ed9b248cb2240c04"},
{file = "pycares-4.9.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94aa3c2f3eb0aa69160137134775501f06c901188e722aac63d2a210d4084f99"},
{file = "pycares-4.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b510d71255cf5a92ccc2643a553548fcb0623d6ed11c8c633b421d99d7fa4167"},
{file = "pycares-4.9.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5c6aa30b1492b8130f7832bf95178642c710ce6b7ba610c2b17377f77177e3cd"},
{file = "pycares-4.9.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5767988e044faffe2aff6a76aa08df99a8b6ef2641be8b00ea16334ce5dea93"},
{file = "pycares-4.9.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b9928a942820a82daa3207509eaba9e0fa9660756ac56667ec2e062815331fcb"},
{file = "pycares-4.9.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:556c854174da76d544714cdfab10745ed5d4b99eec5899f7b13988cd26ff4763"},
{file = "pycares-4.9.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d42e2202ca9aa9a0a9a6e43a4a4408bbe0311aaa44800fa27b8fd7f82b20152a"},
{file = "pycares-4.9.0-cp313-cp313-win32.whl", hash = "sha256:cce8ef72c9ed4982c84114e6148a4e42e989d745de7862a0ad8b3f1cdc05def2"},
{file = "pycares-4.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:318cdf24f826f1d2f0c5a988730bd597e1683296628c8f1be1a5b96643c284fe"},
{file = "pycares-4.9.0-cp313-cp313-win_arm64.whl", hash = "sha256:faa9de8e647ed06757a2c117b70a7645a755561def814da6aca0d766cf71a402"},
{file = "pycares-4.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8310d27d68fa25be9781ce04d330f4860634a2ac34dd9265774b5f404679b41f"},
{file = "pycares-4.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99cf98452d3285307eec123049f2c9c50b109e06751b0727c6acefb6da30c6a0"},
{file = "pycares-4.9.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffd6e8c8250655504602b076f106653e085e6b1e15318013442558101aa4777"},
{file = "pycares-4.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4065858d8c812159c9a55601fda73760d9e5e3300f7868d9e546eab1084f36c"},
{file = "pycares-4.9.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91ee6818113faf9013945c2b54bcd6b123d0ac192ae3099cf4288cedaf2dbb25"},
{file = "pycares-4.9.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:21f0602059ec11857ab7ad608c7ec8bc6f7a302c04559ec06d33e82f040585f8"},
{file = "pycares-4.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e22e5b46ed9b12183091da56e4a5a20813b5436c4f13135d7a1c20a84027ca8a"},
{file = "pycares-4.9.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9eded8649867bfd7aea7589c5755eae4d37686272f6ed7a995da40890d02de71"},
{file = "pycares-4.9.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f71d31cbbe066657a2536c98aad850724a9ab7b1cd2624f491832ae9667ea8e7"},
{file = "pycares-4.9.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2b30945982ab4741f097efc5b0853051afc3c11df26996ed53a700c7575175af"},
{file = "pycares-4.9.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:54a8f1f067d64810426491d33033f5353b54f35e5339126440ad4e6afbf3f149"},
{file = "pycares-4.9.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:41556a269a192349e92eee953f62eddd867e9eddb27f444b261e2c1c4a4a9eff"},
{file = "pycares-4.9.0-cp39-cp39-win32.whl", hash = "sha256:524d6c14eaa167ed098a4fe54856d1248fa20c296cdd6976f9c1b838ba32d014"},
{file = "pycares-4.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:15f930c733d36aa487b4ad60413013bd811281b5ea4ca620070fa38505d84df4"},
{file = "pycares-4.9.0-cp39-cp39-win_arm64.whl", hash = "sha256:79b7addb2a41267d46650ac0d9c4f3b3233b036f186b85606f7586881dfb4b69"},
{file = "pycares-4.9.0.tar.gz", hash = "sha256:8ee484ddb23dbec4d88d14ed5b6d592c1960d2e93c385d5e52b6fad564d82395"},
{file = "pycares-4.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f40d9f4a8de398b110fdf226cdfadd86e8c7eb71d5298120ec41cf8d94b0012f"},
{file = "pycares-4.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:339de06fc849a51015968038d2bbed68fc24047522404af9533f32395ca80d25"},
{file = "pycares-4.8.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:372a236c1502b9056b0bea195c64c329603b4efa70b593a33b7ae37fbb7fad00"},
{file = "pycares-4.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03f66a5e143d102ccc204bd4e29edd70bed28420f707efd2116748241e30cb73"},
{file = "pycares-4.8.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef50504296cd5fc58cfd6318f82e20af24fbe2c83004f6ff16259adb13afdf14"},
{file = "pycares-4.8.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1bc541b627c7951dd36136b18bd185c5244a0fb2af5b1492ffb8acaceec1c5b"},
{file = "pycares-4.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:938d188ed6bed696099be67ebdcdf121827b9432b17a9ea9e40dc35fd9d85363"},
{file = "pycares-4.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:327837ffdc0c7adda09c98e1263c64b2aff814eea51a423f66733c75ccd9a642"},
{file = "pycares-4.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a6b9b8d08c4508c45bd39e0c74e9e7052736f18ca1d25a289365bb9ac36e5849"},
{file = "pycares-4.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:feac07d5e6d2d8f031c71237c21c21b8c995b41a1eba64560e8cf1e42ac11bc6"},
{file = "pycares-4.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5bcdbf37012fd2323ca9f2a1074421a9ccf277d772632f8f0ce8c46ec7564250"},
{file = "pycares-4.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e3ebb692cb43fcf34fe0d26f2cf9a0ea53fdfb136463845b81fad651277922db"},
{file = "pycares-4.8.0-cp310-cp310-win32.whl", hash = "sha256:d98447ec0efff3fa868ccc54dcc56e71faff498f8848ecec2004c3108efb4da2"},
{file = "pycares-4.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:1abb8f40917960ead3c2771277f0bdee1967393b0fdf68743c225b606787da68"},
{file = "pycares-4.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e25db89005ddd8d9c5720293afe6d6dd92e682fc6bc7a632535b84511e2060d"},
{file = "pycares-4.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6f9665ef116e6ee216c396f5f927756c2164f9f3316aec7ff1a9a1e1e7ec9b2a"},
{file = "pycares-4.8.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54a96893133471f6889b577147adcc21a480dbe316f56730871028379c8313f3"},
{file = "pycares-4.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51024b3a69762bd3100d94986a29922be15e13f56f991aaefb41f5bcd3d7f0bb"},
{file = "pycares-4.8.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47ff9db50c599e4d965ae3bec99cc30941c1d2b0f078ec816680b70d052dd54a"},
{file = "pycares-4.8.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27ef8ff4e0f60ea6769a60d1c3d1d2aefed1d832e7bb83fc3934884e2dba5cdd"},
{file = "pycares-4.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63511af7a3f9663f562fbb6bfa3591a259505d976e2aba1fa2da13dde43c6ca7"},
{file = "pycares-4.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:73c3219b47616e6a5ad1810de96ed59721c7751f19b70ae7bf24997a8365408f"},
{file = "pycares-4.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:da42a45207c18f37be5e491c14b6d1063cfe1e46620eb661735d0cedc2b59099"},
{file = "pycares-4.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8a068e898bb5dd09cd654e19cd2abf20f93d0cc59d5d955135ed48ea0f806aa1"},
{file = "pycares-4.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:962aed95675bb66c0b785a2fbbd1bb58ce7f009e283e4ef5aaa4a1f2dc00d217"},
{file = "pycares-4.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ce8b1a16c1e4517a82a0ebd7664783a327166a3764d844cf96b1fb7b9dd1e493"},
{file = "pycares-4.8.0-cp311-cp311-win32.whl", hash = "sha256:b3749ddbcbd216376c3b53d42d8b640b457133f1a12b0e003f3838f953037ae7"},
{file = "pycares-4.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:5ce8a4e1b485b2360ab666c4ea1db97f57ede345a3b566d80bfa52b17e616610"},
{file = "pycares-4.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3273e01a75308ed06d2492d83c7ba476e579a60a24d9f20fe178ce5e9d8d028b"},
{file = "pycares-4.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fcedaadea1f452911fd29935749f98d144dae758d6003b7e9b6c5d5bd47d1dff"},
{file = "pycares-4.8.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aae6cb33e287e06a4aabcbc57626df682c9a4fa8026207f5b498697f1c2fb562"},
{file = "pycares-4.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25038b930e5be82839503fb171385b2aefd6d541bc5b7da0938bdb67780467d2"},
{file = "pycares-4.8.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cc8499b6e7dfbe4af65f6938db710ce9acd1debf34af2cbb93b898b1e5da6a5a"},
{file = "pycares-4.8.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4e1c6a68ef56a7622f6176d9946d4e51f3c853327a0123ef35a5380230c84cd"},
{file = "pycares-4.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7cc8c3c9114b9c84e4062d25ca9b4bddc80a65d0b074c7cb059275273382f89"},
{file = "pycares-4.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4404014069d3e362abf404c9932d4335bb9c07ba834cfe7d683c725b92e0f9da"},
{file = "pycares-4.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ee0a58c32ec2a352cef0e1d20335a7caf9871cd79b73be2ca2896fe70f09c9d7"},
{file = "pycares-4.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:35f32f52b486b8fede3cbebf088f30b01242d0321b5216887c28e80490595302"},
{file = "pycares-4.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ecbb506e27a3b3a2abc001c77beeccf265475c84b98629a6b3e61bd9f2987eaa"},
{file = "pycares-4.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9392b2a34adbf60cb9e38f4a0d363413ecea8d835b5a475122f50f76676d59dd"},
{file = "pycares-4.8.0-cp312-cp312-win32.whl", hash = "sha256:f0fbefe68403ffcff19c869b8d621c88a6d2cef18d53cf0dab0fa9458a6ca712"},
{file = "pycares-4.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa8aab6085a2ddfb1b43a06ddf1b498347117bb47cd620d9b12c43383c9c2737"},
{file = "pycares-4.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:358a9a2c6fed59f62788e63d88669224955443048a1602016d4358e92aedb365"},
{file = "pycares-4.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e3e1278967fa8d4a0056be3fcc8fc551b8bad1fc7d0e5172196dccb8ddb036a"},
{file = "pycares-4.8.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79befb773e370a8f97de9f16f5ea2c7e7fa0e3c6c74fbea6d332bf58164d7d06"},
{file = "pycares-4.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b00d3695db64ce98a34e632e1d53f5a1cdb25451489f227bec2a6c03ff87ee8"},
{file = "pycares-4.8.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:37bdc4f2ff0612d60fc4f7547e12ff02cdcaa9a9e42e827bb64d4748994719f1"},
{file = "pycares-4.8.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd92c44498ec7a6139888b464b28c49f7ba975933689bd67ea8d572b94188404"},
{file = "pycares-4.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2665a0d810e2bbc41e97f3c3e5ea7950f666b3aa19c5f6c99d6b018ccd2e0052"},
{file = "pycares-4.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45a629a6470a33478514c566bce50c63f1b17d1c5f2f964c9a6790330dc105fb"},
{file = "pycares-4.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:47bb378f1773f41cca8e31dcdf009ce4a9b8aff8a30c7267aaff9a099c407ba5"},
{file = "pycares-4.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fb3feae38458005cc101956e38f16eb3145fff8cd793e35cd4bdef6bf1aa2623"},
{file = "pycares-4.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:14bc28aeaa66b0f4331ac94455e8043c8a06b3faafd78cc49d4b677bae0d0b08"},
{file = "pycares-4.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:62c82b871470f2864a1febf7b96bb1d108ce9063e6d3d43727e8a46f0028a456"},
{file = "pycares-4.8.0-cp313-cp313-win32.whl", hash = "sha256:01afa8964c698c8f548b46d726f766aa7817b2d4386735af1f7996903d724920"},
{file = "pycares-4.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:22f86f81b12ab17b0a7bd0da1e27938caaed11715225c1168763af97f8bb51a7"},
{file = "pycares-4.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:61325d13a95255e858f42a7a1a9e482ff47ef2233f95ad9a4f308a3bd8ecf903"},
{file = "pycares-4.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfec3a7d42336fa46a1e7e07f67000fd4b97860598c59a894c08f81378629e4e"},
{file = "pycares-4.8.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b65067e4b4f5345688817fff6be06b9b1f4ec3619b0b9ecc639bc681b73f646b"},
{file = "pycares-4.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0322ad94bbaa7016139b5bbdcd0de6f6feb9d146d69e03a82aaca342e06830a6"},
{file = "pycares-4.8.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:456c60f170c997f9a43c7afa1085fced8efb7e13ae49dd5656f998ae13c4bdb4"},
{file = "pycares-4.8.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57a2c4c9ce423a85b0e0227409dbaf0d478f5e0c31d9e626768e77e1e887d32f"},
{file = "pycares-4.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:478d9c479108b7527266864c0affe3d6e863492c9bc269217e36100c8fd89b91"},
{file = "pycares-4.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aed56bca096990ca0aa9bbf95761fc87e02880e04b0845922b5c12ea9abe523f"},
{file = "pycares-4.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ef265a390928ee2f77f8901c2273c53293157860451ad453ce7f45dd268b72f9"},
{file = "pycares-4.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:a5f17d7a76d8335f1c90a8530c8f1e8bb22e9a1d70a96f686efaed946de1c908"},
{file = "pycares-4.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:891f981feb2ef34367378f813fc17b3d706ce95b6548eeea0c9fe7705d7e54b1"},
{file = "pycares-4.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4102f6d9117466cc0a1f527907a1454d109cc9e8551b8074888071ef16050fe3"},
{file = "pycares-4.8.0-cp39-cp39-win32.whl", hash = "sha256:d6775308659652adc88c82c53eda59b5e86a154aaba5ad1e287bbb3e0be77076"},
{file = "pycares-4.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:8bc05462aa44788d48544cca3d2532466fed2cdc5a2f24a43a92b620a61c9d19"},
{file = "pycares-4.8.0.tar.gz", hash = "sha256:2fc2ebfab960f654b3e3cf08a732486950da99393a657f8b44618ad3ed2d39c1"},
]
[package.dependencies]
@@ -3883,14 +3870,14 @@ idna = ["idna (>=2.1)"]
[[package]]
name = "pycodestyle"
version = "2.14.0"
version = "2.13.0"
description = "Python style guide checker"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pycodestyle-2.14.0-py2.py3-none-any.whl", hash = "sha256:dd6bf7cb4ee77f8e016f9c8e74a35ddd9f67e1d5fd4184d86c3b98e07099f42d"},
{file = "pycodestyle-2.14.0.tar.gz", hash = "sha256:c4b5b517d278089ff9d0abdec919cd97262a3367449ea1c8b49b91529167b783"},
{file = "pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9"},
{file = "pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"},
]
[[package]]
@@ -3907,14 +3894,14 @@ files = [
[[package]]
name = "pydantic"
version = "2.11.7"
version = "2.11.5"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
{file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
{file = "pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7"},
{file = "pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a"},
]
[package.dependencies]
@@ -4042,14 +4029,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
[[package]]
name = "pydantic-settings"
version = "2.10.1"
version = "2.9.1"
description = "Settings management using Pydantic"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796"},
{file = "pydantic_settings-2.10.1.tar.gz", hash = "sha256:06f0062169818d0f5524420a360d632d5857b83cffd4d42fe29597807a1614ee"},
{file = "pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef"},
{file = "pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268"},
]
[package.dependencies]
@@ -4066,31 +4053,16 @@ yaml = ["pyyaml (>=6.0.1)"]
[[package]]
name = "pyflakes"
version = "3.4.0"
version = "3.3.2"
description = "passive checker of Python programs"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "pyflakes-3.4.0-py2.py3-none-any.whl", hash = "sha256:f742a7dbd0d9cb9ea41e9a24a918996e8170c799fa528688d40dd582c8265f4f"},
{file = "pyflakes-3.4.0.tar.gz", hash = "sha256:b24f96fafb7d2ab0ec5075b7350b3d2d2218eab42003821c06344973d3ea2f58"},
{file = "pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a"},
{file = "pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b"},
]
[[package]]
name = "pygments"
version = "2.19.2"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b"},
{file = "pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887"},
]
[package.extras]
windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pyjwt"
version = "2.10.1"
@@ -4150,14 +4122,14 @@ files = [
[[package]]
name = "pyright"
version = "1.1.402"
version = "1.1.401"
description = "Command line wrapper for pyright"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "pyright-1.1.402-py3-none-any.whl", hash = "sha256:2c721f11869baac1884e846232800fe021c33f1b4acb3929cff321f7ea4e2982"},
{file = "pyright-1.1.402.tar.gz", hash = "sha256:85a33c2d40cd4439c66aa946fd4ce71ab2f3f5b8c22ce36a623f59ac22937683"},
{file = "pyright-1.1.401-py3-none-any.whl", hash = "sha256:6fde30492ba5b0d7667c16ecaf6c699fab8d7a1263f6a18549e0b00bf7724c06"},
{file = "pyright-1.1.401.tar.gz", hash = "sha256:788a82b6611fa5e34a326a921d86d898768cddf59edde8e93e56087d277cc6f1"},
]
[package.dependencies]
@@ -4171,27 +4143,26 @@ nodejs = ["nodejs-wheel-binaries"]
[[package]]
name = "pytest"
version = "8.4.1"
version = "8.3.5"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7"},
{file = "pytest-8.4.1.tar.gz", hash = "sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c"},
{file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"},
{file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"},
]
[package.dependencies]
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""}
iniconfig = ">=1"
packaging = ">=20"
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=1.5,<2"
pygments = ">=2.7.2"
tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"]
dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-asyncio"
@@ -4278,14 +4249,14 @@ six = ">=1.5"
[[package]]
name = "python-dotenv"
version = "1.1.1"
version = "1.1.0"
description = "Read key-value pairs from a .env file and set them as environment variables"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc"},
{file = "python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab"},
{file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"},
{file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"},
]
[package.extras]
@@ -4731,19 +4702,19 @@ typing_extensions = ">=4.5.0"
[[package]]
name = "requests"
version = "2.32.4"
version = "2.32.3"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.8"
groups = ["main", "dev"]
files = [
{file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"},
{file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"},
{file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
{file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset_normalizer = ">=2,<4"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
@@ -4929,30 +4900,30 @@ pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.12.2"
version = "0.11.12"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
files = [
{file = "ruff-0.12.2-py3-none-linux_armv6l.whl", hash = "sha256:093ea2b221df1d2b8e7ad92fc6ffdca40a2cb10d8564477a987b44fd4008a7be"},
{file = "ruff-0.12.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:09e4cf27cc10f96b1708100fa851e0daf21767e9709e1649175355280e0d950e"},
{file = "ruff-0.12.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8ae64755b22f4ff85e9c52d1f82644abd0b6b6b6deedceb74bd71f35c24044cc"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eb3a6b2db4d6e2c77e682f0b988d4d61aff06860158fdb413118ca133d57922"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:73448de992d05517170fc37169cbca857dfeaeaa8c2b9be494d7bcb0d36c8f4b"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b94317cbc2ae4a2771af641739f933934b03555e51515e6e021c64441532d"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:45fc42c3bf1d30d2008023a0a9a0cfb06bf9835b147f11fe0679f21ae86d34b1"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce48f675c394c37e958bf229fb5c1e843e20945a6d962cf3ea20b7a107dcd9f4"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793d8859445ea47591272021a81391350205a4af65a9392401f418a95dfb75c9"},
{file = "ruff-0.12.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6932323db80484dda89153da3d8e58164d01d6da86857c79f1961934354992da"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:6aa7e623a3a11538108f61e859ebf016c4f14a7e6e4eba1980190cacb57714ce"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2a4a20aeed74671b2def096bdf2eac610c7d8ffcbf4fb0e627c06947a1d7078d"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:71a4c550195612f486c9d1f2b045a600aeba851b298c667807ae933478fcef04"},
{file = "ruff-0.12.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4987b8f4ceadf597c927beee65a5eaf994c6e2b631df963f86d8ad1bdea99342"},
{file = "ruff-0.12.2-py3-none-win32.whl", hash = "sha256:369ffb69b70cd55b6c3fc453b9492d98aed98062db9fec828cdfd069555f5f1a"},
{file = "ruff-0.12.2-py3-none-win_amd64.whl", hash = "sha256:dca8a3b6d6dc9810ed8f328d406516bf4d660c00caeaef36eb831cf4871b0639"},
{file = "ruff-0.12.2-py3-none-win_arm64.whl", hash = "sha256:48d6c6bfb4761df68bc05ae630e24f506755e702d4fb08f08460be778c7ccb12"},
{file = "ruff-0.12.2.tar.gz", hash = "sha256:d7b4f55cd6f325cb7621244f19c873c565a08aff5a4ba9c69aa7355f3f7afd3e"},
{file = "ruff-0.11.12-py3-none-linux_armv6l.whl", hash = "sha256:c7680aa2f0d4c4f43353d1e72123955c7a2159b8646cd43402de6d4a3a25d7cc"},
{file = "ruff-0.11.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2cad64843da9f134565c20bcc430642de897b8ea02e2e79e6e02a76b8dcad7c3"},
{file = "ruff-0.11.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9b6886b524a1c659cee1758140138455d3c029783d1b9e643f3624a5ee0cb0aa"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc3a3690aad6e86c1958d3ec3c38c4594b6ecec75c1f531e84160bd827b2012"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f97fdbc2549f456c65b3b0048560d44ddd540db1f27c778a938371424b49fe4a"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74adf84960236961090e2d1348c1a67d940fd12e811a33fb3d107df61eef8fc7"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:b56697e5b8bcf1d61293ccfe63873aba08fdbcbbba839fc046ec5926bdb25a3a"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d47afa45e7b0eaf5e5969c6b39cbd108be83910b5c74626247e366fd7a36a13"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:692bf9603fe1bf949de8b09a2da896f05c01ed7a187f4a386cdba6760e7f61be"},
{file = "ruff-0.11.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:08033320e979df3b20dba567c62f69c45e01df708b0f9c83912d7abd3e0801cd"},
{file = "ruff-0.11.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:929b7706584f5bfd61d67d5070f399057d07c70585fa8c4491d78ada452d3bef"},
{file = "ruff-0.11.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7de4a73205dc5756b8e09ee3ed67c38312dce1aa28972b93150f5751199981b5"},
{file = "ruff-0.11.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2635c2a90ac1b8ca9e93b70af59dfd1dd2026a40e2d6eebaa3efb0465dd9cf02"},
{file = "ruff-0.11.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d05d6a78a89166f03f03a198ecc9d18779076ad0eec476819467acb401028c0c"},
{file = "ruff-0.11.12-py3-none-win32.whl", hash = "sha256:f5a07f49767c4be4772d161bfc049c1f242db0cfe1bd976e0f0886732a4765d6"},
{file = "ruff-0.11.12-py3-none-win_amd64.whl", hash = "sha256:5a4d9f8030d8c3a45df201d7fb3ed38d0219bccd7955268e863ee4a115fa0832"},
{file = "ruff-0.11.12-py3-none-win_arm64.whl", hash = "sha256:65194e37853158d368e333ba282217941029a28ea90913c67e558c611d04daa5"},
{file = "ruff-0.11.12.tar.gz", hash = "sha256:43cf7f69c7d7c7d7513b9d59c5d8cafd704e05944f978614aa9faff6ac202603"},
]
[[package]]
@@ -4986,14 +4957,14 @@ files = [
[[package]]
name = "sentry-sdk"
version = "2.32.0"
version = "2.29.1"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
groups = ["main"]
files = [
{file = "sentry_sdk-2.32.0-py2.py3-none-any.whl", hash = "sha256:6cf51521b099562d7ce3606da928c473643abe99b00ce4cb5626ea735f4ec345"},
{file = "sentry_sdk-2.32.0.tar.gz", hash = "sha256:9016c75d9316b0f6921ac14c8cd4fb938f26002430ac5be9945ab280f78bec6b"},
{file = "sentry_sdk-2.29.1-py2.py3-none-any.whl", hash = "sha256:90862fe0616ded4572da6c9dadb363121a1ae49a49e21c418f0634e9d10b4c19"},
{file = "sentry_sdk-2.29.1.tar.gz", hash = "sha256:8d4a0206b95fa5fe85e5e7517ed662e3888374bdc342c00e435e10e6d831aa6d"},
]
[package.dependencies]
@@ -5280,23 +5251,23 @@ typing-extensions = {version = ">=4.5.0", markers = "python_version >= \"3.7\""}
[[package]]
name = "supabase"
version = "2.16.0"
version = "2.15.1"
description = "Supabase client for Python."
optional = false
python-versions = "<4.0,>=3.9"
groups = ["main"]
files = [
{file = "supabase-2.16.0-py3-none-any.whl", hash = "sha256:99065caab3d90a56650bf39fbd0e49740995da3738ab28706c61bd7f2401db55"},
{file = "supabase-2.16.0.tar.gz", hash = "sha256:98f3810158012d4ec0e3083f2e5515f5e10b32bd71e7d458662140e963c1d164"},
{file = "supabase-2.15.1-py3-none-any.whl", hash = "sha256:749299cdd74ecf528f52045c1e60d9dba81cc2054656f754c0ca7fba0dd34827"},
{file = "supabase-2.15.1.tar.gz", hash = "sha256:66e847dab9346062aa6a25b4e81ac786b972c5d4299827c57d1d5bd6a0346070"},
]
[package.dependencies]
gotrue = ">=2.11.0,<3.0.0"
httpx = ">=0.26,<0.29"
postgrest = ">0.19,<1.2"
realtime = ">=2.4.0,<2.6.0"
storage3 = ">=0.10,<0.13"
supafunc = ">=0.9,<0.11"
postgrest = ">0.19,<1.1"
realtime = ">=2.4.0,<2.5.0"
storage3 = ">=0.10,<0.12"
supafunc = ">=0.9,<0.10"
[[package]]
name = "supafunc"
@@ -5503,14 +5474,14 @@ files = [
[[package]]
name = "tweepy"
version = "4.16.0"
description = "Library for accessing the X API (Twitter)"
version = "4.15.0"
description = "Twitter library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "tweepy-4.16.0-py3-none-any.whl", hash = "sha256:48d1a1eb311d2c4b8990abcfa6f9fa2b2ad61be05c723b1a9b4f242656badae2"},
{file = "tweepy-4.16.0.tar.gz", hash = "sha256:1d95cbdc50bf6353a387f881f2584eaf60d14e00dbbdd8872a73de79c66878e3"},
{file = "tweepy-4.15.0-py3-none-any.whl", hash = "sha256:64adcea317158937059e4e2897b3ceb750b0c2dd5df58938c2da8f7eb3b88e6a"},
{file = "tweepy-4.15.0.tar.gz", hash = "sha256:1345cbcdf0a75e2d89f424c559fd49fda4d8cd7be25cd5131e3b57bad8a21d76"},
]
[package.dependencies]
@@ -5521,6 +5492,8 @@ requests-oauthlib = ">=1.2.0,<3"
[package.extras]
async = ["aiohttp (>=3.7.3,<4)", "async-lru (>=1.0.3,<3)"]
dev = ["coverage (>=4.4.2)", "coveralls (>=2.1.0)", "tox (>=3.21.0)"]
docs = ["myst-parser (==0.15.2)", "readthedocs-sphinx-search (==0.1.1)", "sphinx (==4.2.0)", "sphinx-hoverxref (==0.7b1)", "sphinx-tabs (==3.2.0)", "sphinx_rtd_theme (==1.0.0)"]
socks = ["requests[socks] (>=2.27.0,<3)"]
test = ["urllib3 (<2)", "vcrpy (>=1.10.3)"]
[[package]]
@@ -6280,14 +6253,14 @@ requests = "*"
[[package]]
name = "zerobouncesdk"
version = "1.1.2"
version = "1.1.1"
description = "ZeroBounce Python API - https://www.zerobounce.net."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "zerobouncesdk-1.1.2-py3-none-any.whl", hash = "sha256:a89febfb3adade01c314e6bad2113ad093f1e1cca6ddf9fcf445a8b2a9a458b4"},
{file = "zerobouncesdk-1.1.2.tar.gz", hash = "sha256:24810a2e39c963bc75b4732356b0fc8b10091f2c892f0c8b08fbb32640fdccaf"},
{file = "zerobouncesdk-1.1.1-py3-none-any.whl", hash = "sha256:9fb9dfa44fe4ce35d6f2e43d5144c31ca03544a3317d75643cb9f86b0c028675"},
{file = "zerobouncesdk-1.1.1.tar.gz", hash = "sha256:00aa537263d5bc21534c0007dd9f94ce8e0986caa530c5a0bbe0bd917451f236"},
]
[package.dependencies]
@@ -6429,4 +6402,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.13"
content-hash = "476228d2bf59b90edc5425c462c1263cbc1f2d346f79a826ac5e7efe7823aaa6"
content-hash = "b5c1201f27ee8d05d5d8c89702123df4293f124301d1aef7451591a351872260"

@@ -10,61 +10,61 @@ packages = [{ include = "backend", format = "sdist" }]
[tool.poetry.dependencies]
python = ">=3.10,<3.13"
aio-pika = "^9.5.5"
aiodns = "^3.5.0"
anthropic = "^0.57.1"
aiodns = "^3.1.1"
anthropic = "^0.51.0"
apscheduler = "^3.11.0"
autogpt-libs = { path = "../autogpt_libs", develop = true }
bleach = { extras = ["css"], version = "^6.2.0" }
click = "^8.2.0"
cryptography = "^43.0"
discord-py = "^2.5.2"
e2b-code-interpreter = "^1.5.2"
fastapi = "^0.115.14"
e2b-code-interpreter = "^1.5.0"
fastapi = "^0.115.12"
feedparser = "^6.0.11"
flake8 = "^7.3.0"
google-api-python-client = "^2.176.0"
flake8 = "^7.2.0"
google-api-python-client = "^2.169.0"
google-auth-oauthlib = "^1.2.2"
google-cloud-storage = "^3.2.0"
google-cloud-storage = "^3.1.0"
googlemaps = "^4.10.0"
gravitasml = "^0.1.3"
groq = "^0.29.0"
groq = "^0.24.0"
jinja2 = "^3.1.6"
jsonref = "^1.1.0"
jsonschema = "^4.22.0"
launchdarkly-server-sdk = "^9.11.0"
mem0ai = "^0.1.114"
mem0ai = "^0.1.98"
moviepy = "^2.1.2"
ollama = "^0.5.1"
openai = "^1.93.2"
ollama = "^0.4.8"
openai = "^1.78.1"
pika = "^1.3.2"
pinecone = "^5.3.1"
poetry = "2.1.1" # CHECK DEPENDABOT SUPPORT BEFORE UPGRADING
postmarker = "^1.0"
praw = "~7.8.1"
prisma = "^0.15.0"
prometheus-client = "^0.22.1"
prometheus-client = "^0.21.1"
psutil = "^7.0.0"
psycopg2-binary = "^2.9.10"
pydantic = { extras = ["email"], version = "^2.11.7" }
pydantic-settings = "^2.10.1"
pytest = "^8.4.1"
pydantic = { extras = ["email"], version = "^2.11.4" }
pydantic-settings = "^2.9.1"
pytest = "^8.3.5"
pytest-asyncio = "^0.26.0"
python-dotenv = "^1.1.1"
python-dotenv = "^1.1.0"
python-multipart = "^0.0.20"
redis = "^5.2.0"
replicate = "^1.0.6"
sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlalchemy"], version = "^2.32.0"}
sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlalchemy"], version = "^2.28.0"}
sqlalchemy = "^2.0.40"
strenum = "^0.4.9"
stripe = "^11.5.0"
supabase = "2.16.0"
supabase = "2.15.1"
tenacity = "^9.1.2"
todoist-api-python = "^2.1.7"
tweepy = "^4.16.0"
tweepy = "^4.14.0"
uvicorn = { extras = ["standard"], version = "^0.34.2" }
websockets = "^14.2"
youtube-transcript-api = "^0.6.2"
zerobouncesdk = "^1.1.2"
zerobouncesdk = "^1.1.1"
# NOTE: please insert new dependencies in their alphabetical location
pytest-snapshot = "^0.9.0"
aiofiles = "^24.1.0"
@@ -78,12 +78,12 @@ black = "^24.10.0"
faker = "^33.3.1"
httpx = "^0.28.1"
isort = "^5.13.2"
poethepoet = "^0.36.0"
pyright = "^1.1.402"
poethepoet = "^0.34.0"
pyright = "^1.1.400"
pytest-mock = "^3.14.0"
pytest-watcher = "^0.4.2"
requests = "^2.32.4"
ruff = "^0.12.2"
requests = "^2.32.3"
ruff = "^0.11.10"
# NOTE: please insert new dependencies in their alphabetical location
[build-system]

@@ -26,9 +26,9 @@
"defaults"
],
"dependencies": {
"@faker-js/faker": "9.9.0",
"@faker-js/faker": "9.8.0",
"@hookform/resolvers": "5.1.1",
"@next/third-parties": "15.3.5",
"@next/third-parties": "15.3.3",
"@phosphor-icons/react": "2.1.10",
"@radix-ui/react-alert-dialog": "1.1.14",
"@radix-ui/react-avatar": "1.1.10",
@@ -49,14 +49,13 @@
"@radix-ui/react-tabs": "1.1.12",
"@radix-ui/react-toast": "1.2.14",
"@radix-ui/react-tooltip": "1.2.7",
"@sentry/nextjs": "9.35.0",
"@sentry/nextjs": "9.27.0",
"@supabase/ssr": "0.6.1",
"@supabase/supabase-js": "2.50.3",
"@tanstack/react-query": "5.81.5",
"@supabase/supabase-js": "2.50.0",
"@tanstack/react-query": "5.80.7",
"@tanstack/react-table": "8.21.3",
"@tanstack/react-virtual": "3.13.12",
"@types/jaro-winkler": "0.2.4",
"@xyflow/react": "12.8.1",
"@xyflow/react": "12.6.4",
"ajv": "8.17.1",
"boring-avatars": "1.11.2",
"class-variance-authority": "0.7.1",
@@ -67,21 +66,21 @@
"dotenv": "16.5.0",
"elliptic": "6.6.1",
"embla-carousel-react": "8.6.0",
"framer-motion": "12.23.0",
"framer-motion": "12.16.0",
"geist": "1.4.2",
"jaro-winkler": "0.2.8",
"launchdarkly-react-client-sdk": "3.8.1",
"lodash": "4.17.21",
"lucide-react": "0.525.0",
"lucide-react": "0.513.0",
"moment": "2.30.1",
"next": "15.3.5",
"next": "15.3.3",
"next-themes": "0.4.6",
"party-js": "2.2.0",
"react": "18.3.1",
"react-day-picker": "9.8.0",
"react-day-picker": "9.7.0",
"react-dom": "18.3.1",
"react-drag-drop-files": "2.4.0",
"react-hook-form": "7.60.0",
"react-hook-form": "7.57.0",
"react-icons": "5.5.0",
"react-markdown": "9.0.3",
"react-modal": "3.16.3",
@@ -92,20 +91,20 @@
"tailwindcss-animate": "1.0.7",
"uuid": "11.1.0",
"vaul": "1.1.2",
"zod": "3.25.76"
"zod": "3.25.56"
},
"devDependencies": {
"@chromatic-com/storybook": "4.0.1",
"@playwright/test": "1.53.2",
"@storybook/addon-a11y": "9.0.16",
"@storybook/addon-docs": "9.0.16",
"@storybook/addon-links": "9.0.16",
"@storybook/addon-onboarding": "9.0.16",
"@storybook/nextjs": "9.0.16",
"@playwright/test": "1.53.1",
"@storybook/addon-a11y": "9.0.14",
"@storybook/addon-docs": "9.0.14",
"@storybook/addon-links": "9.0.14",
"@storybook/addon-onboarding": "9.0.14",
"@storybook/nextjs": "9.0.14",
"@tanstack/eslint-plugin-query": "5.81.2",
"@tanstack/react-query-devtools": "5.81.5",
"@types/canvas-confetti": "1.9.0",
"@types/lodash": "4.17.20",
"@types/lodash": "4.17.19",
"@types/negotiator": "0.6.4",
"@types/node": "22.15.30",
"@types/react": "18.3.17",
@@ -116,18 +115,17 @@
"concurrently": "9.2.0",
"cross-env": "7.0.3",
"eslint": "8.57.1",
"eslint-config-next": "15.3.5",
"eslint-plugin-storybook": "9.0.16",
"eslint-config-next": "15.3.4",
"eslint-plugin-storybook": "9.0.14",
"import-in-the-middle": "1.14.2",
"msw": "2.10.3",
"msw": "2.10.2",
"msw-storybook-addon": "2.0.5",
"orval": "7.10.0",
"pbkdf2": "3.1.3",
"postcss": "8.5.6",
"prettier": "3.6.2",
"prettier-plugin-tailwindcss": "0.6.13",
"require-in-the-middle": "7.5.2",
"storybook": "9.0.16",
"storybook": "9.0.14",
"tailwindcss": "3.4.17",
"typescript": "5.8.3"
},

@@ -13,8 +13,6 @@ dotenv.config({ path: path.resolve(__dirname, "../backend/.env") });
*/
export default defineConfig({
testDir: "./src/tests",
/* Global setup file that runs before all tests */
globalSetup: "./src/tests/global-setup.ts",
/* Run tests in files in parallel */
fullyParallel: true,
/* Fail the build on CI if you accidentally left test.only in the source code. */

File diff suppressed because it is too large

@@ -1,23 +1,23 @@
"use client";
import SmartImage from "@/components/agptui/SmartImage";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import OnboardingButton from "@/components/onboarding/OnboardingButton";
import {
OnboardingHeader,
OnboardingStep,
OnboardingHeader,
} from "@/components/onboarding/OnboardingStep";
import { OnboardingText } from "@/components/onboarding/OnboardingText";
import StarRating from "@/components/onboarding/StarRating";
import SchemaTooltip from "@/components/SchemaTooltip";
import { TypeBasedInput } from "@/components/type-based-input";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { useToast } from "@/components/ui/use-toast";
import { Play } from "lucide-react";
import { cn } from "@/lib/utils";
import { useCallback, useEffect, useState } from "react";
import { GraphMeta, StoreAgentDetails } from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { cn } from "@/lib/utils";
import { Play } from "lucide-react";
import { useRouter } from "next/navigation";
import { useCallback, useEffect, useState } from "react";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import SchemaTooltip from "@/components/SchemaTooltip";
import { TypeBasedInput } from "@/components/type-based-input";
import SmartImage from "@/components/agptui/SmartImage";
import { useToast } from "@/components/ui/use-toast";
export default function Page() {
const { state, updateState, setStep } = useOnboarding(
@@ -52,7 +52,7 @@ export default function Page() {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const update: { [key: string]: any } = {};
// Set default values from schema
Object.entries(agent.input_schema?.properties || {}).forEach(
Object.entries(agent.input_schema.properties).forEach(
([key, value]) => {
// Skip if already set
if (state.agentInput && state.agentInput[key]) {
@@ -224,7 +224,7 @@ export default function Page() {
<CardTitle className="font-poppins text-lg">Input</CardTitle>
</CardHeader>
<CardContent className="flex flex-col gap-4">
{Object.entries(agent?.input_schema?.properties || {}).map(
{Object.entries(agent?.input_schema.properties || {}).map(
([key, inputSubSchema]) => (
<div key={key} className="flex flex-col space-y-2">
<label className="flex items-center gap-1 text-sm font-medium">

@@ -1,5 +1,4 @@
"use client";
import { useParams, useRouter } from "next/navigation";
import React, {
useCallback,
useEffect,
@@ -7,31 +6,31 @@ import React, {
useRef,
useState,
} from "react";
import { useParams, useRouter } from "next/navigation";
import { exportAsJSONFile } from "@/lib/utils";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import {
Graph,
GraphExecution,
GraphExecutionID,
GraphExecutionMeta,
Graph,
GraphID,
LibraryAgent,
LibraryAgentID,
LibraryAgentPreset,
LibraryAgentPresetID,
Schedule,
ScheduleID,
LibraryAgentPreset,
LibraryAgentPresetID,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { exportAsJSONFile } from "@/lib/utils";
import AgentRunDetailsView from "@/components/agents/agent-run-details-view";
import type { ButtonAction } from "@/components/agptui/types";
import DeleteConfirmDialog from "@/components/agptui/delete-confirm-dialog";
import AgentRunDraftView from "@/components/agents/agent-run-draft-view";
import AgentRunDetailsView from "@/components/agents/agent-run-details-view";
import AgentRunsSelectorList from "@/components/agents/agent-runs-selector-list";
import AgentScheduleDetailsView from "@/components/agents/agent-schedule-details-view";
import DeleteConfirmDialog from "@/components/agptui/delete-confirm-dialog";
import type { ButtonAction } from "@/components/agptui/types";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import { Button } from "@/components/ui/button";
import {
Dialog,
DialogContent,
@@ -40,8 +39,9 @@ import {
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import LoadingBox, { LoadingSpinner } from "@/components/ui/loading";
import { Button } from "@/components/ui/button";
import { useToast } from "@/components/ui/use-toast";
import LoadingBox, { LoadingSpinner } from "@/components/ui/loading";
export default function AgentRunsPage(): React.ReactElement {
const { id: agentID }: { id: LibraryAgentID } = useParams();
@@ -434,9 +434,6 @@ export default function AgentRunsPage(): React.ReactElement {
[agent, downloadGraph],
);
const runGraph =
graphVersions.current[selectedRun?.graph_version ?? 0] ?? graph;
const onCreateSchedule = useCallback(
(schedule: Schedule) => {
setSchedules((prev) => [...prev, schedule]);
@@ -499,16 +496,16 @@ export default function AgentRunsPage(): React.ReactElement {
{/* Run / Schedule views */}
{(selectedView.type == "run" && selectedView.id ? (
selectedRun && runGraph ? (
selectedRun && (
<AgentRunDetailsView
agent={agent}
graph={runGraph}
graph={graphVersions.current[selectedRun.graph_version] ?? graph}
run={selectedRun}
agentActions={agentActions}
onRun={selectRun}
deleteRun={() => setConfirmingDeleteAgentRun(selectedRun)}
/>
) : null
)
) : selectedView.type == "run" ? (
/* Draft new runs / Create new presets */
<AgentRunDraftView
@@ -532,8 +529,7 @@ export default function AgentRunsPage(): React.ReactElement {
agentActions={agentActions}
/>
) : selectedView.type == "schedule" ? (
selectedSchedule &&
graph && (
selectedSchedule && (
<AgentScheduleDetailsView
graph={graph}
schedule={selectedSchedule}

@@ -1,25 +0,0 @@
import { useState } from "react";
import { AgentTableRowProps } from "../AgentTableRow/AgentTableRow";
interface useAgentTableProps {
agents: Omit<
AgentTableRowProps,
| "setSelectedAgents"
| "selectedAgents"
| "onEditSubmission"
| "onDeleteSubmission"
>[];
}
export const useAgentTable = ({ agents }: useAgentTableProps) => {
const [selectedAgents, setSelectedAgents] = useState<Set<string>>(new Set());
const handleSelectAll = (e: React.ChangeEvent<HTMLInputElement>) => {
if (e.target.checked) {
setSelectedAgents(new Set(agents.map((agent) => agent.agent_id)));
} else {
setSelectedAgents(new Set());
}
};
return { selectedAgents, handleSelectAll, setSelectedAgents };
};

@@ -1,59 +0,0 @@
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
interface useAgentTableRowProps {
id: number;
onEditSubmission: (submission: StoreSubmissionRequest) => void;
onDeleteSubmission: (submission_id: string) => void;
agent_id: string;
agent_version: number;
agentName: string;
sub_heading: string;
description: string;
imageSrc: string[];
selectedAgents: Set<string>;
setSelectedAgents: React.Dispatch<React.SetStateAction<Set<string>>>;
}
export const useAgentTableRow = ({
id,
onEditSubmission,
onDeleteSubmission,
agent_id,
agent_version,
agentName,
sub_heading,
description,
imageSrc,
selectedAgents,
setSelectedAgents,
}: useAgentTableRowProps) => {
const checkboxId = `agent-${id}-checkbox`;
const handleEdit = () => {
onEditSubmission({
agent_id,
agent_version,
slug: "",
name: agentName,
sub_heading,
description,
image_urls: imageSrc,
categories: [],
} satisfies StoreSubmissionRequest);
};
const handleDelete = () => {
onDeleteSubmission(agent_id);
};
const handleCheckboxChange = () => {
if (selectedAgents.has(agent_id)) {
selectedAgents.delete(agent_id);
} else {
selectedAgents.add(agent_id);
}
setSelectedAgents(new Set(selectedAgents));
};
return { checkboxId, handleEdit, handleDelete, handleCheckboxChange };
};

@@ -1,91 +0,0 @@
import { PublishAgentPopout } from "@/components/agptui/composite/PublishAgentPopout";
import { Button } from "@/components/ui/button";
import { useMainDashboardPage } from "./useMainDashboardPage";
import { Separator } from "@/components/ui/separator";
import { AgentTable } from "../AgentTable/AgentTable";
import { StatusType } from "@/components/agptui/Status";
export const MainDashboardPage = () => {
const {
onOpenPopout,
onDeleteSubmission,
onEditSubmission,
submissions,
isLoading,
openPopout,
submissionData,
popoutStep,
} = useMainDashboardPage();
if (isLoading) {
return "Loading....";
}
return (
<main className="flex-1 py-8">
{/* Header Section */}
<div className="mb-8 flex flex-col gap-4 md:flex-row md:items-end md:justify-between">
<div className="space-y-6">
<h1 className="text-4xl font-medium text-neutral-900 dark:text-neutral-100">
Agent dashboard
</h1>
<div className="space-y-2">
<h2 className="text-xl font-medium text-neutral-900 dark:text-neutral-100">
Submit a New Agent
</h2>
<p className="text-sm text-[#707070] dark:text-neutral-400">
Select from the list of agents you currently have, or upload from
your local machine.
</p>
</div>
</div>
<PublishAgentPopout
trigger={
<Button
onClick={onOpenPopout}
className="h-9 rounded-full bg-black px-4 text-sm font-medium text-white hover:bg-neutral-700 dark:hover:bg-neutral-600"
>
Submit agent
</Button>
}
openPopout={openPopout}
inputStep={popoutStep}
submissionData={submissionData}
/>
</div>
<Separator className="mb-8" />
{/* Agents Section */}
<div>
<h2 className="mb-4 text-xl font-bold text-neutral-900 dark:text-neutral-100">
Your uploaded agents
</h2>
{submissions && (
<AgentTable
agents={
submissions?.submissions.map((submission, index) => ({
id: index,
agent_id: submission.agent_id,
agent_version: submission.agent_version,
sub_heading: submission.sub_heading,
date_submitted: submission.date_submitted,
agentName: submission.name,
description: submission.description,
imageSrc: submission.image_urls || [""],
dateSubmitted: new Date(
submission.date_submitted,
).toLocaleDateString(),
status: submission.status.toLowerCase() as StatusType,
runs: submission.runs,
rating: submission.rating,
})) || []
}
onEditSubmission={onEditSubmission}
onDeleteSubmission={onDeleteSubmission}
/>
)}
</div>
</main>
);
};

@@ -1,72 +0,0 @@
import {
getGetV2ListMySubmissionsQueryKey,
useDeleteV2DeleteStoreSubmission,
useGetV2ListMySubmissions,
} from "@/app/api/__generated__/endpoints/store/store";
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
import { StoreSubmissionsResponse } from "@/app/api/__generated__/models/storeSubmissionsResponse";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useState } from "react";
export const useMainDashboardPage = () => {
const queryClient = getQueryClient();
const { user } = useSupabase();
const [openPopout, setOpenPopout] = useState<boolean>(false);
const [submissionData, setSubmissionData] =
useState<StoreSubmissionRequest>();
const [popoutStep, setPopoutStep] = useState<"select" | "info" | "review">(
"info",
);
const { mutateAsync: deleteSubmission } = useDeleteV2DeleteStoreSubmission({
mutation: {
onSuccess: () => {
queryClient.invalidateQueries({
queryKey: getGetV2ListMySubmissionsQueryKey(),
});
},
},
});
const { data: submissions, isLoading } = useGetV2ListMySubmissions(
undefined,
{
query: {
select: (x) => {
return x.data as StoreSubmissionsResponse;
},
enabled: !!user,
},
},
);
const onEditSubmission = (submission: StoreSubmissionRequest) => {
setSubmissionData(submission);
setPopoutStep("review");
setOpenPopout(true);
};
const onDeleteSubmission = async (submission_id: string) => {
await deleteSubmission({
submissionId: submission_id,
});
};
const onOpenPopout = () => {
setPopoutStep("select");
setOpenPopout(true);
};
return {
onOpenPopout,
onDeleteSubmission,
onEditSubmission,
submissions,
isLoading,
openPopout,
submissionData,
popoutStep,
};
};

@@ -1,7 +1,133 @@
"use client";
import { MainDashboardPage } from "./components/MainDashboardPage/MainDashboardPage";
import * as React from "react";
import { AgentTable } from "@/components/agptui/AgentTable";
import { Button } from "@/components/agptui/Button";
import { Separator } from "@/components/ui/separator";
import { StatusType } from "@/components/agptui/Status";
import { PublishAgentPopout } from "@/components/agptui/composite/PublishAgentPopout";
import { useCallback, useEffect, useState } from "react";
import {
StoreSubmissionsResponse,
StoreSubmissionRequest,
} from "@/lib/autogpt-server-api/types";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
export default function Page() {
return <MainDashboardPage />;
const { supabase } = useSupabase();
const api = useBackendAPI();
const [submissions, setSubmissions] = useState<StoreSubmissionsResponse>();
const [openPopout, setOpenPopout] = useState<boolean>(false);
const [submissionData, setSubmissionData] =
useState<StoreSubmissionRequest>();
const [popoutStep, setPopoutStep] = useState<"select" | "info" | "review">(
"info",
);
const fetchData = useCallback(async () => {
try {
const submissions = await api.getStoreSubmissions();
setSubmissions(submissions);
} catch (error) {
console.error("Error fetching submissions:", error);
}
}, [api]);
useEffect(() => {
if (!supabase) {
return;
}
fetchData();
}, [supabase, fetchData]);
const onEditSubmission = useCallback((submission: StoreSubmissionRequest) => {
setSubmissionData(submission);
setPopoutStep("review");
setOpenPopout(true);
}, []);
const onDeleteSubmission = useCallback(
(submission_id: string) => {
if (!supabase) {
return;
}
api.deleteStoreSubmission(submission_id);
fetchData();
},
[api, supabase, fetchData],
);
const onOpenPopout = useCallback(() => {
setPopoutStep("select");
setOpenPopout(true);
}, []);
return (
<main className="flex-1 py-8">
{/* Header Section */}
<div className="mb-8 flex flex-col gap-4 md:flex-row md:items-end md:justify-between">
<div className="space-y-6">
<h1 className="text-4xl font-medium text-neutral-900 dark:text-neutral-100">
Agent dashboard
</h1>
<div className="space-y-2">
<h2 className="text-xl font-medium text-neutral-900 dark:text-neutral-100">
Submit a New Agent
</h2>
<p className="text-sm text-[#707070] dark:text-neutral-400">
Select from the list of agents you currently have, or upload from
your local machine.
</p>
</div>
</div>
<PublishAgentPopout
trigger={
<Button
onClick={onOpenPopout}
className="h-9 rounded-full bg-black px-4 text-sm font-medium text-white hover:bg-neutral-700 dark:hover:bg-neutral-600"
>
Submit agent
</Button>
}
openPopout={openPopout}
inputStep={popoutStep}
submissionData={submissionData}
/>
</div>
<Separator className="mb-8" />
{/* Agents Section */}
<div>
<h2 className="mb-4 text-xl font-bold text-neutral-900 dark:text-neutral-100">
Your uploaded agents
</h2>
{submissions && (
<AgentTable
agents={
submissions?.submissions.map((submission, index) => ({
id: index,
agent_id: submission.agent_id,
agent_version: submission.agent_version,
sub_heading: submission.sub_heading,
date_submitted: submission.date_submitted,
agentName: submission.name,
description: submission.description,
imageSrc: submission.image_urls || [""],
dateSubmitted: new Date(
submission.date_submitted,
).toLocaleDateString(),
status: submission.status.toLowerCase() as StatusType,
runs: submission.runs,
rating: submission.rating,
})) || []
}
onEditSubmission={onEditSubmission}
onDeleteSubmission={onDeleteSubmission}
/>
)}
</div>
</main>
);
}

@@ -27,7 +27,7 @@ export async function sendResetEmail(email: string, turnstileToken: string) {
}
const { error } = await supabase.auth.resetPasswordForEmail(email, {
redirectTo: `${origin}/api/auth/callback/reset-password`,
redirectTo: `${origin}/reset-password`,
});
if (error) {

@@ -18,23 +18,18 @@ import {
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import LoadingBox from "@/components/ui/loading";
import { useToast } from "@/components/ui/use-toast";
import { useTurnstile } from "@/hooks/useTurnstile";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { getBehaveAs } from "@/lib/utils";
import { changePasswordFormSchema, sendEmailFormSchema } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter, useSearchParams } from "next/navigation";
import { useCallback, useEffect, useState } from "react";
import { useCallback, useState } from "react";
import { useForm } from "react-hook-form";
import { z } from "zod";
import { changePassword, sendResetEmail } from "./actions";
export default function ResetPasswordPage() {
const { supabase, user, isUserLoading } = useSupabase();
const { toast } = useToast();
const searchParams = useSearchParams();
const router = useRouter();
const [isLoading, setIsLoading] = useState(false);
const [feedback, setFeedback] = useState<string | null>(null);
const [isError, setIsError] = useState(false);
@@ -42,21 +37,6 @@ export default function ResetPasswordPage() {
const [sendEmailCaptchaKey, setSendEmailCaptchaKey] = useState(0);
const [changePasswordCaptchaKey, setChangePasswordCaptchaKey] = useState(0);
useEffect(() => {
const error = searchParams.get("error");
if (error) {
toast({
title: "Password Reset Failed",
description: error,
variant: "destructive",
});
const newUrl = new URL(window.location.href);
newUrl.searchParams.delete("error");
router.replace(newUrl.pathname + newUrl.search);
}
}, [searchParams, toast, router]);
const sendEmailTurnstile = useTurnstile({
action: "reset_password",
autoVerify: false,

@@ -1,41 +0,0 @@
import { exchangePasswordResetCode } from "@/lib/supabase/helpers";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { NextRequest, NextResponse } from "next/server";
export async function GET(request: NextRequest) {
const { searchParams } = new URL(request.url);
const code = searchParams.get("code");
const origin =
process.env.NEXT_PUBLIC_FRONTEND_BASE_URL || "http://localhost:3000";
if (!code) {
return NextResponse.redirect(
`${origin}/reset-password?error=Missing verification code`,
);
}
try {
const supabase = await getServerSupabase();
if (!supabase) {
return NextResponse.redirect(
`${origin}/reset-password?error=no-auth-client`,
);
}
const result = await exchangePasswordResetCode(supabase, code);
if (!result.success) {
return NextResponse.redirect(
`${origin}/reset-password?error=${encodeURIComponent(result.error || "Password reset failed")}`,
);
}
return NextResponse.redirect(`${origin}/reset-password`);
} catch (error) {
console.error("Password reset callback error:", error);
return NextResponse.redirect(
`${origin}/reset-password?error=Password reset failed`,
);
}
}

@@ -3237,6 +3237,48 @@
}
}
},
"/api/library/agents/by-graph/{graph_id}": {
"get": {
"tags": ["v2", "library", "private"],
"summary": "Get Library Agent By Graph Id",
"operationId": "getV2GetLibraryAgentByGraphId",
"parameters": [
{
"name": "graph_id",
"in": "path",
"required": true,
"schema": { "type": "string", "title": "Graph Id" }
},
{
"name": "version",
"in": "query",
"required": false,
"schema": {
"anyOf": [{ "type": "integer" }, { "type": "null" }],
"title": "Version"
}
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/LibraryAgent" }
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
}
},
"/api/library/agents/marketplace/{store_listing_version_id}": {
"get": {
"tags": ["v2", "library", "private", "store, library"],
@@ -5142,6 +5184,7 @@
"AGENT_INPUT",
"CONGRATS",
"GET_RESULTS",
"RUN_AGENTS",
"MARKETPLACE_VISIT",
"MARKETPLACE_ADD_AGENT",
"MARKETPLACE_RUN_AGENT",

@@ -23,16 +23,7 @@ async function handleJsonRequest(
method: string,
backendUrl: string,
): Promise<any> {
let payload;
try {
payload = await req.json();
} catch (error) {
// Handle cases where request body is empty, invalid JSON, or already consumed
console.warn("Failed to parse JSON from request body:", error);
payload = null;
}
const payload = await req.json();
return await makeAuthenticatedRequest(
method,
backendUrl,
@@ -86,7 +77,7 @@ function createUnsupportedContentTypeResponse(
"application/x-www-form-urlencoded",
],
},
{ status: 415 },
{ status: 415 }, // Unsupported Media Type
);
}
@@ -110,19 +101,9 @@ function createErrorResponse(error: unknown): NextResponse {
// If it's our custom ApiError, preserve the original status and response
if (error instanceof ApiError) {
return NextResponse.json(error.response || { error: error.message }, {
status: error.status,
});
}
// For JSON parsing errors, provide more context
if (error instanceof SyntaxError && error.message.includes("JSON")) {
return NextResponse.json(
{
error: "Invalid response from backend",
detail: error.message ?? "Backend returned non-JSON response",
},
{ status: 502 },
error.response || { error: error.message, detail: error.message },
{ status: error.status },
);
}
@@ -131,7 +112,7 @@ function createErrorResponse(error: unknown): NextResponse {
error instanceof Error ? error.message : "An unknown error occurred";
return NextResponse.json(
{ error: "Proxy request failed", detail },
{ status: 500 },
{ status: 500 }, // Internal Server Error
);
}

@@ -43,7 +43,7 @@ import { CustomEdge } from "./CustomEdge";
import ConnectionLine from "./ConnectionLine";
import { Control, ControlPanel } from "@/components/edit/control/ControlPanel";
import { SaveControl } from "@/components/edit/control/SaveControl";
import { BlocksControl } from "@/components/edit/control/BlocksControl/BlocksControl";
import { BlocksControl } from "@/components/edit/control/BlocksControl";
import { IconUndo2, IconRedo2 } from "@/components/ui/icons";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import { startTutorial } from "./tutorial";

@@ -1,8 +1,9 @@
"use client";
import React, { useCallback, useMemo } from "react";
import { isEmpty } from "lodash";
import moment from "moment";
import React, { useCallback, useMemo } from "react";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import {
Graph,
GraphExecution,
@@ -10,15 +11,14 @@ import {
GraphExecutionMeta,
LibraryAgent,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import type { ButtonAction } from "@/components/agptui/types";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { IconRefresh, IconSquare } from "@/components/ui/icons";
import { Input } from "@/components/ui/input";
import LoadingBox from "@/components/ui/loading";
import { useToastOnFail } from "@/components/ui/use-toast";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import LoadingBox from "@/components/ui/loading";
import { Input } from "@/components/ui/input";
import {
AgentRunStatus,
@@ -199,7 +199,7 @@ export default function AgentRunDetailsView({
stopRun,
deleteRun,
graph.has_webhook_trigger,
graph.credentials_input_schema?.properties,
graph.credentials_input_schema.properties,
agent.can_access_graph,
run.graph_id,
run.graph_version,
@@ -242,7 +242,7 @@ export default function AgentRunDetailsView({
</label>
{values.map((value, i) => (
<p
className="resize-none overflow-x-auto whitespace-pre-wrap break-words border-none text-sm text-neutral-700 disabled:cursor-not-allowed"
className="resize-none whitespace-pre-wrap break-words border-none text-sm text-neutral-700 disabled:cursor-not-allowed"
key={i}
>
{value}

@@ -1,6 +1,7 @@
"use client";
import React, { useCallback, useEffect, useMemo, useState } from "react";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import {
CredentialsMetaInput,
GraphExecutionID,
@@ -10,21 +11,21 @@ import {
LibraryAgentPresetUpdatable,
Schedule,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import type { ButtonAction } from "@/components/agptui/types";
import { CronSchedulerDialog } from "@/components/cron-scheduler-dialog";
import { CredentialsInput } from "@/components/integrations/credentials-input";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import SchemaTooltip from "@/components/SchemaTooltip";
import { TypeBasedInput } from "@/components/type-based-input";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { IconCross, IconPlay, IconSave } from "@/components/ui/icons";
import { Input } from "@/components/ui/input";
import { useToast, useToastOnFail } from "@/components/ui/use-toast";
import { isEmpty } from "lodash";
import { CalendarClockIcon, Trash2Icon } from "lucide-react";
import { CronSchedulerDialog } from "@/components/cron-scheduler-dialog";
import { CredentialsInput } from "@/components/integrations/credentials-input";
import { TypeBasedInput } from "@/components/type-based-input";
import { useToastOnFail } from "@/components/ui/use-toast";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import SchemaTooltip from "@/components/SchemaTooltip";
import { useToast } from "@/components/ui/use-toast";
import { isEmpty } from "lodash";
import { Input } from "@/components/ui/input";
export default function AgentRunDraftView({
agent,
@@ -90,27 +91,20 @@ export default function AgentRunDraftView({
const agentInputFields = useMemo(
() =>
Object.fromEntries(
Object.entries(agentInputSchema?.properties || {}).filter(
Object.entries(agentInputSchema.properties).filter(
([_, subSchema]) => !subSchema.hidden,
),
),
[agentInputSchema],
);
const agentCredentialsInputFields = useMemo(
() => agent.credentials_input_schema?.properties || {},
() => agent.credentials_input_schema.properties,
[agent],
);
const [allRequiredInputsAreSet, missingInputs] = useMemo(() => {
const nonEmptyInputs = new Set(
Object.keys(inputValues).filter((k) => {
const value = inputValues[k];
return (
value !== undefined &&
value !== "" &&
(typeof value !== "object" || !isEmpty(value))
);
}),
Object.keys(inputValues).filter((k) => !isEmpty(inputValues[k])),
);
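// Note (editor's illustration, not part of this file): the two filters above
// differ for primitive inputs. Lodash's isEmpty() checks length/size and
// treats numbers and booleans as empty:
//   isEmpty(0)     // => true
//   isEmpty(42)    // => true
//   isEmpty(false) // => true
//   isEmpty("a")   // => false
// The explicit `value !== undefined && value !== ""` variant keeps numeric and
// boolean inputs set by the user, while the bare isEmpty() variant drops them.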
const requiredInputs = new Set(
agentInputSchema.required as string[] | undefined,

View File

@@ -9,16 +9,16 @@ import {
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { AgentRunStatus } from "@/components/agents/agent-run-status-chip";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import type { ButtonAction } from "@/components/agptui/types";
import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card";
import { IconCross } from "@/components/ui/icons";
import { Input } from "@/components/ui/input";
import LoadingBox from "@/components/ui/loading";
import { useToastOnFail } from "@/components/ui/use-toast";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";
import { AgentRunStatus } from "@/components/agents/agent-run-status-chip";
import { useToastOnFail } from "@/components/ui/use-toast";
import ActionButtonGroup from "@/components/agptui/action-button-group";
import { IconCross } from "@/components/ui/icons";
import { PlayIcon } from "lucide-react";
import LoadingBox from "@/components/ui/loading";
import { Input } from "@/components/ui/input";
export default function AgentScheduleDetailsView({
graph,

View File

@@ -1,13 +1,9 @@
"use client";
import * as React from "react";
import { AgentTableCard } from "../AgentTableCard/AgentTableCard";
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
import { useAgentTable } from "./useAgentTable";
import {
AgentTableRow,
AgentTableRowProps,
} from "../AgentTableRow/AgentTableRow";
import { AgentTableRow, AgentTableRowProps } from "./AgentTableRow";
import { AgentTableCard } from "./AgentTableCard";
import { StoreSubmissionRequest } from "@/lib/autogpt-server-api/types";
export interface AgentTableProps {
agents: Omit<
@@ -26,9 +22,22 @@ export const AgentTable: React.FC<AgentTableProps> = ({
onEditSubmission,
onDeleteSubmission,
}) => {
const { selectedAgents, handleSelectAll, setSelectedAgents } = useAgentTable({
agents,
});
// Use state to track selected agents
const [selectedAgents, setSelectedAgents] = React.useState<Set<string>>(
new Set(),
);
// Handle select all checkbox
const handleSelectAll = React.useCallback(
(e: React.ChangeEvent<HTMLInputElement>) => {
if (e.target.checked) {
setSelectedAgents(new Set(agents.map((agent) => agent.agent_id)));
} else {
setSelectedAgents(new Set());
}
},
[agents],
);
return (
<div className="w-full">

View File

@@ -1,9 +1,10 @@
"use client";
import * as React from "react";
import Image from "next/image";
import { IconStarFilled, IconMore } from "@/components/ui/icons";
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
import { Status, StatusType } from "@/components/agptui/Status";
import { Status, StatusType } from "./Status";
import { StoreSubmissionRequest } from "@/lib/autogpt-server-api";
export interface AgentTableCardProps {
agent_id: string;
@@ -20,7 +21,7 @@ export interface AgentTableCardProps {
onEditSubmission: (submission: StoreSubmissionRequest) => void;
}
export const AgentTableCard = ({
export const AgentTableCard: React.FC<AgentTableCardProps> = ({
agent_id,
agent_version,
agentName,
@@ -32,8 +33,9 @@ export const AgentTableCard = ({
runs,
rating,
onEditSubmission,
}: AgentTableCardProps) => {
}) => {
const onEdit = () => {
console.log("Edit agent", agentName);
onEditSubmission({
agent_id,
agent_version,

View File

@@ -1,12 +1,12 @@
"use client";
import * as React from "react";
import Image from "next/image";
import { IconStarFilled, IconMore, IconEdit } from "@/components/ui/icons";
import { Status, StatusType } from "./Status";
import * as DropdownMenu from "@radix-ui/react-dropdown-menu";
import { TrashIcon } from "@radix-ui/react-icons";
import { Status, StatusType } from "@/components/agptui/Status";
import { useAgentTableRow } from "./useAgentTableRow";
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
import { StoreSubmissionRequest } from "@/lib/autogpt-server-api/types";
export interface AgentTableRowProps {
agent_id: string;
@@ -27,7 +27,7 @@ export interface AgentTableRowProps {
onDeleteSubmission: (submission_id: string) => void;
}
export const AgentTableRow = ({
export const AgentTableRow: React.FC<AgentTableRowProps> = ({
agent_id,
agent_version,
agentName,
@@ -43,21 +43,43 @@ export const AgentTableRow = ({
setSelectedAgents,
onEditSubmission,
onDeleteSubmission,
}: AgentTableRowProps) => {
const { checkboxId, handleEdit, handleDelete, handleCheckboxChange } =
useAgentTableRow({
id,
onEditSubmission,
onDeleteSubmission,
}) => {
// Create a unique ID for the checkbox
const checkboxId = `agent-${id}-checkbox`;
const handleEdit = React.useCallback(() => {
onEditSubmission({
agent_id,
agent_version,
agentName,
slug: "",
name: agentName,
sub_heading,
description,
imageSrc,
selectedAgents,
setSelectedAgents,
});
image_urls: imageSrc,
categories: [],
} satisfies StoreSubmissionRequest);
}, [
agent_id,
agent_version,
agentName,
sub_heading,
description,
imageSrc,
onEditSubmission,
]);
const handleDelete = React.useCallback(() => {
onDeleteSubmission(agent_id);
}, [agent_id, onDeleteSubmission]);
const handleCheckboxChange = React.useCallback(() => {
if (selectedAgents.has(agent_id)) {
selectedAgents.delete(agent_id);
} else {
selectedAgents.add(agent_id);
}
setSelectedAgents(new Set(selectedAgents));
}, [agent_id, selectedAgents, setSelectedAgents]);
return (
<div className="hidden items-center border-b border-neutral-300 px-4 py-4 hover:bg-neutral-50 dark:border-neutral-700 dark:hover:bg-neutral-800 md:flex">

View File

@@ -14,11 +14,13 @@ import {
} from "../PublishAgentSelectInfo";
import { PublishAgentAwaitingReview } from "../PublishAgentAwaitingReview";
import { Button } from "../Button";
import { MyAgentsResponse } from "@/lib/autogpt-server-api";
import {
StoreSubmissionRequest,
MyAgentsResponse,
} from "@/lib/autogpt-server-api";
import { useRouter } from "next/navigation";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { useToast } from "@/components/ui/use-toast";
import { StoreSubmissionRequest } from "@/app/api/__generated__/models/storeSubmissionRequest";
interface PublishAgentPopoutProps {
trigger?: React.ReactNode;
openPopout?: boolean;
@@ -261,8 +263,8 @@ export const PublishAgentPopout: React.FC<PublishAgentPopoutProps> = ({
<PublishAgentAwaitingReview
agentName={publishData.name}
subheader={publishData.sub_heading}
description={publishData.description || ""}
thumbnailSrc={publishData.image_urls?.[0]}
description={publishData.description}
thumbnailSrc={publishData.image_urls[0]}
onClose={handleClose}
onDone={handleClose}
onViewProgress={() => {

View File

@@ -0,0 +1,339 @@
import React, { useState, useMemo } from "react";
import { Card, CardContent, CardHeader } from "@/components/ui/card";
import { Label } from "@/components/ui/label";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { TextRenderer } from "@/components/ui/render";
import { ScrollArea } from "@/components/ui/scroll-area";
import { CustomNode } from "@/components/CustomNode";
import { beautifyString } from "@/lib/utils";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { Block, BlockUIType, SpecialBlockID } from "@/lib/autogpt-server-api";
import { MagnifyingGlassIcon, PlusIcon } from "@radix-ui/react-icons";
import { IconToyBrick } from "@/components/ui/icons";
import { getPrimaryCategoryColor } from "@/lib/utils";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { GraphMeta } from "@/lib/autogpt-server-api";
import jaro from "jaro-winkler";
interface BlocksControlProps {
blocks: Block[];
addBlock: (
id: string,
name: string,
hardcodedValues: Record<string, any>,
) => void;
pinBlocksPopover: boolean;
flows: GraphMeta[];
nodes: CustomNode[];
}
/**
* A React functional component that displays a control for managing blocks.
*
* @component
* @param {Object} BlocksControlProps - The properties for the BlocksControl component.
* @param {Block[]} BlocksControlProps.blocks - An array of blocks to be displayed and filtered.
* @param {(id: string, name: string, hardcodedValues: Record<string, any>) => void} BlocksControlProps.addBlock - A function to call when a block is added.
* @returns The rendered BlocksControl component.
*/
export const BlocksControl: React.FC<BlocksControlProps> = ({
blocks,
addBlock,
pinBlocksPopover,
flows,
nodes,
}) => {
const [searchQuery, setSearchQuery] = useState("");
const [selectedCategory, setSelectedCategory] = useState<string | null>(null);
const graphHasWebhookNodes = nodes.some((n) =>
[BlockUIType.WEBHOOK, BlockUIType.WEBHOOK_MANUAL].includes(n.data.uiType),
);
const graphHasInputNodes = nodes.some(
(n) => n.data.uiType == BlockUIType.INPUT,
);
const filteredAvailableBlocks = useMemo(() => {
const blockList = blocks
.filter((b) => b.uiType !== BlockUIType.AGENT)
.sort((a, b) => a.name.localeCompare(b.name));
const agentBlockList = flows.map(
(flow) =>
({
id: SpecialBlockID.AGENT,
name: flow.name,
description:
`Ver.${flow.version}` +
(flow.description ? ` | ${flow.description}` : ""),
categories: [{ category: "AGENT", description: "" }],
inputSchema: flow.input_schema,
outputSchema: flow.output_schema,
staticOutput: false,
uiType: BlockUIType.AGENT,
uiKey: flow.id,
costs: [],
hardcodedValues: {
graph_id: flow.id,
graph_version: flow.version,
input_schema: flow.input_schema,
output_schema: flow.output_schema,
},
}) satisfies Block,
);
/**
* Evaluates how well a block matches the search query and returns a relevance score.
* The scoring algorithm works as follows:
* - Returns 1 if no query (all blocks match equally)
* - Normalizes the query for case-insensitive matching
* - Returns 3 for exact substring matches in the block name (highest priority)
* - Returns 2 when all query words appear in the block name (regardless of order)
* - Returns 1 + similarity (a score between 1 and 2) for names close to the query under Jaro-Winkler distance
* - Returns 0.5 when all query words appear in the block description (lowest priority)
* - Returns 0 for no match
*
* Higher scores will appear first in search results.
*/
const matchesSearch = (block: Block, query: string): number => {
if (!query) return 1;
const normalizedQuery = query.toLowerCase().trim();
const queryWords = normalizedQuery.split(/\s+/);
const blockName = block.name.toLowerCase();
const beautifiedName = beautifyString(block.name).toLowerCase();
const description = block.description.toLowerCase();
// 1. Exact match in name (highest priority)
if (
blockName.includes(normalizedQuery) ||
beautifiedName.includes(normalizedQuery)
) {
return 3;
}
// 2. All query words in name (regardless of order)
const allWordsInName = queryWords.every(
(word) => blockName.includes(word) || beautifiedName.includes(word),
);
if (allWordsInName) return 2;
// 3. Similarity with name (Jaro-Winkler)
const similarityThreshold = 0.65;
const nameSimilarity = jaro(blockName, normalizedQuery);
const beautifiedSimilarity = jaro(beautifiedName, normalizedQuery);
const maxSimilarity = Math.max(nameSimilarity, beautifiedSimilarity);
if (maxSimilarity > similarityThreshold) {
return 1 + maxSimilarity; // Score between 1 and 2
}
// 4. All query words in description (lower priority)
const allWordsInDescription = queryWords.every((word) =>
description.includes(word),
);
if (allWordsInDescription) return 0.5;
return 0;
};
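// Illustrative scores (a sketch, not part of this file), assuming a
// hypothetical "ReadRSSFeedBlock" beautified to "Read RSS Feed Block":
//   matchesSearch(block, "read rss")           // => 3    (substring of the name)
//   matchesSearch(block, "rss read")           // => 2    (all words in name, any order)
//   matchesSearch(block, "read rss fed block") // => ~1.98 (Jaro-Winkler near-miss)
//   matchesSearch(block, "")                   // => 1    (empty query matches all)
//   matchesSearch(block, "no such thing here") // => 0    (assuming no description hit)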
return blockList
.concat(agentBlockList)
.map((block) => ({
block,
score: matchesSearch(block, searchQuery),
}))
.filter(
({ block, score }) =>
score > 0 &&
(!selectedCategory ||
block.categories.some((cat) => cat.category === selectedCategory)),
)
.sort((a, b) => b.score - a.score)
.map(({ block }) => ({
...block,
notAvailable:
(block.uiType == BlockUIType.WEBHOOK &&
graphHasWebhookNodes &&
"Agents can only have one webhook-triggered block") ||
(block.uiType == BlockUIType.WEBHOOK &&
graphHasInputNodes &&
"Webhook-triggered blocks can't be used together with input blocks") ||
(block.uiType == BlockUIType.INPUT &&
graphHasWebhookNodes &&
"Input blocks can't be used together with a webhook-triggered block") ||
null,
}));
}, [
blocks,
flows,
searchQuery,
selectedCategory,
graphHasInputNodes,
graphHasWebhookNodes,
]);
const resetFilters = React.useCallback(() => {
setSearchQuery("");
setSelectedCategory(null);
}, []);
// Extract unique categories from blocks
const categories = Array.from(
new Set([
null,
...blocks
.flatMap((block) => block.categories.map((cat) => cat.category))
.sort(),
]),
);
return (
<Popover
open={pinBlocksPopover ? true : undefined}
onOpenChange={(open) => open || resetFilters()}
>
<Tooltip delayDuration={500}>
<TooltipTrigger asChild>
<PopoverTrigger asChild>
<Button
variant="ghost"
size="icon"
data-id="blocks-control-popover-trigger"
data-testid="blocks-control-blocks-button"
name="Blocks"
className="dark:hover:bg-slate-800"
>
<IconToyBrick />
</Button>
</PopoverTrigger>
</TooltipTrigger>
<TooltipContent side="right">Blocks</TooltipContent>
</Tooltip>
<PopoverContent
side="right"
sideOffset={22}
align="start"
className="absolute -top-3 w-[17rem] rounded-xl border-none p-0 shadow-none md:w-[30rem]"
data-id="blocks-control-popover-content"
>
<Card className="p-3 pb-0 dark:bg-slate-900">
<CardHeader className="flex flex-col gap-x-8 gap-y-1 p-3 px-2">
<div className="items-center justify-between">
<Label
htmlFor="search-blocks"
className="whitespace-nowrap text-base font-bold text-black dark:text-white 2xl:text-xl"
data-id="blocks-control-label"
data-testid="blocks-control-blocks-label"
>
Blocks
</Label>
</div>
<div className="relative flex items-center">
<MagnifyingGlassIcon className="absolute m-2 h-5 w-5 text-gray-500 dark:text-gray-400" />
<Input
id="search-blocks"
type="text"
placeholder="Search blocks"
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="rounded-lg px-8 py-5 dark:bg-slate-800 dark:text-white"
data-id="blocks-control-search-input"
/>
</div>
<div className="mt-2 flex flex-wrap gap-2">
{categories.map((category) => {
const color = getPrimaryCategoryColor([
{ category: category || "All", description: "" },
]);
const colorClass =
selectedCategory === category ? `${color}` : "";
return (
<div
key={category}
className={`cursor-pointer rounded-xl border px-2 py-2 text-xs font-medium dark:border-slate-700 dark:text-white ${colorClass}`}
onClick={() =>
setSelectedCategory(
selectedCategory === category ? null : category,
)
}
>
{beautifyString((category || "All").toLowerCase())}
</div>
);
})}
</div>
</CardHeader>
<CardContent className="overflow-scroll border-t border-t-gray-200 p-0 dark:border-t-slate-700">
<ScrollArea
className="h-[60vh] w-full"
data-id="blocks-control-scroll-area"
>
{filteredAvailableBlocks.map((block) => (
<Card
key={block.uiKey || block.id}
className={`m-2 my-4 flex h-20 shadow-none dark:border-slate-700 dark:bg-slate-800 dark:text-slate-100 dark:hover:bg-slate-700 ${
block.notAvailable
? "cursor-not-allowed opacity-50"
: "cursor-pointer hover:shadow-lg"
}`}
data-id={`block-card-${block.id}`}
onClick={() =>
!block.notAvailable &&
addBlock(block.id, block.name, block?.hardcodedValues || {})
}
title={block.notAvailable ?? undefined}
>
<div
className={`-ml-px h-full w-3 rounded-l-xl ${getPrimaryCategoryColor(block.categories)}`}
></div>
<div className="mx-3 flex flex-1 items-center justify-between">
<div className="mr-2 min-w-0">
<span
className="block truncate pb-1 text-sm font-semibold dark:text-white"
data-id={`block-name-${block.id}`}
data-type={block.uiType}
data-testid={`block-name-${block.id}`}
>
<TextRenderer
value={beautifyString(block.name).replace(
/ Block$/,
"",
)}
truncateLengthLimit={45}
/>
</span>
<span
className="block break-all text-xs font-normal text-gray-500 dark:text-gray-400"
data-testid={`block-description-${block.id}`}
>
<TextRenderer
value={block.description}
truncateLengthLimit={165}
/>
</span>
</div>
<div
className="flex flex-shrink-0 items-center gap-1"
data-id={`block-tooltip-${block.id}`}
data-testid={`block-add`}
>
<PlusIcon className="h-6 w-6 rounded-lg bg-gray-200 stroke-black stroke-[0.5px] p-1 dark:bg-gray-700 dark:stroke-white" />
</div>
</div>
</Card>
))}
</ScrollArea>
</CardContent>
</Card>
</PopoverContent>
</Popover>
);
};

View File

@@ -1,233 +0,0 @@
import React, { useRef } from "react";
import { useVirtualizer, VirtualItem } from "@tanstack/react-virtual";
import { Card, CardContent, CardHeader } from "@/components/ui/card";
import { Label } from "@/components/ui/label";
import { Button } from "@/components/ui/button";
import { Input } from "@/components/ui/input";
import { TextRenderer } from "@/components/ui/render";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { MagnifyingGlassIcon, PlusIcon } from "@radix-ui/react-icons";
import { IconToyBrick } from "@/components/ui/icons";
import { getPrimaryCategoryColor, beautifyString } from "@/lib/utils";
import {
Tooltip,
TooltipContent,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { Block, GraphMeta } from "@/lib/autogpt-server-api";
import { CustomNode } from "@/components/CustomNode";
import { useBlocksControl } from "./useBlocksControl";
interface Props {
blocks: Block[];
addBlock: (
id: string,
name: string,
hardcodedValues: Record<string, any>,
) => void;
pinBlocksPopover: boolean;
flows: GraphMeta[];
nodes: CustomNode[];
}
function BlocksControlComponent({
blocks,
addBlock,
pinBlocksPopover,
flows,
nodes,
}: Props) {
const {
searchQuery,
setSearchQuery,
selectedCategory,
filteredAvailableBlocks,
categories,
resetFilters,
handleCategoryClick,
handleAddBlock,
} = useBlocksControl({
blocks,
flows,
nodes,
addBlock,
});
const parentRef = useRef<HTMLDivElement>(null);
const virtualizer = useVirtualizer({
count: filteredAvailableBlocks.length,
getScrollElement: () => parentRef.current,
estimateSize: () => 96, // 80px height + 16px margin (my-4)
overscan: 5,
});
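// Illustrative arithmetic (editor's note, not part of this file): with 200
// filtered blocks, getTotalSize() reports 200 * 96 = 19200px of virtual scroll
// height, but only the rows visible in the 60vh container (plus 5 overscan
// rows on either side) are actually mounted; each one is absolutely
// positioned via translateY(virtualItem.start).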
return (
<Popover
open={pinBlocksPopover ? true : undefined}
onOpenChange={(open) => open || resetFilters()}
>
<Tooltip delayDuration={500}>
<TooltipTrigger asChild>
<PopoverTrigger asChild>
<Button
variant="ghost"
size="icon"
data-id="blocks-control-popover-trigger"
data-testid="blocks-control-blocks-button"
name="Blocks"
className="dark:hover:bg-slate-800"
>
<IconToyBrick />
</Button>
</PopoverTrigger>
</TooltipTrigger>
<TooltipContent side="right">Blocks</TooltipContent>
</Tooltip>
<PopoverContent
side="right"
sideOffset={22}
align="start"
className="absolute -top-3 w-[17rem] rounded-xl border-none p-0 shadow-none md:w-[30rem]"
data-id="blocks-control-popover-content"
>
<Card className="p-3 pb-0 dark:bg-slate-900">
<CardHeader className="flex flex-col gap-x-8 gap-y-1 p-3 px-2">
<div className="items-center justify-between">
<Label
htmlFor="search-blocks"
className="whitespace-nowrap text-base font-bold text-black dark:text-white 2xl:text-xl"
data-id="blocks-control-label"
data-testid="blocks-control-blocks-label"
>
Blocks
</Label>
</div>
<div className="relative flex items-center">
<MagnifyingGlassIcon className="absolute m-2 h-5 w-5 text-gray-500 dark:text-gray-400" />
<Input
id="search-blocks"
type="text"
placeholder="Search blocks"
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
className="rounded-lg px-8 py-5 dark:bg-slate-800 dark:text-white"
data-id="blocks-control-search-input"
/>
</div>
<div className="mt-2 flex flex-wrap gap-2">
{categories.map((category) => {
const color = getPrimaryCategoryColor([
{ category: category || "All", description: "" },
]);
const colorClass =
selectedCategory === category ? `${color}` : "";
return (
<div
key={category}
className={`cursor-pointer rounded-xl border px-2 py-2 text-xs font-medium dark:border-slate-700 dark:text-white ${colorClass}`}
onClick={() => handleCategoryClick(category)}
>
{beautifyString((category || "All").toLowerCase())}
</div>
);
})}
</div>
</CardHeader>
<CardContent className="border-t border-t-gray-200 p-0 dark:border-t-slate-700">
<div
ref={parentRef}
className="h-[60vh] w-full overflow-auto"
data-id="blocks-control-scroll-area"
>
<div
style={{
height: `${virtualizer.getTotalSize()}px`,
width: "100%",
position: "relative",
}}
>
{virtualizer
.getVirtualItems()
.map((virtualItem: VirtualItem) => {
const block = filteredAvailableBlocks[virtualItem.index];
return (
<div
key={virtualItem.key}
style={{
position: "absolute",
top: 0,
left: 0,
width: "100%",
height: `${virtualItem.size}px`,
transform: `translateY(${virtualItem.start}px)`,
}}
>
<Card
className={`m-2 my-4 flex h-20 shadow-none dark:border-slate-700 dark:bg-slate-800 dark:text-slate-100 dark:hover:bg-slate-700 ${
block.notAvailable
? "cursor-not-allowed opacity-50"
: "cursor-pointer hover:shadow-lg"
}`}
data-id={`block-card-${block.id}`}
onClick={() => handleAddBlock(block)}
title={block.notAvailable ?? undefined}
>
<div
className={`-ml-px h-full w-3 rounded-l-xl ${getPrimaryCategoryColor(block.categories)}`}
></div>
<div className="mx-3 flex flex-1 items-center justify-between">
<div className="mr-2 min-w-0">
<span
className="block truncate pb-1 text-sm font-semibold dark:text-white"
data-id={`block-name-${block.id}`}
data-type={block.uiType}
data-testid={`block-name-${block.id}`}
>
<TextRenderer
value={beautifyString(block.name).replace(
/ Block$/,
"",
)}
truncateLengthLimit={45}
/>
</span>
<span
className="block break-all text-xs font-normal text-gray-500 dark:text-gray-400"
data-testid={`block-description-${block.id}`}
>
<TextRenderer
value={block.description}
truncateLengthLimit={165}
/>
</span>
</div>
<div
className="flex flex-shrink-0 items-center gap-1"
data-id={`block-tooltip-${block.id}`}
data-testid={`block-add`}
>
<PlusIcon className="h-6 w-6 rounded-lg bg-gray-200 stroke-black stroke-[0.5px] p-1 dark:bg-gray-700 dark:stroke-white" />
</div>
</div>
</Card>
</div>
);
})}
</div>
</div>
</CardContent>
</Card>
</PopoverContent>
</Popover>
);
}
BlocksControlComponent.displayName = "BlocksControl";
export const BlocksControl = React.memo(BlocksControlComponent);

View File

@@ -1,103 +0,0 @@
import { beautifyString } from "@/lib/utils";
import { Block, BlockUIType } from "@/lib/autogpt-server-api";
import jaro from "jaro-winkler";
export interface BlockSearchData {
blockName: string;
beautifiedName: string;
description: string;
}
export interface EnhancedBlock extends Block {
searchData: BlockSearchData;
}
export interface BlockWithAvailability extends Block {
notAvailable?: string | null;
}
export interface GraphState {
hasWebhookNodes: boolean;
hasInputNodes: boolean;
}
export function getBlockSearchData(
block: Pick<Block, "name" | "description">,
): BlockSearchData {
return {
blockName: block.name.toLowerCase(),
beautifiedName: beautifyString(block.name).toLowerCase(),
description: block.description.toLowerCase(),
};
}
export function matchesSearch(block: EnhancedBlock, query: string): number {
if (!query) return 1;
const normalizedQuery = query.toLowerCase().trim();
const queryWords = normalizedQuery.split(/\s+/);
const { blockName, beautifiedName, description } = block.searchData;
// Exact match in name (highest priority)
if (
blockName.includes(normalizedQuery) ||
beautifiedName.includes(normalizedQuery)
) {
return 3;
}
// All query words in name (regardless of order)
const allWordsInName = queryWords.every(
(word) => blockName.includes(word) || beautifiedName.includes(word),
);
if (allWordsInName) return 2;
// Similarity with name (Jaro-Winkler) - Only for short queries to avoid performance issues
if (normalizedQuery.length <= 12) {
const similarityThreshold = 0.65;
const nameSimilarity = jaro(blockName, normalizedQuery);
const beautifiedSimilarity = jaro(beautifiedName, normalizedQuery);
const maxSimilarity = Math.max(nameSimilarity, beautifiedSimilarity);
if (maxSimilarity > similarityThreshold) {
return 1 + maxSimilarity; // Score between 1 and 2
}
}
// All query words in description (lower priority)
const allWordsInDescription = queryWords.every((word) =>
description.includes(word),
);
if (allWordsInDescription) return 0.5;
return 0;
}
export function getBlockAvailability(
block: Block,
graphState: GraphState,
): string | null {
if (block.uiType === BlockUIType.WEBHOOK && graphState.hasWebhookNodes) {
return "Agents can only have one webhook-triggered block";
}
if (block.uiType === BlockUIType.WEBHOOK && graphState.hasInputNodes) {
return "Webhook-triggered blocks can't be used together with input blocks";
}
if (block.uiType === BlockUIType.INPUT && graphState.hasWebhookNodes) {
return "Input blocks can't be used together with a webhook-triggered block";
}
return null;
}
export function extractCategories(blocks: Block[]): (string | null)[] {
return Array.from(
new Set([
null,
...blocks
.flatMap((block) => block.categories.map((cat) => cat.category))
.sort(),
]),
);
}
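// Minimal usage sketch (not part of this file): `rankBlocks` is a
// hypothetical helper showing how the functions above compose. Search data is
// computed once per block, so repeated keystrokes only pay for the string
// comparisons, not the repeated lowercasing and beautifying.
export function rankBlocks(blocks: Block[], query: string): Block[] {
  const enhanced: EnhancedBlock[] = blocks.map((block) => ({
    ...block,
    searchData: getBlockSearchData(block),
  }));
  return enhanced
    .map((block) => ({ block, score: matchesSearch(block, query) }))
    .filter(({ score }) => score > 0)
    .sort((a, b) => b.score - a.score)
    .map(({ block }) => block);
}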

View File

@@ -1,151 +0,0 @@
import { useState, useMemo, useRef, useCallback } from "react";
import {
Block,
BlockUIType,
SpecialBlockID,
GraphMeta,
} from "@/lib/autogpt-server-api";
import { CustomNode } from "@/components/CustomNode";
import {
getBlockSearchData,
matchesSearch,
getBlockAvailability,
extractCategories,
EnhancedBlock,
BlockWithAvailability,
GraphState,
} from "./helpers";
interface Args {
blocks: Block[];
flows: GraphMeta[];
nodes: CustomNode[];
addBlock: (
id: string,
name: string,
hardcodedValues: Record<string, any>,
) => void;
}
export function useBlocksControl({ blocks, flows, nodes, addBlock }: Args) {
const [searchQuery, setSearchQuery] = useState("");
const [selectedCategory, setSelectedCategory] = useState<string | null>(null);
const animationFrameRef = useRef<number | null>(null);
// Memoize graph state checks to avoid recalculating on every render
const graphState = useMemo(
(): GraphState => ({
hasWebhookNodes: nodes.some((n) =>
[BlockUIType.WEBHOOK, BlockUIType.WEBHOOK_MANUAL].includes(
n.data.uiType,
),
),
hasInputNodes: nodes.some((n) => n.data.uiType === BlockUIType.INPUT),
}),
[nodes],
);
// Memoize blocks with precomputed search data
const blocksWithSearchData = useMemo((): EnhancedBlock[] => {
return blocks.map((block) => ({
...block,
searchData: getBlockSearchData(block),
}));
}, [blocks]);
// Memoize agent blocks list with search data
const agentBlocksWithSearchData = useMemo((): EnhancedBlock[] => {
return flows.map((flow) => {
const description = `Ver.${flow.version}${flow.description ? ` | ${flow.description}` : ""}`;
return {
id: SpecialBlockID.AGENT,
name: flow.name,
description,
categories: [{ category: "AGENT", description: "" }],
inputSchema: flow.input_schema,
outputSchema: flow.output_schema,
staticOutput: false,
uiType: BlockUIType.AGENT,
uiKey: flow.id,
costs: [],
hardcodedValues: {
graph_id: flow.id,
graph_version: flow.version,
input_schema: flow.input_schema,
output_schema: flow.output_schema,
},
searchData: getBlockSearchData({ name: flow.name, description }),
} satisfies EnhancedBlock;
});
}, [flows]);
// Memoize filtered and sorted blocks
const filteredAvailableBlocks = useMemo((): BlockWithAvailability[] => {
const blockList = blocksWithSearchData
.filter((b) => b.uiType !== BlockUIType.AGENT)
.sort((a, b) => a.name.localeCompare(b.name));
const allBlocks = blockList.concat(agentBlocksWithSearchData);
return allBlocks
.map((block) => ({
block,
score: matchesSearch(block, searchQuery),
}))
.filter(
({ block, score }) =>
score > 0 &&
(!selectedCategory ||
block.categories.some((cat) => cat.category === selectedCategory)),
)
.sort((a, b) => b.score - a.score)
.map(({ block }) => ({
...block,
notAvailable: getBlockAvailability(block, graphState),
}));
}, [
blocksWithSearchData,
agentBlocksWithSearchData,
searchQuery,
selectedCategory,
graphState,
]);
const categories = useMemo(() => extractCategories(blocks), [blocks]);
// Create requestAnimationFrame-based search query setter
const debouncedSetSearchQuery = useCallback((value: string) => {
if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current);
}
animationFrameRef.current = requestAnimationFrame(() => {
setSearchQuery(value);
});
}, []);
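// Editor's note (not part of this file): unlike a setTimeout-based debounce,
// this rAF variant coalesces rapid keystrokes into at most one state update
// per animation frame (~16ms at 60Hz). A timeout-based equivalent, with
// `timeoutRef` as a hypothetical useRef, would look like:
//   if (timeoutRef.current) clearTimeout(timeoutRef.current);
//   timeoutRef.current = setTimeout(() => setSearchQuery(value), 150);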
function resetFilters() {
setSearchQuery("");
setSelectedCategory(null);
}
function handleCategoryClick(category: string | null) {
setSelectedCategory(selectedCategory === category ? null : category);
}
function handleAddBlock(block: BlockWithAvailability) {
if (!block.notAvailable) {
addBlock(block.id, block.name, block?.hardcodedValues || {});
}
}
return {
searchQuery,
setSearchQuery: debouncedSetSearchQuery,
selectedCategory,
filteredAvailableBlocks,
categories,
resetFilters,
handleCategoryClick,
handleAddBlock,
};
}

View File

@@ -197,10 +197,9 @@ function useToastOnFail() {
return React.useCallback(
(action: string, { rethrow = false }: ToastOnFailOptions = {}) =>
(error: any) => {
const err = error as Error;
toast({
title: `Unable to ${action}`,
description: err.message ?? "Something went wrong",
description: (error as Error)?.message ?? "Something went wrong",
variant: "destructive",
duration: 10000,
});
@@ -212,4 +211,4 @@ function useToastOnFail() {
);
}
export { toast, useToast, useToastOnFail };
export { useToast, toast, useToastOnFail };

View File

@@ -78,7 +78,7 @@ export async function getServerAuthToken(): Promise<string> {
error,
} = await supabase.auth.getSession();
if (error || !session || !session.access_token) {
if (error || !session?.access_token) {
return "no-token-found";
}

View File

@@ -1,5 +1,4 @@
import { type CookieOptions } from "@supabase/ssr";
import { SupabaseClient } from "@supabase/supabase-js";
// Detect if we're in a Playwright test environment
const isTest = process.env.NEXT_PUBLIC_PW_TEST === "true";
@@ -99,38 +98,3 @@ export function setupSessionEventListeners(
},
};
}
export interface CodeExchangeResult {
success: boolean;
error?: string;
}
export async function exchangePasswordResetCode(
supabase: SupabaseClient<any, "public", any>,
code: string,
): Promise<CodeExchangeResult> {
try {
const { data, error } = await supabase.auth.exchangeCodeForSession(code);
if (error) {
return {
success: false,
error: error.message,
};
}
if (!data.session) {
return {
success: false,
error: "Failed to create session",
};
}
return { success: true };
} catch (error) {
return {
success: false,
error: error instanceof Error ? error.message : "Unknown error",
};
}
}
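// Minimal usage sketch (an illustration, not part of this file; `supabase` is
// a configured client and `showError` a hypothetical handler in the caller):
//   const result = await exchangePasswordResetCode(supabase, codeFromUrl);
//   if (!result.success) showError(result.error ?? "Reset failed");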

View File

@@ -43,17 +43,18 @@ export async function updateSession(request: NextRequest) {
},
);
const userResponse = await supabase.auth.getUser();
const user = userResponse.data.user;
const userRole = user?.role;
const url = request.nextUrl.clone();
const pathname = request.nextUrl.pathname;
// IMPORTANT: Avoid writing any logic between createServerClient and
// supabase.auth.getUser(). A simple mistake could make it very hard to debug
// issues with users being randomly logged out.
const {
data: { user },
} = await supabase.auth.getUser();
const userRole = user?.role;
const url = request.nextUrl.clone();
const pathname = request.nextUrl.pathname;
// AUTH REDIRECTS
// 1. Check if user is not authenticated but trying to access protected content
if (!user) {

View File

@@ -1,43 +0,0 @@
import { FullConfig } from "@playwright/test";
import { createTestUsers, saveUserPool, loadUserPool } from "./utils/auth";
/**
* Global setup function that runs before all tests
* Creates test users and saves them to the file system
*/
async function globalSetup(config: FullConfig) {
console.log("🚀 Starting global test setup...");
try {
const existingUserPool = await loadUserPool();
if (existingUserPool && existingUserPool.users.length > 0) {
console.log(
`♻️ Found existing user pool with ${existingUserPool.users.length} users`,
);
console.log("✅ Using existing user pool");
return;
}
// Create test users using signup page
const numberOfUsers = (config.workers || 1) + 3; // workers + buffer
console.log(`👥 Creating ${numberOfUsers} test users via signup...`);
const users = await createTestUsers(numberOfUsers);
if (users.length === 0) {
throw new Error("Failed to create any test users");
}
// Save user pool
await saveUserPool(users);
console.log("✅ Global setup completed successfully!");
console.log(`📊 Created ${users.length} test users via signup page`);
} catch (error) {
console.error("❌ Global setup failed:", error);
throw error;
}
}
export default globalSetup;

View File

@@ -1,113 +0,0 @@
import { test, expect } from "./fixtures";
import {
signupTestUser,
validateSignupForm,
generateTestEmail,
generateTestPassword,
} from "./utils/signup";
test.describe("Signup Flow", () => {
test("user can signup successfully", async ({ page }) => {
console.log("🧪 Testing user signup flow...");
try {
const testUser = await signupTestUser(page);
// Verify user was created
expect(testUser.email).toBeTruthy();
expect(testUser.password).toBeTruthy();
expect(testUser.createdAt).toBeTruthy();
// Verify we're on marketplace and authenticated
await expect(page).toHaveURL("/marketplace");
await expect(
page.getByText(
"Bringing you AI agents designed by thinkers from around the world",
),
).toBeVisible();
await expect(
page.getByTestId("profile-popout-menu-trigger"),
).toBeVisible();
console.log(`✅ User successfully signed up: ${testUser.email}`);
} catch (error) {
console.error("❌ Signup test failed:", error);
}
});
test("signup form validation works", async ({ page }) => {
console.log("🧪 Testing signup form validation...");
await validateSignupForm(page);
// Additional validation tests
await page.goto("/signup");
// Test with mismatched passwords
console.log("❌ Testing mismatched passwords...");
await page.getByPlaceholder("m@example.com").fill(generateTestEmail());
const passwordInputs = page.getByTitle("Password");
await passwordInputs.nth(0).fill("password1");
await passwordInputs.nth(1).fill("password2");
await page.getByRole("checkbox").click();
await page.getByRole("button", { name: "Sign up" }).click();
// Should still be on signup page
await expect(page).toHaveURL(/\/signup/);
console.log("✅ Mismatched passwords correctly blocked");
});
test("user can signup with custom credentials", async ({ page }) => {
console.log("🧪 Testing signup with custom credentials...");
try {
const customEmail = generateTestEmail();
const customPassword = generateTestPassword();
const testUser = await signupTestUser(page, customEmail, customPassword);
// Verify correct credentials were used
expect(testUser.email).toBe(customEmail);
expect(testUser.password).toBe(customPassword);
// Verify successful signup
await expect(page).toHaveURL("/marketplace");
await expect(
page.getByTestId("profile-popout-menu-trigger"),
).toBeVisible();
console.log(`✅ Custom credentials signup worked: ${testUser.email}`);
} catch (error) {
console.error("❌ Custom credentials signup test failed:", error);
}
});
test("user can signup with existing email handling", async ({ page }) => {
console.log("🧪 Testing duplicate email handling...");
try {
const testEmail = generateTestEmail();
const testPassword = generateTestPassword();
// First signup
console.log(`👤 First signup attempt: ${testEmail}`);
const firstUser = await signupTestUser(page, testEmail, testPassword);
expect(firstUser.email).toBe(testEmail);
console.log("✅ First signup successful");
// Second signup attempt with the same email should be handled gracefully
console.log(`👤 Second signup attempt: ${testEmail}`);
try {
await signupTestUser(page, testEmail, testPassword);
console.log(" Second signup handled gracefully");
} catch (_error) {
console.log(" Second signup rejected as expected");
}
console.log("✅ Duplicate email handling test completed");
} catch (error) {
console.error("❌ Duplicate email handling test failed:", error);
}
});
});

View File

@@ -1,184 +0,0 @@
import { faker } from "@faker-js/faker";
import { chromium, webkit } from "@playwright/test";
import fs from "fs";
import path from "path";
import { signupTestUser } from "./signup";
export interface TestUser {
email: string;
password: string;
id?: string;
createdAt?: string;
}
export interface UserPool {
users: TestUser[];
createdAt: string;
version: string;
}
// Launches a dedicated headless browser (chromium or webkit, selected via
// BROWSER_TYPE) to drive the signup flow for each created user
/**
* Create a new test user through the signup page in a headless browser
* @param email - User email (optional, will generate if not provided)
* @param password - User password (optional, will generate if not provided)
* @param ignoreOnboarding - Skip onboarding and go to marketplace (default: true)
* @returns Promise<TestUser> - Created user object
*/
export async function createTestUser(
email?: string,
password?: string,
ignoreOnboarding: boolean = true,
): Promise<TestUser> {
const userEmail = email || faker.internet.email();
const userPassword = password || faker.internet.password({ length: 12 });
try {
const browserType = process.env.BROWSER_TYPE || "chromium";
const browser =
browserType === "webkit"
? await webkit.launch({ headless: true })
: await chromium.launch({ headless: true });
const context = await browser.newContext();
const page = await context.newPage();
try {
const testUser = await signupTestUser(
page,
userEmail,
userPassword,
ignoreOnboarding,
);
return testUser;
} finally {
await page.close();
await context.close();
await browser.close();
}
} catch (error) {
console.error(`❌ Error creating test user ${userEmail}:`, error);
throw error;
}
}
/**
* Create multiple test users
* @param count - Number of users to create
* @returns Promise<TestUser[]> - Array of created users
*/
export async function createTestUsers(count: number): Promise<TestUser[]> {
console.log(`👥 Creating ${count} test users...`);
const users: TestUser[] = [];
for (let i = 0; i < count; i++) {
try {
const user = await createTestUser();
users.push(user);
console.log(`✅ Created user ${i + 1}/${count}: ${user.email}`);
} catch (error) {
console.error(`❌ Failed to create user ${i + 1}/${count}:`, error);
// Continue creating other users even if one fails
}
}
console.log(`🎉 Successfully created ${users.length}/${count} test users`);
return users;
}
/**
* Save user pool to file system
* @param users - Array of users to save
* @param filePath - Path to save the file (optional)
*/
export async function saveUserPool(
users: TestUser[],
filePath?: string,
): Promise<void> {
const defaultPath = path.resolve(process.cwd(), ".auth", "user-pool.json");
const finalPath = filePath || defaultPath;
// Ensure .auth directory exists
const dirPath = path.dirname(finalPath);
if (!fs.existsSync(dirPath)) {
fs.mkdirSync(dirPath, { recursive: true });
}
const userPool: UserPool = {
users,
createdAt: new Date().toISOString(),
version: "1.0.0",
};
try {
fs.writeFileSync(finalPath, JSON.stringify(userPool, null, 2));
console.log(`✅ Successfully saved user pool to: ${finalPath}`);
} catch (error) {
console.error(`❌ Failed to save user pool to ${finalPath}:`, error);
throw error;
}
}
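// Illustrative shape of the resulting .auth/user-pool.json (values made up):
// {
//   "users": [
//     {
//       "email": "Keon_Mills@example.net",
//       "password": "x9TqLm2wPzR4",
//       "createdAt": "2025-07-03T12:52:33.000Z"
//     }
//   ],
//   "createdAt": "2025-07-03T12:52:33.000Z",
//   "version": "1.0.0"
// }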
/**
* Load user pool from file system
* @param filePath - Path to load from (optional)
* @returns Promise<UserPool | null> - Loaded user pool or null if not found
*/
export async function loadUserPool(
filePath?: string,
): Promise<UserPool | null> {
const defaultPath = path.resolve(process.cwd(), ".auth", "user-pool.json");
const finalPath = filePath || defaultPath;
console.log(`📖 Loading user pool from: ${finalPath}`);
try {
if (!fs.existsSync(finalPath)) {
console.log(`⚠️ User pool file not found: ${finalPath}`);
return null;
}
const fileContent = fs.readFileSync(finalPath, "utf-8");
const userPool: UserPool = JSON.parse(fileContent);
console.log(
`✅ Successfully loaded ${userPool.users.length} users from: ${finalPath}`,
);
console.log(`📅 User pool created at: ${userPool.createdAt}`);
console.log(`🔖 User pool version: ${userPool.version}`);
return userPool;
} catch (error) {
console.error(`❌ Failed to load user pool from ${finalPath}:`, error);
return null;
}
}
/**
* Clean up all test users from a pool
* Note: when using the signup-page method, cleanup only removes the user pool file
* @param filePath - Path to load from (optional)
*/
export async function cleanupTestUsers(filePath?: string): Promise<void> {
const defaultPath = path.resolve(process.cwd(), ".auth", "user-pool.json");
const finalPath = filePath || defaultPath;
console.log(`🧹 Cleaning up test users...`);
try {
if (fs.existsSync(finalPath)) {
fs.unlinkSync(finalPath);
console.log(`✅ Deleted user pool file: ${finalPath}`);
} else {
console.log(`⚠️ No user pool file found to cleanup`);
}
} catch (error) {
console.error(`❌ Failed to cleanup user pool:`, error);
}
console.log(`🎉 Cleanup completed`);
}

View File

@@ -1,94 +0,0 @@
import { Page } from "@playwright/test";
import { LoginPage } from "../pages/login.page";
import { TestUser } from "../fixtures/test-user.fixture";
/**
* Utility functions for signin/authentication tests
*/
export class SigninUtils {
constructor(
private page: Page,
private loginPage: LoginPage,
) {}
/**
* Perform login and verify success
*/
async loginAndVerify(testUser: TestUser): Promise<void> {
console.log(`🔐 Logging in as: ${testUser.email}`);
await this.page.goto("/login");
await this.loginPage.login(testUser.email, testUser.password);
// Verify we're on marketplace
await this.page.waitForURL("/marketplace");
// Verify profile menu is visible (user is authenticated)
await this.page.getByTestId("profile-popout-menu-trigger").waitFor({
state: "visible",
timeout: 5000,
});
console.log("✅ Login successful");
}
/**
* Perform logout and verify success
*/
async logoutAndVerify(): Promise<void> {
console.log("🚪 Logging out...");
// Open profile menu
await this.page.getByTestId("profile-popout-menu-trigger").click();
// Wait for menu to be visible
await this.page.getByRole("button", { name: "Log out" }).waitFor({
state: "visible",
timeout: 5000,
});
// Click logout
await this.page.getByRole("button", { name: "Log out" }).click();
// Verify we're back on login page
await this.page.waitForURL("/login");
console.log("✅ Logout successful");
}
/**
* Complete authentication cycle: login -> logout -> login
*/
async fullAuthenticationCycle(testUser: TestUser): Promise<void> {
console.log("🔄 Starting full authentication cycle...");
// First login
await this.loginAndVerify(testUser);
// Logout
await this.logoutAndVerify();
// Login again
await this.loginAndVerify(testUser);
console.log("✅ Full authentication cycle completed");
}
/**
* Verify user is on marketplace and authenticated
*/
async verifyAuthenticated(): Promise<void> {
await this.page.waitForURL("/marketplace");
await this.page.getByTestId("profile-popout-menu-trigger").waitFor({
state: "visible",
timeout: 5000,
});
}
/**
* Verify user is on login page (not authenticated)
*/
async verifyNotAuthenticated(): Promise<void> {
await this.page.waitForURL("/login");
}
}

View File

@@ -1,166 +0,0 @@
import { faker } from "@faker-js/faker";
import { TestUser } from "./auth";
/**
* Create a test user through the signup page for test setup
* @param page - Playwright page object
* @param email - User email (optional, will generate if not provided)
* @param password - User password (optional, will generate if not provided)
* @param ignoreOnboarding - Skip onboarding and go to marketplace (default: true)
* @returns Promise<TestUser> - Created user object
*/
export async function signupTestUser(
page: any,
email?: string,
password?: string,
ignoreOnboarding: boolean = true,
): Promise<TestUser> {
const userEmail = email || faker.internet.email();
const userPassword = password || faker.internet.password({ length: 12 });
try {
// Navigate to signup page
await page.goto("http://localhost:3000/signup");
// Wait for page to load
const emailInput = page.getByPlaceholder("m@example.com");
await emailInput.waitFor({ state: "visible", timeout: 10000 });
// Fill form
await emailInput.fill(userEmail);
const passwordInputs = page.getByTitle("Password");
await passwordInputs.nth(0).fill(userPassword);
await passwordInputs.nth(1).fill(userPassword);
// Agree to terms and submit
await page.getByRole("checkbox").click();
const signupButton = page.getByRole("button", { name: "Sign up" });
await signupButton.click();
// Wait for successful signup - could redirect to onboarding or marketplace
try {
// Wait for either onboarding or marketplace redirect
await Promise.race([
page.waitForURL(/\/onboarding/, { timeout: 15000 }),
page.waitForURL(/\/marketplace/, { timeout: 15000 }),
]);
} catch (error) {
console.error(
"❌ Timeout waiting for redirect, current URL:",
page.url(),
);
throw error;
}
const currentUrl = page.url();
// Handle onboarding or marketplace redirect
if (currentUrl.includes("/onboarding") && ignoreOnboarding) {
await page.goto("http://localhost:3000/marketplace");
await page.waitForLoadState("domcontentloaded", { timeout: 10000 });
}
// Verify we're on the expected final page
if (ignoreOnboarding || currentUrl.includes("/marketplace")) {
// Verify we're on marketplace
await page
.getByText(
"Bringing you AI agents designed by thinkers from around the world",
)
.waitFor({ state: "visible", timeout: 10000 });
// Verify user is authenticated (profile menu visible)
await page
.getByTestId("profile-popout-menu-trigger")
.waitFor({ state: "visible", timeout: 10000 });
}
const testUser: TestUser = {
email: userEmail,
password: userPassword,
createdAt: new Date().toISOString(),
};
return testUser;
} catch (error) {
console.error(`❌ Error creating test user ${userEmail}:`, error);
throw error;
}
}
/**
* Complete signup and navigate to marketplace
* @param page - Playwright page object
* @param email - User email (optional, will generate if not provided)
* @param password - User password (optional, will generate if not provided)
* @returns Promise<TestUser> - Created user object
*/
export async function signupAndNavigateToMarketplace(
page: any,
email?: string,
password?: string,
): Promise<TestUser> {
console.log("🧪 Creating user and navigating to marketplace...");
// Create the user via signup and automatically navigate to marketplace
const testUser = await signupTestUser(page, email, password, true);
console.log("✅ User successfully created and authenticated in marketplace");
return testUser;
}
/**
* Validate signup form behavior
* @param page - Playwright page object
* @returns Promise<void>
*/
export async function validateSignupForm(page: any): Promise<void> {
console.log("🧪 Validating signup form...");
await page.goto("http://localhost:3000/signup");
// Test empty form submission
console.log("❌ Testing empty form submission...");
const signupButton = page.getByRole("button", { name: "Sign up" });
await signupButton.click();
// Should still be on signup page
const currentUrl = page.url();
if (currentUrl.includes("/signup")) {
console.log("✅ Empty form correctly blocked");
} else {
console.log("⚠️ Empty form was not blocked as expected");
}
// Test invalid email
console.log("❌ Testing invalid email...");
await page.getByPlaceholder("m@example.com").fill("invalid-email");
await signupButton.click();
// Should still be on signup page
const currentUrl2 = page.url();
if (currentUrl2.includes("/signup")) {
console.log("✅ Invalid email correctly blocked");
} else {
console.log("⚠️ Invalid email was not blocked as expected");
}
console.log("✅ Signup form validation completed");
}
/**
* Generate unique test email
* @returns string - Unique test email
*/
export function generateTestEmail(): string {
return `test.${Date.now()}.${Math.random().toString(36).substring(7)}@example.com`;
}
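// Illustrative output: e.g. "test.1751543553000.j9f2x@example.com", combining
// epoch milliseconds with a random base-36 suffix so parallel workers are
// unlikely to collide.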
/**
* Generate secure test password
* @returns string - Secure test password
*/
export function generateTestPassword(): string {
return faker.internet.password({ length: 12 });
}

View File

@@ -1,42 +1,9 @@
import { faker } from "@faker-js/faker";
import { TestUser } from "./auth";
/**
* Generate a test user with random data
* @param options - Optional parameters to override defaults
* @returns TestUser object with generated data
*/
export function generateUser(options?: {
email?: string;
password?: string;
name?: string;
}): TestUser {
console.log("🎲 Generating test user...");
const user: TestUser = {
email: options?.email || faker.internet.email(),
password: options?.password || faker.internet.password({ length: 12 }),
createdAt: new Date().toISOString(),
export function generateUser() {
return {
email: faker.internet.email(),
password: faker.internet.password(),
name: faker.person.fullName(),
};
console.log(`✅ Generated user: ${user.email}`);
return user;
}
/**
* Generate multiple test users
* @param count - Number of users to generate
* @returns Array of TestUser objects
*/
export function generateUsers(count: number): TestUser[] {
console.log(`👥 Generating ${count} test users...`);
const users: TestUser[] = [];
for (let i = 0; i < count; i++) {
users.push(generateUser());
}
console.log(`✅ Generated ${users.length} test users`);
return users;
}