mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00
Compare commits
104 Commits
dependabot
...
swiftyos/o
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e9cf61aa2d | ||
|
|
0e4d0ce108 | ||
|
|
ce1d63c517 | ||
|
|
b5b9a008bf | ||
|
|
7e04fbd25f | ||
|
|
016ec0ff6b | ||
|
|
2f11e9601e | ||
|
|
3b8cde6d11 | ||
|
|
d050a3f77c | ||
|
|
1626bf9e16 | ||
|
|
539d3e0791 | ||
|
|
40613fe23e | ||
|
|
6eee9206f7 | ||
|
|
64050faef6 | ||
|
|
00c312d02c | ||
|
|
610be988c4 | ||
|
|
1a4ba533ca | ||
|
|
56a307d048 | ||
|
|
a5ad90f09b | ||
|
|
a315b3fc41 | ||
|
|
1a1fe7c0b7 | ||
|
|
c693875951 | ||
|
|
797916cf14 | ||
|
|
5d8fe1e184 | ||
|
|
8181ee8cd1 | ||
|
|
6183ed5a63 | ||
|
|
4b76aae1c9 | ||
|
|
f6e395f36e | ||
|
|
9c2d19cdb5 | ||
|
|
277a896a83 | ||
|
|
bd9c0d741a | ||
|
|
c098a8d659 | ||
|
|
1d30e401fe | ||
|
|
22536de71f | ||
|
|
0915879049 | ||
|
|
243122311c | ||
|
|
533d120e98 | ||
|
|
58cadeb3b9 | ||
|
|
9151211d2a | ||
|
|
8e68e20fef | ||
|
|
a7d545cd5d | ||
|
|
0fbabe690a | ||
|
|
cdd2d5696c | ||
|
|
f44453be6e | ||
|
|
7a14e5dd66 | ||
|
|
e711c1db4e | ||
|
|
4302c5d60a | ||
|
|
c282d2d912 | ||
|
|
582097221f | ||
|
|
53aea8908a | ||
|
|
7b50e9bd77 | ||
|
|
3de982792e | ||
|
|
74b8b45e0a | ||
|
|
5bdd8c252e | ||
|
|
8e33af6d99 | ||
|
|
8dacdd16f2 | ||
|
|
04d7feea28 | ||
|
|
3bcd6df193 | ||
|
|
f3c524a74a | ||
|
|
f481de173d | ||
|
|
24306a16bd | ||
|
|
f5bf36cd97 | ||
|
|
1a08922ccf | ||
|
|
1f83a8c61a | ||
|
|
195261835d | ||
|
|
a44c9333d3 | ||
|
|
bd27ce5f26 | ||
|
|
2c473146dd | ||
|
|
e82df96e56 | ||
|
|
311100d26f | ||
|
|
f3319f23ba | ||
|
|
b03e3e47a2 | ||
|
|
a4b962462c | ||
|
|
1b69bcbce2 | ||
|
|
ef118eff34 | ||
|
|
f1bc9d1581 | ||
|
|
f67060fd8f | ||
|
|
c31a2ec565 | ||
|
|
644cff8155 | ||
|
|
d0c4a1f14a | ||
|
|
4d20e419e1 | ||
|
|
6007def168 | ||
|
|
1f82cebb05 | ||
|
|
bc8043b862 | ||
|
|
2a86f22eb4 | ||
|
|
fa5f24eb12 | ||
|
|
37c59990f5 | ||
|
|
783ca12927 | ||
|
|
52598759df | ||
|
|
37e8b51821 | ||
|
|
f051266a33 | ||
|
|
0c4888f15f | ||
|
|
bb8e5622b3 | ||
|
|
46d573c472 | ||
|
|
c027080607 | ||
|
|
9750b79ced | ||
|
|
797f9eda5c | ||
|
|
4f861e3823 | ||
|
|
69747cc891 | ||
|
|
c6daeefa06 | ||
|
|
a7e0af0551 | ||
|
|
6df94aac44 | ||
|
|
6b9580b666 | ||
|
|
44659948e5 |
@@ -1,6 +1,6 @@
|
||||
version = 1
|
||||
|
||||
test_patterns = ["test_*.py", "tests/**", "test/**", "*.spec.ts", "*_test.py"]
|
||||
test_patterns = ["**/*.spec.ts","**/*_test.py","**/*_tests.py","**/test_*.py"]
|
||||
|
||||
exclude_patterns = ["classic/**"]
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@ To run the AutoGPT Platform, follow these steps:
|
||||
|
||||
2. Run the following command:
|
||||
```
|
||||
git submodule update --init --recursive
|
||||
git submodule update --init --recursive --progress
|
||||
```
|
||||
This command will initialize and update the submodules in the repository. The `supabase` folder will be cloned to the root directory.
|
||||
|
||||
|
||||
75
autogpt_platform/autogpt_libs/poetry.lock
generated
75
autogpt_platform/autogpt_libs/poetry.lock
generated
@@ -606,14 +606,14 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-cloud-logging"
|
||||
version = "3.11.3"
|
||||
version = "3.11.4"
|
||||
description = "Stackdriver Logging API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "google_cloud_logging-3.11.3-py2.py3-none-any.whl", hash = "sha256:b8ec23f2998f76a58f8492db26a0f4151dd500425c3f08448586b85972f3c494"},
|
||||
{file = "google_cloud_logging-3.11.3.tar.gz", hash = "sha256:0a73cd94118875387d4535371d9e9426861edef8e44fba1261e86782d5b8d54f"},
|
||||
{file = "google_cloud_logging-3.11.4-py2.py3-none-any.whl", hash = "sha256:1d465ac62df29fb94bba4d6b4891035e57d573d84541dd8a40eebbc74422b2f0"},
|
||||
{file = "google_cloud_logging-3.11.4.tar.gz", hash = "sha256:32305d989323f3c58603044e2ac5d9cf23e9465ede511bbe90b4309270d3195c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -625,7 +625,8 @@ google-cloud-core = ">=2.0.0,<3.0.0dev"
|
||||
grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev"
|
||||
opentelemetry-api = ">=1.9.0"
|
||||
proto-plus = [
|
||||
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""},
|
||||
{version = ">=1.25.0,<2.0.0dev", markers = "python_version >= \"3.13\""},
|
||||
{version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\" and python_version < \"3.13\""},
|
||||
{version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""},
|
||||
]
|
||||
protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev"
|
||||
@@ -1072,14 +1073,14 @@ strenum = {version = ">=0.4.9,<0.5.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "proto-plus"
|
||||
version = "1.24.0"
|
||||
description = "Beautiful, Pythonic protocol buffers."
|
||||
version = "1.26.0"
|
||||
description = "Beautiful, Pythonic protocol buffers"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "proto-plus-1.24.0.tar.gz", hash = "sha256:30b72a5ecafe4406b0d339db35b56c4059064e69227b8c3bda7462397f966445"},
|
||||
{file = "proto_plus-1.24.0-py3-none-any.whl", hash = "sha256:402576830425e5f6ce4c2a6702400ac79897dab0b4343821aa5188b0fab81a12"},
|
||||
{file = "proto_plus-1.26.0-py3-none-any.whl", hash = "sha256:bf2dfaa3da281fc3187d12d224c707cb57214fb2c22ba854eb0c105a3fb2d4d7"},
|
||||
{file = "proto_plus-1.26.0.tar.gz", hash = "sha256:6e93d5f5ca267b54300880fff156b6a3386b3fa3f43b1da62e680fc0c586ef22"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1138,14 +1139,14 @@ pyasn1 = ">=0.4.6,<0.7.0"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.10.5"
|
||||
version = "2.10.6"
|
||||
description = "Data validation using Python type hints"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"},
|
||||
{file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"},
|
||||
{file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"},
|
||||
{file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1334,14 +1335,14 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "0.25.2"
|
||||
version = "0.25.3"
|
||||
description = "Pytest support for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"},
|
||||
{file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"},
|
||||
{file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"},
|
||||
{file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1475,30 +1476,30 @@ pyasn1 = ">=0.1.3"
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.9.2"
|
||||
version = "0.9.3"
|
||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["dev"]
|
||||
files = [
|
||||
{file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"},
|
||||
{file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"},
|
||||
{file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"},
|
||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"},
|
||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"},
|
||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"},
|
||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"},
|
||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"},
|
||||
{file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"},
|
||||
{file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"},
|
||||
{file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"},
|
||||
{file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"},
|
||||
{file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"},
|
||||
{file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"},
|
||||
{file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"},
|
||||
{file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"},
|
||||
{file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"},
|
||||
{file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"},
|
||||
{file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"},
|
||||
{file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"},
|
||||
{file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"},
|
||||
{file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"},
|
||||
{file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"},
|
||||
{file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1560,14 +1561,14 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "supabase"
|
||||
version = "2.11.0"
|
||||
version = "2.13.0"
|
||||
description = "Supabase client for Python."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "supabase-2.11.0-py3-none-any.whl", hash = "sha256:67a0da498895f4cd6554935e2854b4c41f87b297b78fb9c9414902a382041406"},
|
||||
{file = "supabase-2.11.0.tar.gz", hash = "sha256:2a906f7909fd9a50f944cd9332ce66c684e2d37c0864284d34c5815e6c63cc01"},
|
||||
{file = "supabase-2.13.0-py3-none-any.whl", hash = "sha256:6cfccc055be21dab311afc5e9d5b37f3a4966f8394703763fbc8f8e86f36eaa6"},
|
||||
{file = "supabase-2.13.0.tar.gz", hash = "sha256:452574d34bd978c8d11b5f02b0182b48e8854e511c969483c83875ec01495f11"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1928,4 +1929,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<4.0"
|
||||
content-hash = "53a31ce3d94999d9267f2a229c53a9d97d96c9413843bfdcb7ef0c0c21723e49"
|
||||
content-hash = "a4d81b3b55a67036ca7a441793e13e8fbe20af973fcf1623f36cdee7bc82999f"
|
||||
|
||||
@@ -9,19 +9,19 @@ packages = [{ include = "autogpt_libs" }]
|
||||
[tool.poetry.dependencies]
|
||||
colorama = "^0.4.6"
|
||||
expiringdict = "^1.2.2"
|
||||
google-cloud-logging = "^3.11.3"
|
||||
pydantic = "^2.10.5"
|
||||
google-cloud-logging = "^3.11.4"
|
||||
pydantic = "^2.10.6"
|
||||
pydantic-settings = "^2.7.1"
|
||||
pyjwt = "^2.10.1"
|
||||
pytest-asyncio = "^0.25.2"
|
||||
pytest-asyncio = "^0.25.3"
|
||||
pytest-mock = "^3.14.0"
|
||||
python = ">=3.10,<4.0"
|
||||
python-dotenv = "^1.0.1"
|
||||
supabase = "^2.11.0"
|
||||
supabase = "^2.13.0"
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
redis = "^5.2.1"
|
||||
ruff = "^0.9.2"
|
||||
ruff = "^0.9.3"
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
|
||||
@@ -31,6 +31,12 @@ SUPABASE_URL=http://localhost:8000
|
||||
SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
|
||||
SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
|
||||
|
||||
# RabbitMQ credentials -- Used for communication between services
|
||||
RABBITMQ_HOST=localhost
|
||||
RABBITMQ_PORT=5672
|
||||
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
|
||||
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
|
||||
|
||||
## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
|
||||
## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
|
||||
# FRONTEND_BASE_URL=http://localhost:3000
|
||||
@@ -82,6 +88,14 @@ TWITTER_CLIENT_SECRET=
|
||||
LINEAR_CLIENT_ID=
|
||||
LINEAR_CLIENT_SECRET=
|
||||
|
||||
# To obtain Todoist API credentials:
|
||||
# 1. Create a Todoist account at todoist.com
|
||||
# 2. Visit the Developer Console: https://developer.todoist.com/appconsole.html
|
||||
# 3. Click "Create new app"
|
||||
# 4. Once created, copy your Client ID and Client Secret below
|
||||
TODOIST_CLIENT_ID=
|
||||
TODOIST_CLIENT_SECRET=
|
||||
|
||||
## ===== OPTIONAL API KEYS ===== ##
|
||||
|
||||
# LLM
|
||||
|
||||
@@ -66,10 +66,17 @@ We use the Poetry to manage the dependencies. To set up the project, follow thes
|
||||
|
||||
### Starting the server without Docker
|
||||
|
||||
To run the server locally, start in the autogpt_platform folder:
|
||||
|
||||
```sh
|
||||
cd ..
|
||||
```
|
||||
|
||||
Run the following command to run database in docker but the application locally:
|
||||
|
||||
```sh
|
||||
docker compose --profile local up deps --build --detach
|
||||
cd backend
|
||||
poetry run app
|
||||
```
|
||||
|
||||
|
||||
@@ -297,6 +297,7 @@ class AgentOutputBlock(Block):
|
||||
|
||||
class Output(BlockSchema):
|
||||
output: Any = SchemaField(description="The value recorded as output.")
|
||||
name: Any = SchemaField(description="The name of the value recorded as output.")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
@@ -348,6 +349,7 @@ class AgentOutputBlock(Block):
|
||||
yield "output", f"Error: {e}, {input_data.value}"
|
||||
else:
|
||||
yield "output", input_data.value
|
||||
yield "name", input_data.name
|
||||
|
||||
|
||||
class AddToDictionaryBlock(Block):
|
||||
|
||||
@@ -69,6 +69,7 @@ def AICredentialsField() -> AICredentials:
|
||||
class ModelMetadata(NamedTuple):
|
||||
provider: str
|
||||
context_window: int
|
||||
max_output_tokens: int | None
|
||||
|
||||
|
||||
class LlmModelMeta(EnumMeta):
|
||||
@@ -92,6 +93,8 @@ class LlmModelMeta(EnumMeta):
|
||||
|
||||
class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
# OpenAI models
|
||||
O3_MINI = "o3-mini"
|
||||
O1 = "o1"
|
||||
O1_PREVIEW = "o1-preview"
|
||||
O1_MINI = "o1-mini"
|
||||
GPT4O_MINI = "gpt-4o-mini"
|
||||
@@ -100,30 +103,31 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
GPT3_5_TURBO = "gpt-3.5-turbo"
|
||||
# Anthropic models
|
||||
CLAUDE_3_5_SONNET = "claude-3-5-sonnet-latest"
|
||||
CLAUDE_3_5_HAIKU = "claude-3-5-haiku-latest"
|
||||
CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
|
||||
# Groq models
|
||||
LLAMA3_8B = "llama3-8b-8192"
|
||||
LLAMA3_70B = "llama3-70b-8192"
|
||||
MIXTRAL_8X7B = "mixtral-8x7b-32768"
|
||||
GEMMA_7B = "gemma-7b-it"
|
||||
GEMMA2_9B = "gemma2-9b-it"
|
||||
# New Groq models (Preview)
|
||||
LLAMA3_1_405B = "llama-3.1-405b-reasoning"
|
||||
LLAMA3_1_70B = "llama-3.1-70b-versatile"
|
||||
LLAMA3_3_70B = "llama-3.3-70b-versatile"
|
||||
LLAMA3_1_8B = "llama-3.1-8b-instant"
|
||||
LLAMA3_70B = "llama3-70b-8192"
|
||||
LLAMA3_8B = "llama3-8b-8192"
|
||||
MIXTRAL_8X7B = "mixtral-8x7b-32768"
|
||||
# Groq preview models
|
||||
DEEPSEEK_LLAMA_70B = "deepseek-r1-distill-llama-70b"
|
||||
# Ollama models
|
||||
OLLAMA_LLAMA3_3 = "llama3.3"
|
||||
OLLAMA_LLAMA3_2 = "llama3.2"
|
||||
OLLAMA_LLAMA3_8B = "llama3"
|
||||
OLLAMA_LLAMA3_405B = "llama3.1:405b"
|
||||
OLLAMA_DOLPHIN = "dolphin-mistral:latest"
|
||||
# OpenRouter models
|
||||
GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5"
|
||||
GEMINI_FLASH_1_5 = "google/gemini-flash-1.5"
|
||||
GROK_BETA = "x-ai/grok-beta"
|
||||
MISTRAL_NEMO = "mistralai/mistral-nemo"
|
||||
COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
|
||||
COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024"
|
||||
EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b"
|
||||
DEEPSEEK_CHAT = "deepseek/deepseek-chat"
|
||||
DEEPSEEK_CHAT = "deepseek/deepseek-chat" # Actually: DeepSeek V3
|
||||
PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
|
||||
"perplexity/llama-3.1-sonar-large-128k-online"
|
||||
)
|
||||
@@ -148,47 +152,74 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
|
||||
def context_window(self) -> int:
|
||||
return self.metadata.context_window
|
||||
|
||||
@property
|
||||
def max_output_tokens(self) -> int | None:
|
||||
return self.metadata.max_output_tokens
|
||||
|
||||
|
||||
MODEL_METADATA = {
|
||||
LlmModel.O1_PREVIEW: ModelMetadata("openai", 32000),
|
||||
LlmModel.O1_MINI: ModelMetadata("openai", 62000),
|
||||
LlmModel.GPT4O_MINI: ModelMetadata("openai", 128000),
|
||||
LlmModel.GPT4O: ModelMetadata("openai", 128000),
|
||||
LlmModel.GPT4_TURBO: ModelMetadata("openai", 128000),
|
||||
LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385),
|
||||
LlmModel.CLAUDE_3_5_SONNET: ModelMetadata("anthropic", 200000),
|
||||
LlmModel.CLAUDE_3_HAIKU: ModelMetadata("anthropic", 200000),
|
||||
LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192),
|
||||
LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192),
|
||||
LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768),
|
||||
LlmModel.GEMMA_7B: ModelMetadata("groq", 8192),
|
||||
LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192),
|
||||
LlmModel.LLAMA3_1_405B: ModelMetadata("groq", 8192),
|
||||
# Limited to 16k during preview
|
||||
LlmModel.LLAMA3_1_70B: ModelMetadata("groq", 131072),
|
||||
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
|
||||
LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192),
|
||||
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
|
||||
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
|
||||
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768),
|
||||
LlmModel.GEMINI_FLASH_1_5_8B: ModelMetadata("open_router", 8192),
|
||||
LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
|
||||
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 4000),
|
||||
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 4000),
|
||||
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 4000),
|
||||
LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 4000),
|
||||
LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 8192),
|
||||
# https://platform.openai.com/docs/models
|
||||
LlmModel.O3_MINI: ModelMetadata("openai", 200000, 100000), # o3-mini-2025-01-31
|
||||
LlmModel.O1: ModelMetadata("openai", 200000, 100000), # o1-2024-12-17
|
||||
LlmModel.O1_PREVIEW: ModelMetadata(
|
||||
"openai", 128000, 32768
|
||||
), # o1-preview-2024-09-12
|
||||
LlmModel.O1_MINI: ModelMetadata("openai", 128000, 65536), # o1-mini-2024-09-12
|
||||
LlmModel.GPT4O_MINI: ModelMetadata(
|
||||
"openai", 128000, 16384
|
||||
), # gpt-4o-mini-2024-07-18
|
||||
LlmModel.GPT4O: ModelMetadata("openai", 128000, 16384), # gpt-4o-2024-08-06
|
||||
LlmModel.GPT4_TURBO: ModelMetadata(
|
||||
"openai", 128000, 4096
|
||||
), # gpt-4-turbo-2024-04-09
|
||||
LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385, 4096), # gpt-3.5-turbo-0125
|
||||
# https://docs.anthropic.com/en/docs/about-claude/models
|
||||
LlmModel.CLAUDE_3_5_SONNET: ModelMetadata(
|
||||
"anthropic", 200000, 8192
|
||||
), # claude-3-5-sonnet-20241022
|
||||
LlmModel.CLAUDE_3_5_HAIKU: ModelMetadata(
|
||||
"anthropic", 200000, 8192
|
||||
), # claude-3-5-haiku-20241022
|
||||
LlmModel.CLAUDE_3_HAIKU: ModelMetadata(
|
||||
"anthropic", 200000, 4096
|
||||
), # claude-3-haiku-20240307
|
||||
# https://console.groq.com/docs/models
|
||||
LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192, None),
|
||||
LlmModel.LLAMA3_3_70B: ModelMetadata("groq", 128000, 32768),
|
||||
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 128000, 8192),
|
||||
LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192, None),
|
||||
LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192, None),
|
||||
LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768, None),
|
||||
LlmModel.DEEPSEEK_LLAMA_70B: ModelMetadata("groq", 128000, None),
|
||||
# https://ollama.com/library
|
||||
LlmModel.OLLAMA_LLAMA3_3: ModelMetadata("ollama", 8192, None),
|
||||
LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192, None),
|
||||
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192, None),
|
||||
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192, None),
|
||||
LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768, None),
|
||||
# https://openrouter.ai/models
|
||||
LlmModel.GEMINI_FLASH_1_5: ModelMetadata("open_router", 1000000, 8192),
|
||||
LlmModel.GROK_BETA: ModelMetadata("open_router", 131072, 131072),
|
||||
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 128000, 4096),
|
||||
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 128000, 4096),
|
||||
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 128000, 4096),
|
||||
LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 16384, 4096),
|
||||
LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 64000, 2048),
|
||||
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
|
||||
"open_router", 8192
|
||||
"open_router", 127072, 127072
|
||||
),
|
||||
LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 4000),
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata("open_router", 4000),
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata("open_router", 4000),
|
||||
LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 4000),
|
||||
LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 4000),
|
||||
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 4000),
|
||||
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 4000),
|
||||
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4000),
|
||||
LlmModel.QWEN_QWQ_32B_PREVIEW: ModelMetadata("open_router", 32768, 32768),
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata(
|
||||
"open_router", 131000, 4096
|
||||
),
|
||||
LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata(
|
||||
"open_router", 12288, 12288
|
||||
),
|
||||
LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 300000, 5120),
|
||||
LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 128000, 5120),
|
||||
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 300000, 5120),
|
||||
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 65536, 4096),
|
||||
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4096, 4096),
|
||||
}
|
||||
|
||||
for model in LlmModel:
|
||||
@@ -314,7 +345,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
llm_model: LlmModel,
|
||||
prompt: list[dict],
|
||||
json_format: bool,
|
||||
max_tokens: int | None = None,
|
||||
max_tokens: int | None,
|
||||
ollama_host: str = "localhost:11434",
|
||||
) -> tuple[str, int, int]:
|
||||
"""
|
||||
@@ -332,6 +363,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
The number of tokens used in the completion.
|
||||
"""
|
||||
provider = llm_model.metadata.provider
|
||||
max_tokens = max_tokens or llm_model.max_output_tokens or 4096
|
||||
|
||||
if provider == "openai":
|
||||
oai_client = openai.OpenAI(api_key=credentials.api_key.get_secret_value())
|
||||
@@ -381,7 +413,7 @@ class AIStructuredResponseGeneratorBlock(AIBlockBase):
|
||||
model=llm_model.value,
|
||||
system=sysprompt,
|
||||
messages=messages,
|
||||
max_tokens=max_tokens or 8192,
|
||||
max_tokens=max_tokens,
|
||||
)
|
||||
self.prompt = json.dumps(prompt)
|
||||
|
||||
|
||||
174
autogpt_platform/backend/backend/blocks/screenshotone.py
Normal file
174
autogpt_platform/backend/backend/blocks/screenshotone.py
Normal file
@@ -0,0 +1,174 @@
|
||||
from base64 import b64encode
|
||||
from enum import Enum
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import SecretStr
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import (
|
||||
APIKeyCredentials,
|
||||
CredentialsField,
|
||||
CredentialsMetaInput,
|
||||
SchemaField,
|
||||
)
|
||||
from backend.integrations.providers import ProviderName
|
||||
from backend.util.file import MediaFile, store_media_file
|
||||
from backend.util.request import Requests
|
||||
|
||||
|
||||
class Format(str, Enum):
|
||||
PNG = "png"
|
||||
JPEG = "jpeg"
|
||||
WEBP = "webp"
|
||||
|
||||
|
||||
class ScreenshotWebPageBlock(Block):
|
||||
"""Block for taking screenshots using ScreenshotOne API"""
|
||||
|
||||
class Input(BlockSchema):
|
||||
credentials: CredentialsMetaInput[
|
||||
Literal[ProviderName.SCREENSHOTONE], Literal["api_key"]
|
||||
] = CredentialsField(description="The ScreenshotOne API key")
|
||||
url: str = SchemaField(
|
||||
description="URL of the website to screenshot",
|
||||
placeholder="https://example.com",
|
||||
)
|
||||
viewport_width: int = SchemaField(
|
||||
description="Width of the viewport in pixels", default=1920
|
||||
)
|
||||
viewport_height: int = SchemaField(
|
||||
description="Height of the viewport in pixels", default=1080
|
||||
)
|
||||
full_page: bool = SchemaField(
|
||||
description="Whether to capture the full page length", default=False
|
||||
)
|
||||
format: Format = SchemaField(
|
||||
description="Output format (png, jpeg, webp)", default=Format.PNG
|
||||
)
|
||||
block_ads: bool = SchemaField(description="Whether to block ads", default=True)
|
||||
block_cookie_banners: bool = SchemaField(
|
||||
description="Whether to block cookie banners", default=True
|
||||
)
|
||||
block_chats: bool = SchemaField(
|
||||
description="Whether to block chat widgets", default=True
|
||||
)
|
||||
cache: bool = SchemaField(
|
||||
description="Whether to enable caching", default=False
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
image: MediaFile = SchemaField(description="The screenshot image data")
|
||||
error: str = SchemaField(description="Error message if the screenshot failed")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
id="3a7c4b8d-6e2f-4a5d-b9c1-f8d23c5a9b0e", # Generated UUID
|
||||
description="Takes a screenshot of a specified website using ScreenshotOne API",
|
||||
categories={BlockCategory.DATA},
|
||||
input_schema=ScreenshotWebPageBlock.Input,
|
||||
output_schema=ScreenshotWebPageBlock.Output,
|
||||
test_input={
|
||||
"url": "https://example.com",
|
||||
"viewport_width": 1920,
|
||||
"viewport_height": 1080,
|
||||
"full_page": False,
|
||||
"format": "png",
|
||||
"block_ads": True,
|
||||
"block_cookie_banners": True,
|
||||
"block_chats": True,
|
||||
"cache": False,
|
||||
"credentials": {
|
||||
"provider": "screenshotone",
|
||||
"type": "api_key",
|
||||
"id": "test-id",
|
||||
"title": "Test API Key",
|
||||
},
|
||||
},
|
||||
test_credentials=APIKeyCredentials(
|
||||
id="test-id",
|
||||
provider="screenshotone",
|
||||
api_key=SecretStr("test-key"),
|
||||
title="Test API Key",
|
||||
expires_at=None,
|
||||
),
|
||||
test_output=[
|
||||
(
|
||||
"image",
|
||||
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
|
||||
),
|
||||
],
|
||||
test_mock={
|
||||
"take_screenshot": lambda *args, **kwargs: {
|
||||
"image": "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAAAXNSR0IArs4c6QAAAB5JREFUOE9jZPjP8J+BAsA4agDDaBgwjIYBw7AIAwCV5B/xAsMbygAAAABJRU5ErkJggg==",
|
||||
}
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
def take_screenshot(
    credentials: APIKeyCredentials,
    graph_exec_id: str,
    url: str,
    viewport_width: int,
    viewport_height: int,
    full_page: bool,
    format: Format,
    block_ads: bool,
    block_cookie_banners: bool,
    block_chats: bool,
    cache: bool,
) -> dict:
    """
    Takes a screenshot using the ScreenshotOne API

    Returns a dict with a single "image" key holding the stored media file
    (a data-URI string, since return_content=True).
    """
    # Project HTTP wrapper restricted to the ScreenshotOne origin.
    api = Requests(trusted_origins=["https://api.screenshotone.com"])

    # Build API URL with parameters. Booleans are lowered to "true"/"false"
    # as query-string values; the API key travels as a query parameter.
    params = {
        "access_key": credentials.api_key.get_secret_value(),
        "url": url,
        "viewport_width": viewport_width,
        "viewport_height": viewport_height,
        "full_page": str(full_page).lower(),
        "format": format.value,
        "block_ads": str(block_ads).lower(),
        "block_cookie_banners": str(block_cookie_banners).lower(),
        "block_chats": str(block_chats).lower(),
        "cache": str(cache).lower(),
    }

    # NOTE(review): no explicit HTTP status check here — presumably Requests
    # raises on non-2xx responses; confirm, otherwise an error body would be
    # base64-encoded as if it were image bytes.
    response = api.get("https://api.screenshotone.com/take", params=params)

    # Persist the raw response bytes as a data URI scoped to this execution.
    return {
        "image": store_media_file(
            graph_exec_id=graph_exec_id,
            file=f"data:image/{format.value};base64,{b64encode(response.content).decode('utf-8')}",
            return_content=True,
        )
    }
|
||||
|
||||
def run(
    self,
    input_data: Input,
    *,
    credentials: APIKeyCredentials,
    graph_exec_id: str,
    **kwargs,
) -> BlockOutput:
    """Capture a screenshot per the block inputs and yield it, or an error."""
    try:
        # Collect all capture options from the input schema in one place,
        # then forward them alongside the execution context.
        capture_options = dict(
            url=input_data.url,
            viewport_width=input_data.viewport_width,
            viewport_height=input_data.viewport_height,
            full_page=input_data.full_page,
            format=input_data.format,
            block_ads=input_data.block_ads,
            block_cookie_banners=input_data.block_cookie_banners,
            block_chats=input_data.block_chats,
            cache=input_data.cache,
        )
        result = self.take_screenshot(
            credentials=credentials,
            graph_exec_id=graph_exec_id,
            **capture_options,
        )
        yield "image", result["image"]
    except Exception as e:
        yield "error", str(e)
|
||||
@@ -76,6 +76,8 @@ class ExtractTextInformationBlock(Block):
|
||||
class Output(BlockSchema):
|
||||
positive: str = SchemaField(description="Extracted text")
|
||||
negative: str = SchemaField(description="Original text")
|
||||
matched_results: list[str] = SchemaField(description="List of matched results")
|
||||
matched_count: int = SchemaField(description="Number of matched results")
|
||||
|
||||
def __init__(self):
|
||||
super().__init__(
|
||||
@@ -103,13 +105,31 @@ class ExtractTextInformationBlock(Block):
|
||||
},
|
||||
],
|
||||
test_output=[
|
||||
# Test case 1
|
||||
("positive", "World!"),
|
||||
("matched_results", ["World!"]),
|
||||
("matched_count", 1),
|
||||
# Test case 2
|
||||
("positive", "Hello, World!"),
|
||||
("matched_results", ["Hello, World!"]),
|
||||
("matched_count", 1),
|
||||
# Test case 3
|
||||
("negative", "Hello, World!"),
|
||||
("matched_results", []),
|
||||
("matched_count", 0),
|
||||
# Test case 4
|
||||
("positive", "Hello,"),
|
||||
("matched_results", ["Hello,"]),
|
||||
("matched_count", 1),
|
||||
# Test case 5
|
||||
("positive", "World!!"),
|
||||
("matched_results", ["World!!"]),
|
||||
("matched_count", 1),
|
||||
# Test case 6
|
||||
("positive", "World!!"),
|
||||
("positive", "Earth!!"),
|
||||
("matched_results", ["World!!", "Earth!!"]),
|
||||
("matched_count", 2),
|
||||
],
|
||||
)
|
||||
|
||||
@@ -130,13 +150,16 @@ class ExtractTextInformationBlock(Block):
|
||||
for match in re.finditer(input_data.pattern, txt, flags)
|
||||
if input_data.group <= len(match.groups())
|
||||
]
|
||||
if not input_data.find_all:
|
||||
matches = matches[:1]
|
||||
for match in matches:
|
||||
yield "positive", match
|
||||
if not input_data.find_all:
|
||||
return
|
||||
if not matches:
|
||||
yield "negative", input_data.text
|
||||
|
||||
yield "matched_results", matches
|
||||
yield "matched_count", len(matches)
|
||||
|
||||
|
||||
class FillTextTemplateBlock(Block):
|
||||
class Input(BlockSchema):
|
||||
@@ -252,3 +275,31 @@ class TextSplitBlock(Block):
|
||||
if input_data.strip:
|
||||
texts = [text.strip() for text in texts]
|
||||
yield "texts", texts
|
||||
|
||||
|
||||
class TextReplaceBlock(Block):
    """Replaces every occurrence of a substring in a text with another string."""

    class Input(BlockSchema):
        text: str = SchemaField(description="The text to replace.")
        old: str = SchemaField(description="The old text to replace.")
        new: str = SchemaField(description="The new text to replace with.")

    class Output(BlockSchema):
        output: str = SchemaField(description="The text with the replaced text.")

    def __init__(self):
        super().__init__(
            id="7e7c87ab-3469-4bcc-9abe-67705091b713",
            description="This block is used to replace a text with a new text.",
            categories={BlockCategory.TEXT},
            input_schema=TextReplaceBlock.Input,
            output_schema=TextReplaceBlock.Output,
            test_input=[
                {"text": "Hello, World!", "old": "Hello", "new": "Hi"},
            ],
            test_output=[
                ("output", "Hi, World!"),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        # str.replace substitutes every non-overlapping occurrence of `old`.
        replaced = input_data.text.replace(input_data.old, input_data.new)
        yield "output", replaced
|
||||
|
||||
61
autogpt_platform/backend/backend/blocks/todoist/_auth.py
Normal file
61
autogpt_platform/backend/backend/blocks/todoist/_auth.py
Normal file
@@ -0,0 +1,61 @@
|
||||
from typing import Literal

from pydantic import SecretStr

from backend.data.model import (
    CredentialsField,
    CredentialsMetaInput,
    OAuth2Credentials,
    ProviderName,
)
from backend.integrations.oauth.todoist import TodoistOAuthHandler
from backend.util.settings import Secrets

secrets = Secrets()
# True when both OAuth client credentials are present in settings; consumers
# can use this to decide whether to expose Todoist blocks.
TODOIST_OAUTH_IS_CONFIGURED = bool(
    secrets.todoist_client_id and secrets.todoist_client_secret
)

# Todoist uses plain OAuth2 credentials; these aliases give block code a
# provider-specific vocabulary.
TodoistCredentials = OAuth2Credentials
TodoistCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.TODOIST], Literal["oauth2"]
]


def TodoistCredentialsField(scopes: list[str]) -> TodoistCredentialsInput:
    """
    Creates a Todoist credentials input on a block.

    Params:
        scopes: The authorization scopes needed for the block to work.
            These are merged (as a set) with the handler's DEFAULT_SCOPES.
    """
    return CredentialsField(
        required_scopes=set(TodoistOAuthHandler.DEFAULT_SCOPES + scopes),
        description="The Todoist integration requires OAuth2 authentication.",
    )


# Fixture credentials used by block tests; never hits the real API.
TEST_CREDENTIALS = OAuth2Credentials(
    id="01234567-89ab-cdef-0123-456789abcdef",
    provider="todoist",
    access_token=SecretStr("mock-todoist-access-token"),
    refresh_token=None,
    access_token_expires_at=None,
    scopes=[
        "task:add",
        "data:read",
        "data:read_write",
        "data:delete",
        "project:delete",
    ],
    title="Mock Todoist OAuth2 Credentials",
    username="mock-todoist-username",
    refresh_token_expires_at=None,
)

# Dict form of the fixture as it appears in a block's test_input.
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
|
||||
24
autogpt_platform/backend/backend/blocks/todoist/_types.py
Normal file
24
autogpt_platform/backend/backend/blocks/todoist/_types.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from enum import Enum


class Colors(Enum):
    # Todoist's named label/project colors; enum values mirror the string
    # identifiers the Todoist API expects.
    berry_red = "berry_red"
    red = "red"
    orange = "orange"
    yellow = "yellow"
    olive_green = "olive_green"
    lime_green = "lime_green"
    green = "green"
    mint_green = "mint_green"
    teal = "teal"
    sky_blue = "sky_blue"
    light_blue = "light_blue"
    blue = "blue"
    grape = "grape"
    violet = "violet"
    lavender = "lavender"
    magenta = "magenta"
    salmon = "salmon"
    charcoal = "charcoal"
    grey = "grey"
    taupe = "taupe"
||||
439
autogpt_platform/backend/backend/blocks/todoist/comments.py
Normal file
439
autogpt_platform/backend/backend/blocks/todoist/comments.py
Normal file
@@ -0,0 +1,439 @@
|
||||
from typing import Literal, Union
|
||||
|
||||
from pydantic import BaseModel
|
||||
from todoist_api_python.api import TodoistAPI
|
||||
from typing_extensions import Optional
|
||||
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
)
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TaskId(BaseModel):
    # One arm of the discriminated union used by comment blocks' `id_type`
    # input: targets a task.
    discriminator: Literal["task"]
    task_id: str
|
||||
|
||||
|
||||
class ProjectId(BaseModel):
    # Other arm of the `id_type` discriminated union: targets a project.
    discriminator: Literal["project"]
    project_id: str
|
||||
|
||||
|
||||
class TodoistCreateCommentBlock(Block):
    """Creates a new comment on a Todoist task or project"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        content: str = SchemaField(description="Comment content")
        # Discriminated union: exactly one of task_id / project_id applies.
        id_type: Union[TaskId, ProjectId] = SchemaField(
            discriminator="discriminator",
            description="Specify either task_id or project_id to comment on",
            default=TaskId(discriminator="task", task_id=""),
            advanced=False,
        )
        attachment: Optional[dict] = SchemaField(
            description="Optional file attachment", default=None
        )

    class Output(BlockSchema):
        id: str = SchemaField(description="ID of created comment")
        content: str = SchemaField(description="Comment content")
        posted_at: str = SchemaField(description="Comment timestamp")
        task_id: Optional[str] = SchemaField(
            description="Associated task ID", default=None
        )
        project_id: Optional[str] = SchemaField(
            description="Associated project ID", default=None
        )

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="1bba7e54-2310-4a31-8e6f-54d5f9ab7459",
            description="Creates a new comment on a Todoist task or project",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistCreateCommentBlock.Input,
            output_schema=TodoistCreateCommentBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "content": "Test comment",
                "id_type": {"discriminator": "task", "task_id": "2995104339"},
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "2992679862"),
                ("content", "Test comment"),
                ("posted_at", "2016-09-22T07:00:00.000000Z"),
                ("task_id", "2995104339"),
                ("project_id", None),
            ],
            test_mock={
                "create_comment": lambda content, credentials, task_id=None, project_id=None, attachment=None: {
                    "id": "2992679862",
                    "content": "Test comment",
                    "posted_at": "2016-09-22T07:00:00.000000Z",
                    "task_id": "2995104339",
                    "project_id": None,
                }
            },
        )

    @staticmethod
    def create_comment(
        credentials: TodoistCredentials,
        content: str,
        task_id: Optional[str] = None,
        project_id: Optional[str] = None,
        attachment: Optional[dict] = None,
    ):
        """Add a comment via the Todoist API; returns the comment as a dict.

        Raises whatever the Todoist client raises; `run` converts that to an
        "error" output. (A previous `try/except Exception as e: raise e`
        wrapper was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        comment = api.add_comment(
            content=content,
            task_id=task_id,
            project_id=project_id,
            attachment=attachment,
        )
        return comment.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Resolve the id_type union to a task or project target and comment on it."""
        try:
            task_id = None
            project_id = None

            if isinstance(input_data.id_type, TaskId):
                task_id = input_data.id_type.task_id
            else:
                project_id = input_data.id_type.project_id

            comment_data = self.create_comment(
                credentials,
                input_data.content,
                task_id=task_id,
                project_id=project_id,
                attachment=input_data.attachment,
            )

            if comment_data:
                yield "id", comment_data["id"]
                yield "content", comment_data["content"]
                yield "posted_at", comment_data["posted_at"]
                yield "task_id", comment_data["task_id"]
                yield "project_id", comment_data["project_id"]

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetCommentsBlock(Block):
    """Get all comments for a Todoist task or project"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        # Discriminated union: exactly one of task_id / project_id applies.
        id_type: Union[TaskId, ProjectId] = SchemaField(
            discriminator="discriminator",
            description="Specify either task_id or project_id to get comments for",
            default=TaskId(discriminator="task", task_id=""),
            advanced=False,
        )

    class Output(BlockSchema):
        comments: list = SchemaField(description="List of comments")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="9972d8ae-ddf2-11ef-a9b8-32d3674e8b7e",
            description="Get all comments for a Todoist task or project",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetCommentsBlock.Input,
            output_schema=TodoistGetCommentsBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "id_type": {"discriminator": "task", "task_id": "2995104339"},
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "comments",
                    [
                        {
                            "id": "2992679862",
                            "content": "Test comment",
                            "posted_at": "2016-09-22T07:00:00.000000Z",
                            "task_id": "2995104339",
                            "project_id": None,
                            "attachment": None,
                        }
                    ],
                )
            ],
            test_mock={
                "get_comments": lambda credentials, task_id=None, project_id=None: [
                    {
                        "id": "2992679862",
                        "content": "Test comment",
                        "posted_at": "2016-09-22T07:00:00.000000Z",
                        "task_id": "2995104339",
                        "project_id": None,
                        "attachment": None,
                    }
                ]
            },
        )

    @staticmethod
    def get_comments(
        credentials: TodoistCredentials,
        task_id: Optional[str] = None,
        project_id: Optional[str] = None,
    ):
        """Fetch comments from the Todoist API as a list of dicts.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        comments = api.get_comments(task_id=task_id, project_id=project_id)
        return [comment.__dict__ for comment in comments]

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Resolve the id_type union to a target and yield its comments."""
        try:
            task_id = None
            project_id = None

            if isinstance(input_data.id_type, TaskId):
                task_id = input_data.id_type.task_id
            else:
                project_id = input_data.id_type.project_id

            comments = self.get_comments(
                credentials, task_id=task_id, project_id=project_id
            )

            yield "comments", comments

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetCommentBlock(Block):
    """Get a single comment from Todoist using comment ID"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        comment_id: str = SchemaField(description="Comment ID to retrieve")

    class Output(BlockSchema):
        content: str = SchemaField(description="Comment content")
        id: str = SchemaField(description="Comment ID")
        posted_at: str = SchemaField(description="Comment timestamp")
        project_id: Optional[str] = SchemaField(
            description="Associated project ID", default=None
        )
        task_id: Optional[str] = SchemaField(
            description="Associated task ID", default=None
        )
        attachment: Optional[dict] = SchemaField(
            description="Optional file attachment", default=None
        )

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="a809d264-ddf2-11ef-9764-32d3674e8b7e",
            description="Get a single comment from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetCommentBlock.Input,
            output_schema=TodoistGetCommentBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "comment_id": "2992679862",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("content", "Test comment"),
                ("id", "2992679862"),
                ("posted_at", "2016-09-22T07:00:00.000000Z"),
                ("project_id", None),
                ("task_id", "2995104339"),
                ("attachment", None),
            ],
            test_mock={
                "get_comment": lambda credentials, comment_id: {
                    "content": "Test comment",
                    "id": "2992679862",
                    "posted_at": "2016-09-22T07:00:00.000000Z",
                    "project_id": None,
                    "task_id": "2995104339",
                    "attachment": None,
                }
            },
        )

    @staticmethod
    def get_comment(credentials: TodoistCredentials, comment_id: str):
        """Fetch one comment from the Todoist API as a dict.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        comment = api.get_comment(comment_id=comment_id)
        return comment.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Fetch the comment and yield each of its fields as an output."""
        try:
            comment_data = self.get_comment(
                credentials, comment_id=input_data.comment_id
            )

            if comment_data:
                yield "content", comment_data["content"]
                yield "id", comment_data["id"]
                yield "posted_at", comment_data["posted_at"]
                yield "project_id", comment_data["project_id"]
                yield "task_id", comment_data["task_id"]
                yield "attachment", comment_data["attachment"]

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistUpdateCommentBlock(Block):
    """Updates a Todoist comment"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        comment_id: str = SchemaField(description="Comment ID to update")
        content: str = SchemaField(description="New content for the comment")

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the update was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="b773c520-ddf2-11ef-9f34-32d3674e8b7e",
            description="Updates a Todoist comment",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistUpdateCommentBlock.Input,
            output_schema=TodoistUpdateCommentBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "comment_id": "2992679862",
                "content": "Need one bottle of milk",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"update_comment": lambda credentials, comment_id, content: True},
        )

    @staticmethod
    def update_comment(credentials: TodoistCredentials, comment_id: str, content: str):
        """Update a comment's content via the Todoist API; returns True.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        api.update_comment(comment_id=comment_id, content=content)
        return True

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Apply the content update and yield whether it succeeded."""
        try:
            success = self.update_comment(
                credentials,
                comment_id=input_data.comment_id,
                content=input_data.content,
            )

            yield "success", success

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistDeleteCommentBlock(Block):
    """Deletes a Todoist comment"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        comment_id: str = SchemaField(description="Comment ID to delete")

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the deletion was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="bda4c020-ddf2-11ef-b114-32d3674e8b7e",
            description="Deletes a Todoist comment",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistDeleteCommentBlock.Input,
            output_schema=TodoistDeleteCommentBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "comment_id": "2992679862",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"delete_comment": lambda credentials, comment_id: True},
        )

    @staticmethod
    def delete_comment(credentials: TodoistCredentials, comment_id: str):
        """Delete a comment via the Todoist API; returns the client's success flag.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.delete_comment(comment_id=comment_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Delete the comment and yield whether it succeeded."""
        try:
            success = self.delete_comment(credentials, comment_id=input_data.comment_id)

            yield "success", success

        except Exception as e:
            yield "error", str(e)
|
||||
557
autogpt_platform/backend/backend/blocks/todoist/labels.py
Normal file
557
autogpt_platform/backend/backend/blocks/todoist/labels.py
Normal file
@@ -0,0 +1,557 @@
|
||||
from todoist_api_python.api import TodoistAPI
|
||||
from typing_extensions import Optional
|
||||
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
)
|
||||
from backend.blocks.todoist._types import Colors
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TodoistCreateLabelBlock(Block):
    """Creates a new label in Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        name: str = SchemaField(description="Name of the label")
        order: Optional[int] = SchemaField(description="Label order", default=None)
        color: Optional[Colors] = SchemaField(
            description="The color of the label icon", default=Colors.charcoal
        )
        is_favorite: bool = SchemaField(
            description="Whether the label is a favorite", default=False
        )

    class Output(BlockSchema):
        id: str = SchemaField(description="ID of the created label")
        name: str = SchemaField(description="Name of the label")
        color: str = SchemaField(description="Color of the label")
        order: int = SchemaField(description="Label order")
        is_favorite: bool = SchemaField(description="Favorite status")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="7288a968-de14-11ef-8997-32d3674e8b7e",
            description="Creates a new label in Todoist, It will not work if same name already exists",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistCreateLabelBlock.Input,
            output_schema=TodoistCreateLabelBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "name": "Test Label",
                "color": Colors.charcoal.value,
                "order": 1,
                "is_favorite": False,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "2156154810"),
                ("name", "Test Label"),
                ("color", "charcoal"),
                ("order", 1),
                ("is_favorite", False),
            ],
            test_mock={
                "create_label": lambda *args, **kwargs: {
                    "id": "2156154810",
                    "name": "Test Label",
                    "color": "charcoal",
                    "order": 1,
                    "is_favorite": False,
                }
            },
        )

    @staticmethod
    def create_label(credentials: TodoistCredentials, name: str, **kwargs):
        """Create a label via the Todoist API; returns the label as a dict.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        label = api.add_label(name=name, **kwargs)
        return label.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Create the label and yield its fields, dropping unset optional args."""
        try:
            label_args = {
                "order": input_data.order,
                "color": (
                    input_data.color.value if input_data.color is not None else None
                ),
                "is_favorite": input_data.is_favorite,
            }

            # Only forward arguments that were actually provided (None means
            # "let the API use its default").
            label_data = self.create_label(
                credentials,
                input_data.name,
                **{k: v for k, v in label_args.items() if v is not None},
            )

            if label_data:
                yield "id", label_data["id"]
                yield "name", label_data["name"]
                yield "color", label_data["color"]
                yield "order", label_data["order"]
                yield "is_favorite", label_data["is_favorite"]

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistListLabelsBlock(Block):
    """Gets all personal labels from Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])

    class Output(BlockSchema):
        labels: list = SchemaField(description="List of complete label data")
        label_ids: list = SchemaField(description="List of label IDs")
        label_names: list = SchemaField(description="List of label names")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="776dd750-de14-11ef-b927-32d3674e8b7e",
            description="Gets all personal labels from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistListLabelsBlock.Input,
            output_schema=TodoistListLabelsBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                (
                    "labels",
                    [
                        {
                            "id": "2156154810",
                            "name": "Test Label",
                            "color": "charcoal",
                            "order": 1,
                            "is_favorite": False,
                        }
                    ],
                ),
                ("label_ids", ["2156154810"]),
                ("label_names", ["Test Label"]),
            ],
            test_mock={
                "get_labels": lambda *args, **kwargs: [
                    {
                        "id": "2156154810",
                        "name": "Test Label",
                        "color": "charcoal",
                        "order": 1,
                        "is_favorite": False,
                    }
                ]
            },
        )

    @staticmethod
    def get_labels(credentials: TodoistCredentials):
        """Fetch all personal labels from the Todoist API as dicts.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        labels = api.get_labels()
        return [label.__dict__ for label in labels]

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Yield the full label list plus convenience ID/name projections."""
        try:
            labels = self.get_labels(credentials)
            yield "labels", labels
            yield "label_ids", [label["id"] for label in labels]
            yield "label_names", [label["name"] for label in labels]

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetLabelBlock(Block):
    """Gets a personal label from Todoist by ID"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        label_id: str = SchemaField(description="ID of the label to retrieve")

    class Output(BlockSchema):
        id: str = SchemaField(description="ID of the label")
        name: str = SchemaField(description="Name of the label")
        color: str = SchemaField(description="Color of the label")
        order: int = SchemaField(description="Label order")
        is_favorite: bool = SchemaField(description="Favorite status")

        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="7f236514-de14-11ef-bd7a-32d3674e8b7e",
            description="Gets a personal label from Todoist by ID",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetLabelBlock.Input,
            output_schema=TodoistGetLabelBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "label_id": "2156154810",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "2156154810"),
                ("name", "Test Label"),
                ("color", "charcoal"),
                ("order", 1),
                ("is_favorite", False),
            ],
            test_mock={
                "get_label": lambda *args, **kwargs: {
                    "id": "2156154810",
                    "name": "Test Label",
                    "color": "charcoal",
                    "order": 1,
                    "is_favorite": False,
                }
            },
        )

    @staticmethod
    def get_label(credentials: TodoistCredentials, label_id: str):
        """Fetch one label from the Todoist API as a dict.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        label = api.get_label(label_id=label_id)
        return label.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Fetch the label and yield each of its fields as an output."""
        try:
            label_data = self.get_label(credentials, input_data.label_id)

            if label_data:
                yield "id", label_data["id"]
                yield "name", label_data["name"]
                yield "color", label_data["color"]
                yield "order", label_data["order"]
                yield "is_favorite", label_data["is_favorite"]

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistUpdateLabelBlock(Block):
    """Updates a personal label in Todoist using ID"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        label_id: str = SchemaField(description="ID of the label to update")
        name: Optional[str] = SchemaField(
            description="New name of the label", default=None
        )
        order: Optional[int] = SchemaField(description="Label order", default=None)
        color: Optional[Colors] = SchemaField(
            description="The color of the label icon", default=None
        )
        is_favorite: bool = SchemaField(
            description="Whether the label is a favorite (true/false)", default=False
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the update was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="8755614c-de14-11ef-9b56-32d3674e8b7e",
            description="Updates a personal label in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistUpdateLabelBlock.Input,
            output_schema=TodoistUpdateLabelBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "label_id": "2156154810",
                "name": "Updated Label",
                "color": Colors.charcoal.value,
                "order": 2,
                "is_favorite": True,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"update_label": lambda *args, **kwargs: True},
        )

    @staticmethod
    def update_label(credentials: TodoistCredentials, label_id: str, **kwargs):
        """Update a label via the Todoist API; returns True.

        Raises on API failure; `run` converts that to an "error" output.
        (A previous no-op `try/except ... raise e` wrapper has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        api.update_label(label_id=label_id, **kwargs)
        return True

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        """Collect the provided fields and send a partial update."""
        try:
            label_args = {}
            if input_data.name is not None:
                label_args["name"] = input_data.name
            if input_data.order is not None:
                label_args["order"] = input_data.order
            if input_data.color is not None:
                label_args["color"] = input_data.color.value
            # NOTE(review): is_favorite is a bool with default False, so it is
            # always sent — an update with defaults will unset a favorite flag.
            # Confirm this is intended (an Optional[bool] would allow "leave
            # unchanged").
            if input_data.is_favorite is not None:
                label_args["is_favorite"] = input_data.is_favorite

            success = self.update_label(
                credentials,
                input_data.label_id,
                **{k: v for k, v in label_args.items() if v is not None},
            )

            yield "success", success

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistDeleteLabelBlock(Block):
    """Deletes a personal label in Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        label_id: str = SchemaField(description="ID of the label to delete")

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the deletion was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="901b8f86-de14-11ef-98b8-32d3674e8b7e",
            description="Deletes a personal label in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistDeleteLabelBlock.Input,
            output_schema=TodoistDeleteLabelBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "label_id": "2156154810",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"delete_label": lambda *args, **kwargs: True},
        )

    @staticmethod
    def delete_label(credentials: TodoistCredentials, label_id: str):
        """Delete the label via the SDK and return its success flag."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.delete_label(label_id=label_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "success", self.delete_label(credentials, input_data.label_id)

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetSharedLabelsBlock(Block):
    """Gets all shared labels from Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])

    class Output(BlockSchema):
        labels: list = SchemaField(description="List of shared label names")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="55fba510-de15-11ef-aed2-32d3674e8b7e",
            description="Gets all shared labels from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetSharedLabelsBlock.Input,
            output_schema=TodoistGetSharedLabelsBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("labels", ["Label1", "Label2", "Label3"])],
            test_mock={
                "get_shared_labels": lambda *args, **kwargs: [
                    "Label1",
                    "Label2",
                    "Label3",
                ]
            },
        )

    @staticmethod
    def get_shared_labels(credentials: TodoistCredentials):
        """Return the user's shared labels as provided by the SDK."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.get_shared_labels()

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "labels", self.get_shared_labels(credentials)

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistRenameSharedLabelsBlock(Block):
    """Renames all instances of a shared label"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        name: str = SchemaField(description="The name of the existing label to rename")
        new_name: str = SchemaField(description="The new name for the label")

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the rename was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="9d63ad9a-de14-11ef-ab3f-32d3674e8b7e",
            description="Renames all instances of a shared label",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistRenameSharedLabelsBlock.Input,
            output_schema=TodoistRenameSharedLabelsBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "name": "OldLabel",
                "new_name": "NewLabel",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"rename_shared_labels": lambda *args, **kwargs: True},
        )

    @staticmethod
    def rename_shared_labels(credentials: TodoistCredentials, name: str, new_name: str):
        """Rename every instance of the shared label and return the SDK's success flag."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.rename_shared_label(name=name, new_name=new_name)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            result = self.rename_shared_labels(
                credentials, input_data.name, input_data.new_name
            )
            yield "success", result

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistRemoveSharedLabelsBlock(Block):
    """Removes all instances of a shared label"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        name: str = SchemaField(description="The name of the label to remove")

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the removal was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="a6c5cbde-de14-11ef-8863-32d3674e8b7e",
            description="Removes all instances of a shared label",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistRemoveSharedLabelsBlock.Input,
            output_schema=TodoistRemoveSharedLabelsBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "LabelToRemove"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"remove_shared_label": lambda *args, **kwargs: True},
        )

    @staticmethod
    def remove_shared_label(credentials: TodoistCredentials, name: str):
        """Remove every instance of the shared label and return the SDK's success flag."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.remove_shared_label(name=name)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "success", self.remove_shared_label(credentials, input_data.name)

        except Exception as e:
            yield "error", str(e)
|
||||
566
autogpt_platform/backend/backend/blocks/todoist/projects.py
Normal file
566
autogpt_platform/backend/backend/blocks/todoist/projects.py
Normal file
@@ -0,0 +1,566 @@
|
||||
from todoist_api_python.api import TodoistAPI
|
||||
from typing_extensions import Optional
|
||||
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
)
|
||||
from backend.blocks.todoist._types import Colors
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TodoistListProjectsBlock(Block):
    """Gets all projects for a Todoist user"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])

    class Output(BlockSchema):
        names_list: list[str] = SchemaField(description="List of project names")
        ids_list: list[str] = SchemaField(description="List of project IDs")
        url_list: list[str] = SchemaField(description="List of project URLs")
        complete_data: list[dict] = SchemaField(
            description="Complete project data including all fields"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="5f3e1d5b-6bc5-40e3-97ee-1318b3f38813",
            description="Gets all projects and their details from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistListProjectsBlock.Input,
            output_schema=TodoistListProjectsBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("names_list", ["Inbox"]),
                ("ids_list", ["220474322"]),
                ("url_list", ["https://todoist.com/showProject?id=220474322"]),
                (
                    "complete_data",
                    [
                        {
                            "id": "220474322",
                            "name": "Inbox",
                            "url": "https://todoist.com/showProject?id=220474322",
                        }
                    ],
                ),
            ],
            test_mock={
                "get_project_lists": lambda *args, **kwargs: (
                    ["Inbox"],
                    ["220474322"],
                    ["https://todoist.com/showProject?id=220474322"],
                    [
                        {
                            "id": "220474322",
                            "name": "Inbox",
                            "url": "https://todoist.com/showProject?id=220474322",
                        }
                    ],
                    None,
                )
            },
        )

    @staticmethod
    def get_project_lists(credentials: TodoistCredentials):
        """Fetch all projects and split them into parallel name/id/url/data lists."""
        api = TodoistAPI(credentials.access_token.get_secret_value())

        names, ids, urls, complete_data = [], [], [], []
        for proj in api.get_projects():
            names.append(proj.name)
            ids.append(proj.id)
            urls.append(proj.url)
            complete_data.append(proj.__dict__)

        # NOTE(review): the trailing None fills an unused "error" slot; kept so
        # existing callers that unpack five values keep working.
        return names, ids, urls, complete_data, None

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            names, ids, urls, data, _error = self.get_project_lists(credentials)

            # Empty lists are suppressed rather than emitted.
            if names:
                yield "names_list", names
            if ids:
                yield "ids_list", ids
            if urls:
                yield "url_list", urls
            if data:
                yield "complete_data", data

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistCreateProjectBlock(Block):
    """Creates a new project in Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        name: str = SchemaField(description="Name of the project", advanced=False)
        parent_id: Optional[str] = SchemaField(
            description="Parent project ID", default=None, advanced=True
        )
        color: Optional[Colors] = SchemaField(
            description="Color of the project icon",
            default=Colors.charcoal,
            advanced=True,
        )
        is_favorite: bool = SchemaField(
            description="Whether the project is a favorite",
            default=False,
            advanced=True,
        )
        view_style: Optional[str] = SchemaField(
            description="Display style (list or board)", default=None, advanced=True
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the creation was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="ade60136-de14-11ef-b5e5-32d3674e8b7e",
            description="Creates a new project in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistCreateProjectBlock.Input,
            output_schema=TodoistCreateProjectBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "name": "Test Project"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"create_project": lambda *args, **kwargs: (True)},
        )

    @staticmethod
    def create_project(
        credentials: TodoistCredentials,
        name: str,
        parent_id: Optional[str],
        color: Optional[Colors],
        is_favorite: bool,
        view_style: Optional[str],
    ):
        """Create the project via the SDK; returns True when no exception is raised."""
        api = TodoistAPI(credentials.access_token.get_secret_value())

        # Required fields first; optional ones only when supplied.
        params = {"name": name, "is_favorite": is_favorite}
        if parent_id is not None:
            params["parent_id"] = parent_id
        if color is not None:
            params["color"] = color.value
        if view_style is not None:
            params["view_style"] = view_style

        api.add_project(**params)
        return True

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "success", self.create_project(
                credentials=credentials,
                name=input_data.name,
                parent_id=input_data.parent_id,
                color=input_data.color,
                is_favorite=input_data.is_favorite,
                view_style=input_data.view_style,
            )

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetProjectBlock(Block):
    """Gets details for a specific Todoist project"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        project_id: str = SchemaField(
            description="ID of the project to get details for", advanced=False
        )

    class Output(BlockSchema):
        project_id: str = SchemaField(description="ID of project")
        project_name: str = SchemaField(description="Name of project")
        project_url: str = SchemaField(description="URL of project")
        complete_data: dict = SchemaField(
            description="Complete project data including all fields"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="b435b5ea-de14-11ef-8b51-32d3674e8b7e",
            description="Gets details for a specific Todoist project",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetProjectBlock.Input,
            output_schema=TodoistGetProjectBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("project_id", "2203306141"),
                ("project_name", "Shopping List"),
                ("project_url", "https://todoist.com/showProject?id=2203306141"),
                (
                    "complete_data",
                    {
                        "id": "2203306141",
                        "name": "Shopping List",
                        "url": "https://todoist.com/showProject?id=2203306141",
                    },
                ),
            ],
            test_mock={
                "get_project": lambda *args, **kwargs: (
                    "2203306141",
                    "Shopping List",
                    "https://todoist.com/showProject?id=2203306141",
                    {
                        "id": "2203306141",
                        "name": "Shopping List",
                        "url": "https://todoist.com/showProject?id=2203306141",
                    },
                )
            },
        )

    @staticmethod
    def get_project(credentials: TodoistCredentials, project_id: str):
        """Return (id, name, url, full-dict) for the requested project."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        project = api.get_project(project_id=project_id)
        return project.id, project.name, project.url, project.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            project_id, project_name, project_url, data = self.get_project(
                credentials=credentials, project_id=input_data.project_id
            )

            # Falsy values (empty strings / empty dict) are suppressed.
            if project_id:
                yield "project_id", project_id
            if project_name:
                yield "project_name", project_name
            if project_url:
                yield "project_url", project_url
            if data:
                yield "complete_data", data

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistUpdateProjectBlock(Block):
    """Updates an existing project in Todoist"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        project_id: str = SchemaField(
            description="ID of project to update", advanced=False
        )
        name: Optional[str] = SchemaField(
            description="New name for the project", default=None, advanced=False
        )
        color: Optional[Colors] = SchemaField(
            description="New color for the project icon", default=None, advanced=True
        )
        is_favorite: Optional[bool] = SchemaField(
            description="Whether the project should be a favorite",
            default=None,
            advanced=True,
        )
        view_style: Optional[str] = SchemaField(
            description="Display style (list or board)", default=None, advanced=True
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the update was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="ba41a20a-de14-11ef-91d7-32d3674e8b7e",
            description="Updates an existing project in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistUpdateProjectBlock.Input,
            output_schema=TodoistUpdateProjectBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
                "name": "Things To Buy",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"update_project": lambda *args, **kwargs: (True)},
        )

    @staticmethod
    def update_project(
        credentials: TodoistCredentials,
        project_id: str,
        name: Optional[str],
        color: Optional[Colors],
        is_favorite: Optional[bool],
        view_style: Optional[str],
    ):
        """Send only the supplied fields to the API; returns True on success."""
        api = TodoistAPI(credentials.access_token.get_secret_value())

        params = {}
        if name is not None:
            params["name"] = name
        if color is not None:
            params["color"] = color.value
        if is_favorite is not None:
            params["is_favorite"] = is_favorite
        if view_style is not None:
            params["view_style"] = view_style

        api.update_project(project_id=project_id, **params)
        return True

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "success", self.update_project(
                credentials=credentials,
                project_id=input_data.project_id,
                name=input_data.name,
                color=input_data.color,
                is_favorite=input_data.is_favorite,
                view_style=input_data.view_style,
            )

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistDeleteProjectBlock(Block):
    """Deletes a project and all of its sections and tasks"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        project_id: str = SchemaField(
            description="ID of project to delete", advanced=False
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the deletion was successful")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="c2893acc-de14-11ef-a113-32d3674e8b7e",
            description="Deletes a Todoist project and all its contents",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistDeleteProjectBlock.Input,
            output_schema=TodoistDeleteProjectBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"delete_project": lambda *args, **kwargs: (True)},
        )

    @staticmethod
    def delete_project(credentials: TodoistCredentials, project_id: str):
        """Delete the project via the SDK and return its success flag."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.delete_project(project_id=project_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            yield "success", self.delete_project(
                credentials=credentials, project_id=input_data.project_id
            )

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistListCollaboratorsBlock(Block):
    """Gets all collaborators for a Todoist project"""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        project_id: str = SchemaField(
            description="ID of the project to get collaborators for", advanced=False
        )

    class Output(BlockSchema):
        collaborator_ids: list[str] = SchemaField(
            description="List of collaborator IDs"
        )
        collaborator_names: list[str] = SchemaField(
            description="List of collaborator names"
        )
        collaborator_emails: list[str] = SchemaField(
            description="List of collaborator email addresses"
        )
        complete_data: list[dict] = SchemaField(
            description="Complete collaborator data including all fields"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="c99c804e-de14-11ef-9f47-32d3674e8b7e",
            description="Gets all collaborators for a specific Todoist project",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistListCollaboratorsBlock.Input,
            output_schema=TodoistListCollaboratorsBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("collaborator_ids", ["2671362", "2671366"]),
                ("collaborator_names", ["Alice", "Bob"]),
                ("collaborator_emails", ["alice@example.com", "bob@example.com"]),
                (
                    "complete_data",
                    [
                        {
                            "id": "2671362",
                            "name": "Alice",
                            "email": "alice@example.com",
                        },
                        {"id": "2671366", "name": "Bob", "email": "bob@example.com"},
                    ],
                ),
            ],
            test_mock={
                "get_collaborators": lambda *args, **kwargs: (
                    ["2671362", "2671366"],
                    ["Alice", "Bob"],
                    ["alice@example.com", "bob@example.com"],
                    [
                        {
                            "id": "2671362",
                            "name": "Alice",
                            "email": "alice@example.com",
                        },
                        {"id": "2671366", "name": "Bob", "email": "bob@example.com"},
                    ],
                )
            },
        )

    @staticmethod
    def get_collaborators(credentials: TodoistCredentials, project_id: str):
        """Fetch collaborators and split them into parallel id/name/email/data lists."""
        api = TodoistAPI(credentials.access_token.get_secret_value())

        ids, names, emails, complete_data = [], [], [], []
        for person in api.get_collaborators(project_id=project_id):
            ids.append(person.id)
            names.append(person.name)
            emails.append(person.email)
            complete_data.append(person.__dict__)

        return ids, names, emails, complete_data

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            ids, names, emails, data = self.get_collaborators(
                credentials=credentials, project_id=input_data.project_id
            )

            # Empty lists are suppressed rather than emitted.
            if ids:
                yield "collaborator_ids", ids
            if names:
                yield "collaborator_names", names
            if emails:
                yield "collaborator_emails", emails
            if data:
                yield "complete_data", data

        except Exception as e:
            yield "error", str(e)
|
||||
306
autogpt_platform/backend/backend/blocks/todoist/sections.py
Normal file
306
autogpt_platform/backend/backend/blocks/todoist/sections.py
Normal file
@@ -0,0 +1,306 @@
|
||||
from todoist_api_python.api import TodoistAPI
|
||||
from typing_extensions import Optional
|
||||
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
)
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TodoistListSectionsBlock(Block):
    """Gets all sections for a Todoist project.

    If no project_id is supplied, the Todoist API returns sections across all
    projects.
    """

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        # FIX: Optional field previously lacked a default, which made it
        # required by the schema despite being documented as optional.
        project_id: Optional[str] = SchemaField(
            description="Optional project ID to filter sections", default=None
        )

    class Output(BlockSchema):
        names_list: list[str] = SchemaField(description="List of section names")
        ids_list: list[str] = SchemaField(description="List of section IDs")
        complete_data: list[dict] = SchemaField(
            description="Complete section data including all fields"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="d6a116d8-de14-11ef-a94c-32d3674e8b7e",
            description="Gets all sections and their details from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistListSectionsBlock.Input,
            output_schema=TodoistListSectionsBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("names_list", ["Groceries"]),
                ("ids_list", ["7025"]),
                (
                    "complete_data",
                    [
                        {
                            "id": "7025",
                            "project_id": "2203306141",
                            "order": 1,
                            "name": "Groceries",
                        }
                    ],
                ),
            ],
            test_mock={
                "get_section_lists": lambda *args, **kwargs: (
                    ["Groceries"],
                    ["7025"],
                    [
                        {
                            "id": "7025",
                            "project_id": "2203306141",
                            "order": 1,
                            "name": "Groceries",
                        }
                    ],
                )
            },
        )

    @staticmethod
    def get_section_lists(
        credentials: TodoistCredentials, project_id: Optional[str] = None
    ):
        """Fetch sections (optionally filtered by project) as parallel name/id/data lists."""
        api = TodoistAPI(credentials.access_token.get_secret_value())
        sections = api.get_sections(project_id=project_id)

        names = []
        ids = []
        complete_data = []

        for section in sections:
            names.append(section.name)
            ids.append(section.id)
            complete_data.append(section.__dict__)

        return names, ids, complete_data

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            names, ids, data = self.get_section_lists(
                credentials, input_data.project_id
            )

            # Empty lists are suppressed rather than emitted.
            if names:
                yield "names_list", names
            if ids:
                yield "ids_list", ids
            if data:
                yield "complete_data", data

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
# Error in official todoist SDK. Will add this block using sync_api
|
||||
# class TodoistCreateSectionBlock(Block):
|
||||
# """Creates a new section in a Todoist project"""
|
||||
|
||||
# class Input(BlockSchema):
|
||||
# credentials: TodoistCredentialsInput = TodoistCredentialsField([])
|
||||
# name: str = SchemaField(description="Section name")
|
||||
# project_id: str = SchemaField(description="Project ID this section should belong to")
|
||||
# order: Optional[int] = SchemaField(description="Optional order among other sections", default=None)
|
||||
|
||||
# class Output(BlockSchema):
|
||||
# success: bool = SchemaField(description="Whether section was successfully created")
|
||||
# error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
# def __init__(self):
|
||||
# super().__init__(
|
||||
# id="e3025cfc-de14-11ef-b9f2-32d3674e8b7e",
|
||||
# description="Creates a new section in a Todoist project",
|
||||
# categories={BlockCategory.PRODUCTIVITY},
|
||||
# input_schema=TodoistCreateSectionBlock.Input,
|
||||
# output_schema=TodoistCreateSectionBlock.Output,
|
||||
# test_input={
|
||||
# "credentials": TEST_CREDENTIALS_INPUT,
|
||||
# "name": "Groceries",
|
||||
# "project_id": "2203306141"
|
||||
# },
|
||||
# test_credentials=TEST_CREDENTIALS,
|
||||
# test_output=[
|
||||
# ("success", True)
|
||||
# ],
|
||||
# test_mock={
|
||||
# "create_section": lambda *args, **kwargs: (
|
||||
# {"id": "7025", "project_id": "2203306141", "order": 1, "name": "Groceries"},
|
||||
# )
|
||||
# },
|
||||
# )
|
||||
|
||||
# @staticmethod
|
||||
# def create_section(credentials: TodoistCredentials, name: str, project_id: str, order: Optional[int] = None):
|
||||
# try:
|
||||
# api = TodoistAPI(credentials.access_token.get_secret_value())
|
||||
# section = api.add_section(name=name, project_id=project_id, order=order)
|
||||
# return section.__dict__
|
||||
|
||||
# except Exception as e:
|
||||
# raise e
|
||||
|
||||
# def run(
|
||||
# self,
|
||||
# input_data: Input,
|
||||
# *,
|
||||
# credentials: TodoistCredentials,
|
||||
# **kwargs,
|
||||
# ) -> BlockOutput:
|
||||
# try:
|
||||
# section_data = self.create_section(
|
||||
# credentials,
|
||||
# input_data.name,
|
||||
# input_data.project_id,
|
||||
# input_data.order
|
||||
# )
|
||||
|
||||
# if section_data:
|
||||
# yield "success", True
|
||||
|
||||
# except Exception as e:
|
||||
# yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetSectionBlock(Block):
    """Gets a single section from Todoist by ID."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        section_id: str = SchemaField(description="ID of section to fetch")

    class Output(BlockSchema):
        id: str = SchemaField(description="ID of section")
        project_id: str = SchemaField(description="Project ID the section belongs to")
        order: int = SchemaField(description="Order of the section")
        name: str = SchemaField(description="Name of the section")
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="ea5580e2-de14-11ef-a5d3-32d3674e8b7e",
            description="Gets a single section by ID from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetSectionBlock.Input,
            output_schema=TodoistGetSectionBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "7025"),
                ("project_id", "2203306141"),
                ("order", 1),
                ("name", "Groceries"),
            ],
            test_mock={
                "get_section": lambda *args, **kwargs: {
                    "id": "7025",
                    "project_id": "2203306141",
                    "order": 1,
                    "name": "Groceries",
                }
            },
        )

    @staticmethod
    def get_section(credentials: TodoistCredentials, section_id: str) -> dict:
        """Fetch a single Todoist section and return its attributes as a dict.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original wrapped this in a no-op
        `try/except Exception as e: raise e`, which has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        section = api.get_section(section_id=section_id)
        return section.__dict__

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            section_data = self.get_section(credentials, input_data.section_id)

            if section_data:
                yield "id", section_data["id"]
                yield "project_id", section_data["project_id"]
                yield "order", section_data["order"]
                yield "name", section_data["name"]

        except Exception as e:
            # Surface API failures on the declared "error" output instead of raising.
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistDeleteSectionBlock(Block):
    """Deletes a section and all its tasks from Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        section_id: str = SchemaField(description="ID of section to delete")

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether section was successfully deleted"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        super().__init__(
            id="f0e52eee-de14-11ef-9b12-32d3674e8b7e",
            description="Deletes a section and all its tasks from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistDeleteSectionBlock.Input,
            output_schema=TodoistDeleteSectionBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "section_id": "7025"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"delete_section": lambda *args, **kwargs: True},
        )

    @staticmethod
    def delete_section(credentials: TodoistCredentials, section_id: str) -> bool:
        """Delete a Todoist section (and its tasks); return the API's success flag.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.delete_section(section_id=section_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            success = self.delete_section(credentials, input_data.section_id)
            yield "success", success

        except Exception as e:
            yield "error", str(e)
|
||||
660
autogpt_platform/backend/backend/blocks/todoist/tasks.py
Normal file
660
autogpt_platform/backend/backend/blocks/todoist/tasks.py
Normal file
@@ -0,0 +1,660 @@
|
||||
from datetime import datetime
|
||||
|
||||
from todoist_api_python.api import TodoistAPI
|
||||
from todoist_api_python.models import Task
|
||||
from typing_extensions import Optional
|
||||
|
||||
from backend.blocks.todoist._auth import (
|
||||
TEST_CREDENTIALS,
|
||||
TEST_CREDENTIALS_INPUT,
|
||||
TodoistCredentials,
|
||||
TodoistCredentialsField,
|
||||
TodoistCredentialsInput,
|
||||
)
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class TodoistCreateTaskBlock(Block):
    """Creates a new task in a Todoist project."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        content: str = SchemaField(description="Task content", advanced=False)
        description: Optional[str] = SchemaField(
            description="Task description", default=None, advanced=False
        )
        project_id: Optional[str] = SchemaField(
            description="Project ID this task should belong to",
            default=None,
            advanced=False,
        )
        section_id: Optional[str] = SchemaField(
            description="Section ID this task should belong to",
            default=None,
            advanced=False,
        )
        parent_id: Optional[str] = SchemaField(
            description="Parent task ID", default=None, advanced=True
        )
        order: Optional[int] = SchemaField(
            description="Optional order among other tasks,[Non-zero integer value used by clients to sort tasks under the same parent]",
            default=None,
            advanced=True,
        )
        labels: Optional[list[str]] = SchemaField(
            description="Task labels", default=None, advanced=True
        )
        priority: Optional[int] = SchemaField(
            description="Task priority from 1 (normal) to 4 (urgent)",
            default=None,
            advanced=True,
        )
        due_date: Optional[datetime] = SchemaField(
            description="Due date in YYYY-MM-DD format", advanced=True, default=None
        )
        deadline_date: Optional[datetime] = SchemaField(
            description="Specific date in YYYY-MM-DD format relative to user's timezone",
            default=None,
            advanced=True,
        )
        assignee_id: Optional[str] = SchemaField(
            description="Responsible user ID", default=None, advanced=True
        )
        duration_unit: Optional[str] = SchemaField(
            description="Task duration unit (minute/day)", default=None, advanced=True
        )
        duration: Optional[int] = SchemaField(
            # Typo fix: "selecct" -> "select"
            description="Task duration amount, You need to select the duration unit first",
            depends_on=["duration_unit"],
            default=None,
            advanced=True,
        )

    class Output(BlockSchema):
        id: str = SchemaField(description="Task ID")
        url: str = SchemaField(description="Task URL")
        complete_data: dict = SchemaField(
            description="Complete task data as dictionary"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="fde4f458-de14-11ef-bf0c-32d3674e8b7e",
            description="Creates a new task in a Todoist project",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistCreateTaskBlock.Input,
            output_schema=TodoistCreateTaskBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "content": "Buy groceries",
                "project_id": "2203306141",
                "priority": 4,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("id", "2995104339"),
                ("url", "https://todoist.com/showTask?id=2995104339"),
                (
                    "complete_data",
                    {
                        "id": "2995104339",
                        "project_id": "2203306141",
                        "url": "https://todoist.com/showTask?id=2995104339",
                    },
                ),
            ],
            test_mock={
                "create_task": lambda *args, **kwargs: (
                    "2995104339",
                    "https://todoist.com/showTask?id=2995104339",
                    {
                        "id": "2995104339",
                        "project_id": "2203306141",
                        "url": "https://todoist.com/showTask?id=2995104339",
                    },
                )
            },
        )

    @staticmethod
    def create_task(credentials: TodoistCredentials, content: str, **kwargs):
        """Create a task via the Todoist API.

        Returns (task_id, task_url, task_as_dict). API errors propagate to
        the caller; run() converts them into the "error" output. (The
        original's `try/except Exception as e: raise e` was a no-op and has
        been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        task = api.add_task(content=content, **kwargs)
        task_dict = Task.to_dict(task)
        return task.id, task.url, task_dict

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            # Todoist expects dates as YYYY-MM-DD strings.
            due_date = (
                input_data.due_date.strftime("%Y-%m-%d")
                if input_data.due_date
                else None
            )
            deadline_date = (
                input_data.deadline_date.strftime("%Y-%m-%d")
                if input_data.deadline_date
                else None
            )

            # Collect every optional argument; unset (None) values are dropped
            # below so the API only receives explicitly provided fields.
            task_args = {
                "description": input_data.description,
                "project_id": input_data.project_id,
                "section_id": input_data.section_id,
                "parent_id": input_data.parent_id,
                "order": input_data.order,
                "labels": input_data.labels,
                "priority": input_data.priority,
                "due_date": due_date,
                "deadline_date": deadline_date,
                "assignee_id": input_data.assignee_id,
                "duration": input_data.duration,
                "duration_unit": input_data.duration_unit,
            }

            id, url, complete_data = self.create_task(
                credentials,
                input_data.content,
                **{k: v for k, v in task_args.items() if v is not None},
            )

            yield "id", id
            yield "url", url
            yield "complete_data", complete_data

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetTasksBlock(Block):
    """Get active tasks from Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        project_id: Optional[str] = SchemaField(
            description="Filter tasks by project ID", default=None, advanced=False
        )
        section_id: Optional[str] = SchemaField(
            description="Filter tasks by section ID", default=None, advanced=True
        )
        label: Optional[str] = SchemaField(
            description="Filter tasks by label name", default=None, advanced=True
        )
        filter: Optional[str] = SchemaField(
            # Typo fix: "one of your one" -> "one of your own"
            description="Filter by any supported filter, You can see How to use filters or create one of your own here - https://todoist.com/help/articles/introduction-to-filters-V98wIH",
            default=None,
            advanced=True,
        )
        lang: Optional[str] = SchemaField(
            description="IETF language tag for filter language", default=None
        )
        ids: Optional[list[str]] = SchemaField(
            description="List of task IDs to retrieve", default=None, advanced=False
        )

    class Output(BlockSchema):
        ids: list[str] = SchemaField(description="Task IDs")
        urls: list[str] = SchemaField(description="Task URLs")
        complete_data: list[dict] = SchemaField(
            description="Complete task data as dictionary"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="0b706e86-de15-11ef-a113-32d3674e8b7e",
            description="Get active tasks from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetTasksBlock.Input,
            output_schema=TodoistGetTasksBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "project_id": "2203306141",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("ids", ["2995104339"]),
                ("urls", ["https://todoist.com/showTask?id=2995104339"]),
                (
                    "complete_data",
                    [
                        {
                            "id": "2995104339",
                            "project_id": "2203306141",
                            "url": "https://todoist.com/showTask?id=2995104339",
                            "is_completed": False,
                        }
                    ],
                ),
            ],
            test_mock={
                "get_tasks": lambda *args, **kwargs: [
                    {
                        "id": "2995104339",
                        "project_id": "2203306141",
                        "url": "https://todoist.com/showTask?id=2995104339",
                        "is_completed": False,
                    }
                ]
            },
        )

    @staticmethod
    def get_tasks(credentials: TodoistCredentials, **kwargs):
        """Fetch active tasks matching the given filters; return them as dicts.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        tasks = api.get_tasks(**kwargs)
        return [Task.to_dict(task) for task in tasks]

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            # Collect every filter; unset (None) values are dropped below so
            # only explicitly provided filters are sent to the API.
            task_filters = {
                "project_id": input_data.project_id,
                "section_id": input_data.section_id,
                "label": input_data.label,
                "filter": input_data.filter,
                "lang": input_data.lang,
                "ids": input_data.ids,
            }

            tasks = self.get_tasks(
                credentials, **{k: v for k, v in task_filters.items() if v is not None}
            )

            yield "ids", [task["id"] for task in tasks]
            yield "urls", [task["url"] for task in tasks]
            yield "complete_data", tasks

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistGetTaskBlock(Block):
    """Get an active task from Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        task_id: str = SchemaField(description="Task ID to retrieve")

    class Output(BlockSchema):
        project_id: str = SchemaField(description="Project ID containing the task")
        url: str = SchemaField(description="Task URL")
        complete_data: dict = SchemaField(
            description="Complete task data as dictionary"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="16d7dc8c-de15-11ef-8ace-32d3674e8b7e",
            description="Get an active task from Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistGetTaskBlock.Input,
            output_schema=TodoistGetTaskBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("project_id", "2203306141"),
                ("url", "https://todoist.com/showTask?id=2995104339"),
                (
                    "complete_data",
                    {
                        "id": "2995104339",
                        "project_id": "2203306141",
                        "url": "https://todoist.com/showTask?id=2995104339",
                    },
                ),
            ],
            test_mock={
                "get_task": lambda *args, **kwargs: {
                    "project_id": "2203306141",
                    "id": "2995104339",
                    "url": "https://todoist.com/showTask?id=2995104339",
                }
            },
        )

    @staticmethod
    def get_task(credentials: TodoistCredentials, task_id: str) -> dict:
        """Fetch a single active task and return it as a dict.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        task = api.get_task(task_id=task_id)
        return Task.to_dict(task)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            task_data = self.get_task(credentials, input_data.task_id)

            if task_data:
                yield "project_id", task_data["project_id"]
                yield "url", task_data["url"]
                yield "complete_data", task_data

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistUpdateTaskBlock(Block):
    """Updates an existing task in Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        task_id: str = SchemaField(description="Task ID to update")
        content: str = SchemaField(description="Task content", advanced=False)
        description: Optional[str] = SchemaField(
            description="Task description", default=None, advanced=False
        )
        project_id: Optional[str] = SchemaField(
            description="Project ID this task should belong to",
            default=None,
            advanced=False,
        )
        section_id: Optional[str] = SchemaField(
            description="Section ID this task should belong to",
            default=None,
            advanced=False,
        )
        parent_id: Optional[str] = SchemaField(
            description="Parent task ID", default=None, advanced=True
        )
        order: Optional[int] = SchemaField(
            description="Optional order among other tasks,[Non-zero integer value used by clients to sort tasks under the same parent]",
            default=None,
            advanced=True,
        )
        labels: Optional[list[str]] = SchemaField(
            description="Task labels", default=None, advanced=True
        )
        priority: Optional[int] = SchemaField(
            description="Task priority from 1 (normal) to 4 (urgent)",
            default=None,
            advanced=True,
        )
        due_date: Optional[datetime] = SchemaField(
            description="Due date in YYYY-MM-DD format", advanced=True, default=None
        )
        deadline_date: Optional[datetime] = SchemaField(
            description="Specific date in YYYY-MM-DD format relative to user's timezone",
            default=None,
            advanced=True,
        )
        assignee_id: Optional[str] = SchemaField(
            description="Responsible user ID", default=None, advanced=True
        )
        duration_unit: Optional[str] = SchemaField(
            description="Task duration unit (minute/day)", default=None, advanced=True
        )
        duration: Optional[int] = SchemaField(
            # Typo fix: "selecct" -> "select"
            description="Task duration amount, You need to select the duration unit first",
            depends_on=["duration_unit"],
            default=None,
            advanced=True,
        )

    class Output(BlockSchema):
        success: bool = SchemaField(description="Whether the update was successful")
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="1eee6d32-de15-11ef-a2ff-32d3674e8b7e",
            description="Updates an existing task in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistUpdateTaskBlock.Input,
            output_schema=TodoistUpdateTaskBlock.Output,
            test_input={
                "credentials": TEST_CREDENTIALS_INPUT,
                "task_id": "2995104339",
                "content": "Buy Coffee",
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"update_task": lambda *args, **kwargs: True},
        )

    @staticmethod
    def update_task(credentials: TodoistCredentials, task_id: str, **kwargs):
        """Update a task via the Todoist API; return the API's success flag.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.update_task(task_id=task_id, **kwargs)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            # Todoist expects dates as YYYY-MM-DD strings.
            due_date = (
                input_data.due_date.strftime("%Y-%m-%d")
                if input_data.due_date
                else None
            )
            deadline_date = (
                input_data.deadline_date.strftime("%Y-%m-%d")
                if input_data.deadline_date
                else None
            )

            # Collect every candidate update, then drop unset (None) values in
            # one place — same pattern as TodoistCreateTaskBlock.run. This
            # replaces the original's redundant per-field `if ... is not None`
            # chain, which was followed by a second identical None filter.
            task_updates = {
                "content": input_data.content,
                "description": input_data.description,
                "project_id": input_data.project_id,
                "section_id": input_data.section_id,
                "parent_id": input_data.parent_id,
                "order": input_data.order,
                "labels": input_data.labels,
                "priority": input_data.priority,
                "due_date": due_date,
                "deadline_date": deadline_date,
                "assignee_id": input_data.assignee_id,
                "duration": input_data.duration,
                "duration_unit": input_data.duration_unit,
            }

            self.update_task(
                credentials,
                input_data.task_id,
                **{k: v for k, v in task_updates.items() if v is not None},
            )

            yield "success", True

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistCloseTaskBlock(Block):
    """Closes a task in Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        task_id: str = SchemaField(description="Task ID to close")

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the task was successfully closed"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="29fac798-de15-11ef-b839-32d3674e8b7e",
            description="Closes a task in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistCloseTaskBlock.Input,
            output_schema=TodoistCloseTaskBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[("success", True)],
            test_mock={"close_task": lambda *args, **kwargs: True},
        )

    @staticmethod
    def close_task(credentials: TodoistCredentials, task_id: str) -> bool:
        """Close (complete) a task; return the API's success flag.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.close_task(task_id=task_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            is_success = self.close_task(credentials, input_data.task_id)
            yield "success", is_success

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistReopenTaskBlock(Block):
    """Reopens a task in Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        task_id: str = SchemaField(description="Task ID to reopen")

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the task was successfully reopened"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="2e6bf6f8-de15-11ef-ae7c-32d3674e8b7e",
            description="Reopens a task in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistReopenTaskBlock.Input,
            output_schema=TodoistReopenTaskBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"reopen_task": lambda *args, **kwargs: True},
        )

    @staticmethod
    def reopen_task(credentials: TodoistCredentials, task_id: str) -> bool:
        """Reopen a previously closed task; return the API's success flag.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.reopen_task(task_id=task_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            is_success = self.reopen_task(credentials, input_data.task_id)
            yield "success", is_success

        except Exception as e:
            yield "error", str(e)
|
||||
|
||||
|
||||
class TodoistDeleteTaskBlock(Block):
    """Deletes a task in Todoist."""

    class Input(BlockSchema):
        credentials: TodoistCredentialsInput = TodoistCredentialsField([])
        task_id: str = SchemaField(description="Task ID to delete")

    class Output(BlockSchema):
        success: bool = SchemaField(
            description="Whether the task was successfully deleted"
        )
        error: str = SchemaField(description="Error message if request failed")

    def __init__(self):
        super().__init__(
            id="33c29ada-de15-11ef-bcbb-32d3674e8b7e",
            description="Deletes a task in Todoist",
            categories={BlockCategory.PRODUCTIVITY},
            input_schema=TodoistDeleteTaskBlock.Input,
            output_schema=TodoistDeleteTaskBlock.Output,
            test_input={"credentials": TEST_CREDENTIALS_INPUT, "task_id": "2995104339"},
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("success", True),
            ],
            test_mock={"delete_task": lambda *args, **kwargs: True},
        )

    @staticmethod
    def delete_task(credentials: TodoistCredentials, task_id: str) -> bool:
        """Delete a task; return the API's success flag.

        API errors propagate to the caller; run() converts them into the
        "error" output. (The original's `try/except Exception as e: raise e`
        was a no-op and has been removed.)
        """
        api = TodoistAPI(credentials.access_token.get_secret_value())
        return api.delete_task(task_id=task_id)

    def run(
        self,
        input_data: Input,
        *,
        credentials: TodoistCredentials,
        **kwargs,
    ) -> BlockOutput:
        try:
            is_success = self.delete_task(credentials, input_data.task_id)
            yield "success", is_success

        except Exception as e:
            yield "error", str(e)
|
||||
@@ -92,7 +92,8 @@ class TwitterPostTweetBlock(Block):
|
||||
attachment: Union[Media, DeepLink, Poll, Place, Quote] | None = SchemaField(
|
||||
discriminator="discriminator",
|
||||
description="Additional tweet data (media, deep link, poll, place or quote)",
|
||||
advanced=True,
|
||||
advanced=False,
|
||||
default=Media(discriminator="media"),
|
||||
)
|
||||
|
||||
exclude_reply_user_ids: Optional[List[str]] = SchemaField(
|
||||
|
||||
37
autogpt_platform/backend/backend/blocks/xml_parser.py
Normal file
37
autogpt_platform/backend/backend/blocks/xml_parser.py
Normal file
@@ -0,0 +1,37 @@
|
||||
from gravitasml.parser import Parser
|
||||
from gravitasml.token import tokenize
|
||||
|
||||
from backend.data.block import Block, BlockOutput, BlockSchema
|
||||
from backend.data.model import SchemaField
|
||||
|
||||
|
||||
class XMLParserBlock(Block):
    """Parses an XML string into a nested dict using gravitasml."""

    class Input(BlockSchema):
        input_xml: str = SchemaField(description="input xml to be parsed")

    class Output(BlockSchema):
        parsed_xml: dict = SchemaField(description="output parsed xml to dict")
        error: str = SchemaField(description="Error in parsing")

    def __init__(self):
        super().__init__(
            id="286380af-9529-4b55-8be0-1d7c854abdb5",
            # Typo fix: "coverts" -> "converts"
            description="Parses XML using gravitasml to tokenize and converts it to dict",
            input_schema=XMLParserBlock.Input,
            output_schema=XMLParserBlock.Output,
            test_input={"input_xml": "<tag1><tag2>content</tag2></tag1>"},
            test_output=[
                ("parsed_xml", {"tag1": {"tag2": "content"}}),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        """Tokenize and parse the input XML, yielding the resulting dict."""
        try:
            tokens = tokenize(input_data.input_xml)
            parser = Parser(tokens)
            parsed_result = parser.parse()
            yield "parsed_xml", parsed_result
        except ValueError as val_e:
            # NOTE(review): these handlers re-raise instead of yielding the
            # declared "error" output; confirm whether callers rely on the
            # exception before changing this to `yield "error", ...`.
            raise ValueError(f"Validation error for dict:{val_e}") from val_e
        except SyntaxError as syn_e:
            raise SyntaxError(f"Error in input xml syntax: {syn_e}") from syn_e
|
||||
@@ -221,7 +221,8 @@ def event():
|
||||
@test.command()
|
||||
@click.argument("server_address")
|
||||
@click.argument("graph_id")
|
||||
def websocket(server_address: str, graph_id: str):
|
||||
@click.argument("graph_version")
|
||||
def websocket(server_address: str, graph_id: str, graph_version: int):
|
||||
"""
|
||||
Tests the websocket connection.
|
||||
"""
|
||||
@@ -237,7 +238,9 @@ def websocket(server_address: str, graph_id: str):
|
||||
try:
|
||||
msg = WsMessage(
|
||||
method=Methods.SUBSCRIBE,
|
||||
data=ExecutionSubscription(graph_id=graph_id).model_dump(),
|
||||
data=ExecutionSubscription(
|
||||
graph_id=graph_id, graph_version=graph_version
|
||||
).model_dump(),
|
||||
).model_dump_json()
|
||||
await websocket.send(msg)
|
||||
print(f"Sending: {msg}")
|
||||
|
||||
@@ -35,6 +35,8 @@ from backend.integrations.credentials_store import (
|
||||
# =============== Configure the cost for each LLM Model call =============== #
|
||||
|
||||
MODEL_COST: dict[LlmModel, int] = {
|
||||
LlmModel.O3_MINI: 2, # $1.10 / $4.40
|
||||
LlmModel.O1: 16, # $15 / $60
|
||||
LlmModel.O1_PREVIEW: 16,
|
||||
LlmModel.O1_MINI: 4,
|
||||
LlmModel.GPT4O_MINI: 1,
|
||||
@@ -42,20 +44,21 @@ MODEL_COST: dict[LlmModel, int] = {
|
||||
LlmModel.GPT4_TURBO: 10,
|
||||
LlmModel.GPT3_5_TURBO: 1,
|
||||
LlmModel.CLAUDE_3_5_SONNET: 4,
|
||||
LlmModel.CLAUDE_3_5_HAIKU: 1, # $0.80 / $4.00
|
||||
LlmModel.CLAUDE_3_HAIKU: 1,
|
||||
LlmModel.LLAMA3_8B: 1,
|
||||
LlmModel.LLAMA3_70B: 1,
|
||||
LlmModel.MIXTRAL_8X7B: 1,
|
||||
LlmModel.GEMMA_7B: 1,
|
||||
LlmModel.GEMMA2_9B: 1,
|
||||
LlmModel.LLAMA3_1_405B: 1,
|
||||
LlmModel.LLAMA3_1_70B: 1,
|
||||
LlmModel.LLAMA3_3_70B: 1, # $0.59 / $0.79
|
||||
LlmModel.LLAMA3_1_8B: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_3: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_2: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_8B: 1,
|
||||
LlmModel.OLLAMA_LLAMA3_405B: 1,
|
||||
LlmModel.DEEPSEEK_LLAMA_70B: 1, # ? / ?
|
||||
LlmModel.OLLAMA_DOLPHIN: 1,
|
||||
LlmModel.GEMINI_FLASH_1_5_8B: 1,
|
||||
LlmModel.GEMINI_FLASH_1_5: 1,
|
||||
LlmModel.GROK_BETA: 5,
|
||||
LlmModel.MISTRAL_NEMO: 1,
|
||||
LlmModel.COHERE_COMMAND_R_08_2024: 1,
|
||||
|
||||
@@ -10,7 +10,6 @@ class BlockCostType(str, Enum):
|
||||
RUN = "run" # cost X credits per run
|
||||
BYTE = "byte" # cost X credits per byte
|
||||
SECOND = "second" # cost X credits per second
|
||||
DOLLAR = "dollar" # cost X dollars per run
|
||||
|
||||
|
||||
class BlockCost(BaseModel):
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import stripe
|
||||
@@ -8,13 +9,14 @@ from prisma.enums import CreditTransactionType
|
||||
from prisma.errors import UniqueViolationError
|
||||
from prisma.models import CreditTransaction, User
|
||||
from prisma.types import CreditTransactionCreateInput, CreditTransactionWhereInput
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.data import db
|
||||
from backend.data.block import Block, BlockInput, get_block
|
||||
from backend.data.block_cost_config import BLOCK_COSTS
|
||||
from backend.data.cost import BlockCost, BlockCostType
|
||||
from backend.data.execution import NodeExecutionEntry
|
||||
from backend.data.model import AutoTopUpConfig
|
||||
from backend.data.model import AutoTopUpConfig, TransactionHistory, UserTransaction
|
||||
from backend.data.user import get_user_by_id
|
||||
from backend.util.settings import Settings
|
||||
|
||||
@@ -34,6 +36,26 @@ class UserCreditBase(ABC):
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def get_transaction_history(
|
||||
self,
|
||||
user_id: str,
|
||||
transaction_time: datetime,
|
||||
transaction_count_limit: int,
|
||||
) -> TransactionHistory:
|
||||
"""
|
||||
Get the credit transactions for the user.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
transaction_time (datetime): The upper bound of the transaction time.
|
||||
transaction_count_limit (int): The transaction count limit.
|
||||
|
||||
Returns:
|
||||
TransactionHistory: The credit transactions for the user.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def spend_credits(
|
||||
self,
|
||||
@@ -132,12 +154,14 @@ class UserCreditBase(ABC):
|
||||
},
|
||||
)
|
||||
transaction_balance = (
|
||||
transactions[0].get("_sum", {}).get("amount", 0) + snapshot_balance
|
||||
int(transactions[0].get("_sum", {}).get("amount", 0) + snapshot_balance)
|
||||
if transactions
|
||||
else snapshot_balance
|
||||
)
|
||||
transaction_time = (
|
||||
transactions[0].get("_max", {}).get("createdAt", datetime_min)
|
||||
datetime.fromisoformat(
|
||||
str(transactions[0].get("_max", {}).get("createdAt", datetime_min))
|
||||
)
|
||||
if transactions
|
||||
else snapshot_time
|
||||
)
|
||||
@@ -178,15 +202,37 @@ class UserCreditBase(ABC):
|
||||
transaction_type: CreditTransactionType,
|
||||
is_active: bool = True,
|
||||
transaction_key: str | None = None,
|
||||
ceiling_balance: int | None = None,
|
||||
metadata: Json = Json({}),
|
||||
) -> int:
|
||||
) -> tuple[int, str]:
|
||||
"""
|
||||
Add a new transaction for the user.
|
||||
This is the only method that should be used to add a new transaction.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
amount (int): The amount of credits to add.
|
||||
transaction_type (CreditTransactionType): The type of transaction.
|
||||
is_active (bool): Whether the transaction is active or needs to be manually activated through _enable_transaction.
|
||||
transaction_key (str | None): The transaction key. Avoids adding transaction if the key already exists.
|
||||
ceiling_balance (int | None): The ceiling balance. Avoids adding more credits if the balance is already above the ceiling.
|
||||
metadata (Json): The metadata of the transaction.
|
||||
|
||||
Returns:
|
||||
tuple[int, str]: The new balance & the transaction key.
|
||||
"""
|
||||
async with db.locked_transaction(f"usr_trx_{user_id}"):
|
||||
# Get latest balance snapshot
|
||||
user_balance, _ = await self._get_credits(user_id)
|
||||
|
||||
if ceiling_balance and user_balance >= ceiling_balance:
|
||||
raise ValueError(
|
||||
f"You already have enough balance for user {user_id}, balance: {user_balance}, ceiling: {ceiling_balance}"
|
||||
)
|
||||
|
||||
if amount < 0 and user_balance < abs(amount):
|
||||
raise ValueError(
|
||||
f"Insufficient balance for user {user_id}, balance: {user_balance}, amount: {amount}"
|
||||
f"Insufficient balance of ${user_balance/100} to run the block that costs ${abs(amount)/100}"
|
||||
)
|
||||
|
||||
# Create the transaction
|
||||
@@ -201,9 +247,18 @@ class UserCreditBase(ABC):
|
||||
}
|
||||
if transaction_key:
|
||||
transaction_data["transactionKey"] = transaction_key
|
||||
await CreditTransaction.prisma().create(data=transaction_data)
|
||||
tx = await CreditTransaction.prisma().create(data=transaction_data)
|
||||
return user_balance + amount, tx.transactionKey
|
||||
|
||||
return user_balance + amount
|
||||
|
||||
class UsageTransactionMetadata(BaseModel):
|
||||
graph_exec_id: str | None = None
|
||||
graph_id: str | None = None
|
||||
node_id: str | None = None
|
||||
node_exec_id: str | None = None
|
||||
block_id: str | None = None
|
||||
block: str | None = None
|
||||
input: BlockInput | None = None
|
||||
|
||||
|
||||
class UserCredit(UserCreditBase):
|
||||
@@ -274,29 +329,35 @@ class UserCredit(UserCreditBase):
|
||||
if cost == 0:
|
||||
return 0
|
||||
|
||||
balance = await self._add_transaction(
|
||||
balance, _ = await self._add_transaction(
|
||||
user_id=entry.user_id,
|
||||
amount=-cost,
|
||||
transaction_type=CreditTransactionType.USAGE,
|
||||
metadata=Json(
|
||||
{
|
||||
"graph_exec_id": entry.graph_exec_id,
|
||||
"graph_id": entry.graph_id,
|
||||
"node_id": entry.node_id,
|
||||
"node_exec_id": entry.node_exec_id,
|
||||
"block_id": entry.block_id,
|
||||
"block": block.name,
|
||||
"input": matching_filter,
|
||||
}
|
||||
UsageTransactionMetadata(
|
||||
graph_exec_id=entry.graph_exec_id,
|
||||
graph_id=entry.graph_id,
|
||||
node_id=entry.node_id,
|
||||
node_exec_id=entry.node_exec_id,
|
||||
block_id=entry.block_id,
|
||||
block=block.name,
|
||||
input=matching_filter,
|
||||
).model_dump()
|
||||
),
|
||||
)
|
||||
user_id = entry.user_id
|
||||
|
||||
# Auto top-up if balance just went below threshold due to this transaction.
|
||||
# Auto top-up if balance is below threshold.
|
||||
auto_top_up = await get_auto_top_up(user_id)
|
||||
if balance < auto_top_up.threshold <= balance - cost:
|
||||
if auto_top_up.threshold and balance < auto_top_up.threshold:
|
||||
try:
|
||||
await self.top_up_credits(user_id=user_id, amount=auto_top_up.amount)
|
||||
await self._top_up_credits(
|
||||
user_id=user_id,
|
||||
amount=auto_top_up.amount,
|
||||
# Avoid multiple auto top-ups within the same graph execution.
|
||||
key=f"AUTO-TOP-UP-{user_id}-{entry.graph_exec_id}",
|
||||
ceiling_balance=auto_top_up.threshold,
|
||||
)
|
||||
except Exception as e:
|
||||
# Failed top-up is not critical, we can move on.
|
||||
logger.error(
|
||||
@@ -306,9 +367,34 @@ class UserCredit(UserCreditBase):
|
||||
return cost
|
||||
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
await self._top_up_credits(user_id, amount)
|
||||
|
||||
async def _top_up_credits(
|
||||
self,
|
||||
user_id: str,
|
||||
amount: int,
|
||||
key: str | None = None,
|
||||
ceiling_balance: int | None = None,
|
||||
):
|
||||
if amount < 0:
|
||||
raise ValueError(f"Top up amount must not be negative: {amount}")
|
||||
|
||||
if key is not None and (
|
||||
await CreditTransaction.prisma().find_first(
|
||||
where={"transactionKey": key, "userId": user_id}
|
||||
)
|
||||
):
|
||||
raise ValueError(f"Transaction key {key} already exists for user {user_id}")
|
||||
|
||||
_, transaction_key = await self._add_transaction(
|
||||
user_id=user_id,
|
||||
amount=amount,
|
||||
transaction_type=CreditTransactionType.TOP_UP,
|
||||
is_active=False,
|
||||
transaction_key=key,
|
||||
ceiling_balance=ceiling_balance,
|
||||
)
|
||||
|
||||
customer_id = await get_stripe_customer_id(user_id)
|
||||
|
||||
payment_methods = stripe.PaymentMethod.list(customer=customer_id, type="card")
|
||||
@@ -345,13 +431,10 @@ class UserCredit(UserCreditBase):
|
||||
},
|
||||
)
|
||||
if payment_intent.status == "succeeded":
|
||||
await self._add_transaction(
|
||||
await self._enable_transaction(
|
||||
transaction_key=transaction_key,
|
||||
user_id=user_id,
|
||||
amount=amount,
|
||||
transaction_type=CreditTransactionType.TOP_UP,
|
||||
transaction_key=payment_intent.id,
|
||||
metadata=Json({"payment_intent": payment_intent}),
|
||||
is_active=True,
|
||||
)
|
||||
return
|
||||
|
||||
@@ -360,6 +443,11 @@ class UserCredit(UserCreditBase):
|
||||
)
|
||||
|
||||
async def top_up_intent(self, user_id: str, amount: int) -> str:
|
||||
if amount < 500 or amount % 100 != 0:
|
||||
raise ValueError(
|
||||
f"Top up amount must be at least 500 credits and multiple of 100 but is {amount}"
|
||||
)
|
||||
|
||||
# Create checkout session
|
||||
# https://docs.stripe.com/checkout/quickstart?client=react
|
||||
# unit_amount param is always in the smallest currency unit (so cents for usd)
|
||||
@@ -379,12 +467,14 @@ class UserCredit(UserCreditBase):
|
||||
}
|
||||
],
|
||||
mode="payment",
|
||||
ui_mode="hosted",
|
||||
payment_intent_data={"setup_future_usage": "off_session"},
|
||||
saved_payment_method_options={"payment_method_save": "enabled"},
|
||||
success_url=settings.config.platform_base_url
|
||||
success_url=settings.config.frontend_base_url
|
||||
+ "/marketplace/credits?topup=success",
|
||||
cancel_url=settings.config.platform_base_url
|
||||
cancel_url=settings.config.frontend_base_url
|
||||
+ "/marketplace/credits?topup=cancel",
|
||||
allow_promotion_codes=True,
|
||||
)
|
||||
|
||||
await self._add_transaction(
|
||||
@@ -443,6 +533,61 @@ class UserCredit(UserCreditBase):
|
||||
balance, _ = await self._get_credits(user_id)
|
||||
return balance
|
||||
|
||||
async def get_transaction_history(
|
||||
self,
|
||||
user_id: str,
|
||||
transaction_time: datetime,
|
||||
transaction_count_limit: int,
|
||||
) -> TransactionHistory:
|
||||
transactions = await CreditTransaction.prisma().find_many(
|
||||
where={
|
||||
"userId": user_id,
|
||||
"createdAt": {"lt": transaction_time},
|
||||
"isActive": True,
|
||||
},
|
||||
order={"createdAt": "desc"},
|
||||
take=transaction_count_limit,
|
||||
)
|
||||
|
||||
grouped_transactions: dict[str, UserTransaction] = defaultdict(
|
||||
lambda: UserTransaction()
|
||||
)
|
||||
tx_time = None
|
||||
for t in transactions:
|
||||
metadata = (
|
||||
UsageTransactionMetadata.model_validate(t.metadata)
|
||||
if t.metadata
|
||||
else UsageTransactionMetadata()
|
||||
)
|
||||
tx_time = t.createdAt.replace(tzinfo=None)
|
||||
|
||||
if t.type == CreditTransactionType.USAGE and metadata.graph_exec_id:
|
||||
gt = grouped_transactions[metadata.graph_exec_id]
|
||||
gid = metadata.graph_id[:8] if metadata.graph_id else "UNKNOWN"
|
||||
gt.description = f"Graph #{gid} Execution"
|
||||
|
||||
gt.usage_node_count += 1
|
||||
gt.usage_start_time = min(gt.usage_start_time, tx_time)
|
||||
gt.usage_execution_id = metadata.graph_exec_id
|
||||
gt.usage_graph_id = metadata.graph_id
|
||||
else:
|
||||
gt = grouped_transactions[t.transactionKey]
|
||||
gt.description = f"{t.type} Transaction"
|
||||
|
||||
gt.amount += t.amount
|
||||
gt.transaction_type = t.type
|
||||
|
||||
if tx_time > gt.transaction_time:
|
||||
gt.transaction_time = tx_time
|
||||
gt.balance = t.runningBalance or 0
|
||||
|
||||
return TransactionHistory(
|
||||
transactions=list(grouped_transactions.values()),
|
||||
next_transaction_time=(
|
||||
tx_time if len(transactions) == transaction_count_limit else None
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class BetaUserCredit(UserCredit):
|
||||
"""
|
||||
@@ -460,12 +605,13 @@ class BetaUserCredit(UserCredit):
|
||||
return balance
|
||||
|
||||
try:
|
||||
return await self._add_transaction(
|
||||
balance, _ = await self._add_transaction(
|
||||
user_id=user_id,
|
||||
amount=max(self.num_user_credits_refill - balance, 0),
|
||||
transaction_type=CreditTransactionType.TOP_UP,
|
||||
transaction_key=f"MONTHLY-CREDIT-TOP-UP-{cur_time}",
|
||||
)
|
||||
return balance
|
||||
except UniqueViolationError:
|
||||
# Already refilled this month
|
||||
return (await self._get_credits(user_id))[0]
|
||||
@@ -475,6 +621,9 @@ class DisabledUserCredit(UserCreditBase):
|
||||
async def get_credits(self, *args, **kwargs) -> int:
|
||||
return 0
|
||||
|
||||
async def get_transaction_history(self, *args, **kwargs) -> TransactionHistory:
|
||||
return TransactionHistory(transactions=[], next_transaction_time=None)
|
||||
|
||||
async def spend_credits(self, *args, **kwargs) -> int:
|
||||
return 0
|
||||
|
||||
@@ -504,8 +653,6 @@ def get_block_costs() -> dict[str, list[BlockCost]]:
|
||||
|
||||
async def get_stripe_customer_id(user_id: str) -> str:
|
||||
user = await get_user_by_id(user_id)
|
||||
if not user:
|
||||
raise ValueError(f"User not found: {user_id}")
|
||||
|
||||
if user.stripeCustomerId:
|
||||
return user.stripeCustomerId
|
||||
@@ -517,17 +664,15 @@ async def get_stripe_customer_id(user_id: str) -> str:
|
||||
return customer.id
|
||||
|
||||
|
||||
async def set_auto_top_up(user_id: str, threshold: int, amount: int):
|
||||
async def set_auto_top_up(user_id: str, config: AutoTopUpConfig):
|
||||
await User.prisma().update(
|
||||
where={"id": user_id},
|
||||
data={"topUpConfig": Json({"threshold": threshold, "amount": amount})},
|
||||
data={"topUpConfig": Json(config.model_dump())},
|
||||
)
|
||||
|
||||
|
||||
async def get_auto_top_up(user_id: str) -> AutoTopUpConfig:
|
||||
user = await get_user_by_id(user_id)
|
||||
if not user:
|
||||
raise ValueError("Invalid user ID")
|
||||
|
||||
if not user.topUpConfig:
|
||||
return AutoTopUpConfig(threshold=0, amount=0)
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
from collections import defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from multiprocessing import Manager
|
||||
from typing import Any, AsyncGenerator, Generator, Generic, Optional, TypeVar
|
||||
from typing import Any, AsyncGenerator, Generator, Generic, Optional, Type, TypeVar
|
||||
|
||||
from prisma import Json
|
||||
from prisma.enums import AgentExecutionStatus
|
||||
from prisma.errors import PrismaError
|
||||
from prisma.models import (
|
||||
@@ -15,7 +16,8 @@ from pydantic import BaseModel
|
||||
from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
|
||||
from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
|
||||
from backend.data.queue import AsyncRedisEventBus, RedisEventBus
|
||||
from backend.util import json, mock
|
||||
from backend.server.v2.store.exceptions import DatabaseError
|
||||
from backend.util import mock, type
|
||||
from backend.util.settings import Config
|
||||
|
||||
|
||||
@@ -23,6 +25,7 @@ class GraphExecutionEntry(BaseModel):
|
||||
user_id: str
|
||||
graph_exec_id: str
|
||||
graph_id: str
|
||||
graph_version: int
|
||||
start_node_execs: list["NodeExecutionEntry"]
|
||||
|
||||
|
||||
@@ -100,16 +103,16 @@ class ExecutionResult(BaseModel):
|
||||
def from_db(execution: AgentNodeExecution):
|
||||
if execution.executionData:
|
||||
# Execution that has been queued for execution will persist its data.
|
||||
input_data = json.loads(execution.executionData, target_type=dict[str, Any])
|
||||
input_data = type.convert(execution.executionData, dict[str, Any])
|
||||
else:
|
||||
# For incomplete execution, executionData will not be yet available.
|
||||
input_data: BlockInput = defaultdict()
|
||||
for data in execution.Input or []:
|
||||
input_data[data.name] = json.loads(data.data)
|
||||
input_data[data.name] = type.convert(data.data, Type[Any])
|
||||
|
||||
output_data: CompletedBlockOutput = defaultdict(list)
|
||||
for data in execution.Output or []:
|
||||
output_data[data.name].append(json.loads(data.data))
|
||||
output_data[data.name].append(type.convert(data.data, Type[Any]))
|
||||
|
||||
graph_execution: AgentGraphExecution | None = execution.AgentGraphExecution
|
||||
|
||||
@@ -138,6 +141,7 @@ async def create_graph_execution(
|
||||
graph_version: int,
|
||||
nodes_input: list[tuple[str, BlockInput]],
|
||||
user_id: str,
|
||||
preset_id: str | None = None,
|
||||
) -> tuple[str, list[ExecutionResult]]:
|
||||
"""
|
||||
Create a new AgentGraphExecution record.
|
||||
@@ -156,7 +160,7 @@ async def create_graph_execution(
|
||||
"executionStatus": ExecutionStatus.INCOMPLETE,
|
||||
"Input": {
|
||||
"create": [
|
||||
{"name": name, "data": json.dumps(data)}
|
||||
{"name": name, "data": Json(data)}
|
||||
for name, data in node_input.items()
|
||||
]
|
||||
},
|
||||
@@ -165,6 +169,7 @@ async def create_graph_execution(
|
||||
]
|
||||
},
|
||||
"userId": user_id,
|
||||
"agentPresetId": preset_id,
|
||||
},
|
||||
include=GRAPH_EXECUTION_INCLUDE,
|
||||
)
|
||||
@@ -208,7 +213,7 @@ async def upsert_execution_input(
|
||||
order={"addedTime": "asc"},
|
||||
include={"Input": True},
|
||||
)
|
||||
json_input_data = json.dumps(input_data)
|
||||
json_input_data = Json(input_data)
|
||||
|
||||
if existing_execution:
|
||||
await AgentNodeExecutionInputOutput.prisma().create(
|
||||
@@ -220,7 +225,7 @@ async def upsert_execution_input(
|
||||
)
|
||||
return existing_execution.id, {
|
||||
**{
|
||||
input_data.name: json.loads(input_data.data)
|
||||
input_data.name: type.convert(input_data.data, Type[Any])
|
||||
for input_data in existing_execution.Input or []
|
||||
},
|
||||
input_name: input_data,
|
||||
@@ -254,7 +259,7 @@ async def upsert_execution_output(
|
||||
await AgentNodeExecutionInputOutput.prisma().create(
|
||||
data={
|
||||
"name": output_name,
|
||||
"data": json.dumps(output_data),
|
||||
"data": Json(output_data),
|
||||
"referencedByOutputExecId": node_exec_id,
|
||||
}
|
||||
)
|
||||
@@ -279,7 +284,7 @@ async def update_graph_execution_stats(
|
||||
where={"id": graph_exec_id},
|
||||
data={
|
||||
"executionStatus": status,
|
||||
"stats": json.dumps(stats),
|
||||
"stats": Json(stats),
|
||||
},
|
||||
)
|
||||
if not res:
|
||||
@@ -291,7 +296,7 @@ async def update_graph_execution_stats(
|
||||
async def update_node_execution_stats(node_exec_id: str, stats: dict[str, Any]):
|
||||
await AgentNodeExecution.prisma().update(
|
||||
where={"id": node_exec_id},
|
||||
data={"stats": json.dumps(stats)},
|
||||
data={"stats": Json(stats)},
|
||||
)
|
||||
|
||||
|
||||
@@ -311,8 +316,8 @@ async def update_execution_status(
|
||||
**({"startedTime": now} if status == ExecutionStatus.RUNNING else {}),
|
||||
**({"endedTime": now} if status == ExecutionStatus.FAILED else {}),
|
||||
**({"endedTime": now} if status == ExecutionStatus.COMPLETED else {}),
|
||||
**({"executionData": json.dumps(execution_data)} if execution_data else {}),
|
||||
**({"stats": json.dumps(stats)} if stats else {}),
|
||||
**({"executionData": Json(execution_data)} if execution_data else {}),
|
||||
**({"stats": Json(stats)} if stats else {}),
|
||||
}
|
||||
|
||||
res = await AgentNodeExecution.prisma().update(
|
||||
@@ -363,6 +368,31 @@ async def get_execution_results(graph_exec_id: str) -> list[ExecutionResult]:
|
||||
return res
|
||||
|
||||
|
||||
async def get_executions_in_timerange(
|
||||
user_id: str, start_time: str, end_time: str
|
||||
) -> list[ExecutionResult]:
|
||||
try:
|
||||
executions = await AgentGraphExecution.prisma().find_many(
|
||||
where={
|
||||
"AND": [
|
||||
{
|
||||
"startedAt": {
|
||||
"gte": datetime.fromisoformat(start_time),
|
||||
"lte": datetime.fromisoformat(end_time),
|
||||
}
|
||||
},
|
||||
{"userId": user_id},
|
||||
]
|
||||
},
|
||||
include=GRAPH_EXECUTION_INCLUDE,
|
||||
)
|
||||
return [ExecutionResult.from_graph(execution) for execution in executions]
|
||||
except Exception as e:
|
||||
raise DatabaseError(
|
||||
f"Failed to get executions in timerange {start_time} to {end_time} for user {user_id}: {e}"
|
||||
) from e
|
||||
|
||||
|
||||
LIST_SPLIT = "_$_"
|
||||
DICT_SPLIT = "_#_"
|
||||
OBJC_SPLIT = "_@_"
|
||||
@@ -446,8 +476,7 @@ async def get_latest_execution(node_id: str, graph_eid: str) -> ExecutionResult
|
||||
where={
|
||||
"agentNodeId": node_id,
|
||||
"agentGraphExecutionId": graph_eid,
|
||||
"executionStatus": {"not": ExecutionStatus.INCOMPLETE},
|
||||
"executionData": {"not": None}, # type: ignore
|
||||
"executionStatus": {"not": ExecutionStatus.INCOMPLETE}, # type: ignore
|
||||
},
|
||||
order={"queuedTime": "desc"},
|
||||
include=EXECUTION_RESULT_INCLUDE,
|
||||
|
||||
@@ -6,6 +6,7 @@ from datetime import datetime, timezone
|
||||
from typing import Any, Literal, Optional, Type
|
||||
|
||||
import prisma
|
||||
from prisma import Json
|
||||
from prisma.models import (
|
||||
AgentGraph,
|
||||
AgentGraphExecution,
|
||||
@@ -18,7 +19,7 @@ from pydantic.fields import computed_field
|
||||
|
||||
from backend.blocks.agent import AgentExecutorBlock
|
||||
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
|
||||
from backend.util import json
|
||||
from backend.util import type
|
||||
|
||||
from .block import BlockInput, BlockType, get_block, get_blocks
|
||||
from .db import BaseDbModel, transaction
|
||||
@@ -74,8 +75,8 @@ class NodeModel(Node):
|
||||
obj = NodeModel(
|
||||
id=node.id,
|
||||
block_id=node.AgentBlock.id,
|
||||
input_default=json.loads(node.constantInput, target_type=dict[str, Any]),
|
||||
metadata=json.loads(node.metadata, target_type=dict[str, Any]),
|
||||
input_default=type.convert(node.constantInput, dict[str, Any]),
|
||||
metadata=type.convert(node.metadata, dict[str, Any]),
|
||||
graph_id=node.agentGraphId,
|
||||
graph_version=node.agentGraphVersion,
|
||||
webhook_id=node.webhookId,
|
||||
@@ -125,7 +126,7 @@ class GraphExecution(BaseDbModel):
|
||||
total_run_time = duration
|
||||
|
||||
try:
|
||||
stats = json.loads(execution.stats or "{}", target_type=dict[str, Any])
|
||||
stats = type.convert(execution.stats or {}, dict[str, Any])
|
||||
except ValueError:
|
||||
stats = {}
|
||||
|
||||
@@ -402,11 +403,9 @@ class GraphModel(Graph):
|
||||
if for_export:
|
||||
# Remove credentials from node input
|
||||
if node.constantInput:
|
||||
constant_input = json.loads(
|
||||
node.constantInput, target_type=dict[str, Any]
|
||||
)
|
||||
constant_input = type.convert(node.constantInput, dict[str, Any])
|
||||
constant_input = GraphModel._hide_node_input_credentials(constant_input)
|
||||
node.constantInput = json.dumps(constant_input)
|
||||
node.constantInput = Json(constant_input)
|
||||
|
||||
# Remove webhook info
|
||||
node.webhookId = None
|
||||
@@ -535,7 +534,7 @@ async def get_execution(user_id: str, execution_id: str) -> GraphExecution | Non
|
||||
async def get_graph(
|
||||
graph_id: str,
|
||||
version: int | None = None,
|
||||
template: bool = False,
|
||||
template: bool = False, # note: currently not in use; TODO: remove from DB entirely
|
||||
user_id: str | None = None,
|
||||
for_export: bool = False,
|
||||
) -> GraphModel | None:
|
||||
@@ -654,8 +653,8 @@ async def __create_graph(tx, graph: Graph, user_id: str):
|
||||
{
|
||||
"id": node.id,
|
||||
"agentBlockId": node.block_id,
|
||||
"constantInput": json.dumps(node.input_default),
|
||||
"metadata": json.dumps(node.metadata),
|
||||
"constantInput": Json(node.input_default),
|
||||
"metadata": Json(node.metadata),
|
||||
}
|
||||
for node in graph.nodes
|
||||
]
|
||||
@@ -742,7 +741,7 @@ async def fix_llm_provider_credentials():
|
||||
raise RuntimeError(f"Impossible state while processing node {node}")
|
||||
|
||||
node_id: str = node["node_id"]
|
||||
node_preset_input: dict = json.loads(node["node_preset_input"])
|
||||
node_preset_input: dict = node["node_preset_input"]
|
||||
credentials_meta: dict = node_preset_input["credentials"]
|
||||
|
||||
credentials = next(
|
||||
@@ -778,5 +777,5 @@ async def fix_llm_provider_credentials():
|
||||
store.update_creds(user_id, credentials)
|
||||
await AgentNode.prisma().update(
|
||||
where={"id": node_id},
|
||||
data={"constantInput": json.dumps(node_preset_input)},
|
||||
data={"constantInput": Json(node_preset_input)},
|
||||
)
|
||||
|
||||
@@ -2,6 +2,7 @@ from __future__ import annotations
|
||||
|
||||
import base64
|
||||
import logging
|
||||
from datetime import datetime
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
Annotated,
|
||||
@@ -17,6 +18,7 @@ from typing import (
|
||||
)
|
||||
from uuid import uuid4
|
||||
|
||||
from prisma.enums import CreditTransactionType
|
||||
from pydantic import (
|
||||
BaseModel,
|
||||
ConfigDict,
|
||||
@@ -370,3 +372,20 @@ class AutoTopUpConfig(BaseModel):
|
||||
"""Amount of credits to top up."""
|
||||
threshold: int
|
||||
"""Threshold to trigger auto top up."""
|
||||
|
||||
|
||||
class UserTransaction(BaseModel):
|
||||
transaction_time: datetime = datetime.min
|
||||
transaction_type: CreditTransactionType = CreditTransactionType.USAGE
|
||||
amount: int = 0
|
||||
balance: int = 0
|
||||
description: str | None = None
|
||||
usage_graph_id: str | None = None
|
||||
usage_execution_id: str | None = None
|
||||
usage_node_count: int = 0
|
||||
usage_start_time: datetime = datetime.max
|
||||
|
||||
|
||||
class TransactionHistory(BaseModel):
|
||||
transactions: list[UserTransaction]
|
||||
next_transaction_time: datetime | None
|
||||
|
||||
360
autogpt_platform/backend/backend/data/notifications.py
Normal file
360
autogpt_platform/backend/backend/data/notifications.py
Normal file
@@ -0,0 +1,360 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
from typing import Annotated, Generic, Optional, TypeVar, Union
|
||||
|
||||
from prisma import Json
|
||||
from prisma.enums import NotificationType
|
||||
from prisma.models import NotificationEvent, UserNotificationBatch
|
||||
from prisma.types import UserNotificationBatchWhereInput
|
||||
|
||||
# from backend.notifications.models import NotificationEvent
|
||||
from pydantic import BaseModel, EmailStr, Field, field_validator
|
||||
|
||||
from backend.server.v2.store.exceptions import DatabaseError
|
||||
|
||||
from .db import transaction
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
T_co = TypeVar("T_co", bound="BaseNotificationData", covariant=True)
|
||||
|
||||
|
||||
class BatchingStrategy(Enum):
|
||||
IMMEDIATE = "immediate" # Send right away (errors, critical notifications)
|
||||
HOURLY = "hourly" # Batch for up to an hour (usage reports)
|
||||
DAILY = "daily" # Daily digest (summary notifications)
|
||||
BACKOFF = "backoff" # Backoff strategy (exponential backoff)
|
||||
|
||||
|
||||
class BaseNotificationData(BaseModel):
|
||||
pass
|
||||
|
||||
|
||||
class AgentRunData(BaseNotificationData):
|
||||
agent_name: str
|
||||
credits_used: float
|
||||
# remaining_balance: float
|
||||
execution_time: float
|
||||
graph_id: str
|
||||
node_count: int = Field(..., description="Number of nodes executed")
|
||||
|
||||
|
||||
class ZeroBalanceData(BaseNotificationData):
|
||||
last_transaction: float
|
||||
last_transaction_time: datetime
|
||||
top_up_link: str
|
||||
|
||||
|
||||
class LowBalanceData(BaseNotificationData):
|
||||
current_balance: float
|
||||
threshold_amount: float
|
||||
top_up_link: str
|
||||
recent_usage: float = Field(..., description="Usage in the last 24 hours")
|
||||
|
||||
|
||||
class BlockExecutionFailedData(BaseNotificationData):
|
||||
block_name: str
|
||||
block_id: str
|
||||
error_message: str
|
||||
graph_id: str
|
||||
node_id: str
|
||||
execution_id: str
|
||||
|
||||
|
||||
class ContinuousAgentErrorData(BaseNotificationData):
|
||||
agent_name: str
|
||||
error_message: str
|
||||
graph_id: str
|
||||
execution_id: str
|
||||
start_time: datetime
|
||||
error_time: datetime
|
||||
attempts: int = Field(..., description="Number of retry attempts made")
|
||||
|
||||
|
||||
class BaseSummaryData(BaseNotificationData):
|
||||
total_credits_used: float
|
||||
total_executions: int
|
||||
most_used_agent: str
|
||||
total_execution_time: float
|
||||
successful_runs: int
|
||||
failed_runs: int
|
||||
average_execution_time: float
|
||||
cost_breakdown: dict[str, float]
|
||||
|
||||
|
||||
class DailySummaryData(BaseSummaryData):
|
||||
date: datetime
|
||||
|
||||
|
||||
class WeeklySummaryData(BaseSummaryData):
|
||||
start_date: datetime
|
||||
end_date: datetime
|
||||
week_number: int
|
||||
year: int
|
||||
|
||||
|
||||
class MonthlySummaryData(BaseSummaryData):
|
||||
month: int
|
||||
year: int
|
||||
|
||||
|
||||
NotificationData = Annotated[
|
||||
Union[
|
||||
AgentRunData,
|
||||
ZeroBalanceData,
|
||||
LowBalanceData,
|
||||
BlockExecutionFailedData,
|
||||
ContinuousAgentErrorData,
|
||||
MonthlySummaryData,
|
||||
],
|
||||
Field(discriminator="type"),
|
||||
]
|
||||
|
||||
|
||||
class NotificationEventDTO(BaseModel):
|
||||
user_id: str
|
||||
type: NotificationType
|
||||
data: dict
|
||||
created_at: datetime = Field(default_factory=datetime.now)
|
||||
|
||||
|
||||
class NotificationEventModel(BaseModel, Generic[T_co]):
|
||||
user_id: str
|
||||
type: NotificationType
|
||||
data: T_co
|
||||
created_at: datetime = Field(default_factory=datetime.now)
|
||||
|
||||
@property
|
||||
def strategy(self) -> BatchingStrategy:
|
||||
return NotificationTypeOverride(self.type).strategy
|
||||
|
||||
@field_validator("type", mode="before")
|
||||
def uppercase_type(cls, v):
|
||||
if isinstance(v, str):
|
||||
return v.upper()
|
||||
return v
|
||||
|
||||
@property
|
||||
def template(self) -> str:
|
||||
return NotificationTypeOverride(self.type).template
|
||||
|
||||
|
||||
def get_data_type(
|
||||
notification_type: NotificationType,
|
||||
) -> type[BaseNotificationData]:
|
||||
return {
|
||||
NotificationType.AGENT_RUN: AgentRunData,
|
||||
NotificationType.ZERO_BALANCE: ZeroBalanceData,
|
||||
NotificationType.LOW_BALANCE: LowBalanceData,
|
||||
NotificationType.BLOCK_EXECUTION_FAILED: BlockExecutionFailedData,
|
||||
NotificationType.CONTINUOUS_AGENT_ERROR: ContinuousAgentErrorData,
|
||||
NotificationType.DAILY_SUMMARY: DailySummaryData,
|
||||
NotificationType.WEEKLY_SUMMARY: WeeklySummaryData,
|
||||
NotificationType.MONTHLY_SUMMARY: MonthlySummaryData,
|
||||
}[notification_type]
|
||||
|
||||
|
||||
class NotificationBatch(BaseModel):
|
||||
user_id: str
|
||||
events: list[NotificationEvent]
|
||||
strategy: BatchingStrategy
|
||||
last_update: datetime = datetime.now()
|
||||
|
||||
|
||||
class NotificationResult(BaseModel):
|
||||
success: bool
|
||||
message: Optional[str] = None
|
||||
|
||||
|
||||
class NotificationTypeOverride:
|
||||
def __init__(self, notification_type: NotificationType):
|
||||
self.notification_type = notification_type
|
||||
|
||||
@property
|
||||
def strategy(self) -> BatchingStrategy:
|
||||
BATCHING_RULES = {
|
||||
# These are batched by the notification service
|
||||
NotificationType.AGENT_RUN: BatchingStrategy.IMMEDIATE,
|
||||
# These are batched by the notification service, but with a backoff strategy
|
||||
NotificationType.ZERO_BALANCE: BatchingStrategy.BACKOFF,
|
||||
NotificationType.LOW_BALANCE: BatchingStrategy.BACKOFF,
|
||||
NotificationType.BLOCK_EXECUTION_FAILED: BatchingStrategy.BACKOFF,
|
||||
NotificationType.CONTINUOUS_AGENT_ERROR: BatchingStrategy.BACKOFF,
|
||||
# These aren't batched by the notification service, so we send them right away
|
||||
NotificationType.DAILY_SUMMARY: BatchingStrategy.IMMEDIATE,
|
||||
NotificationType.WEEKLY_SUMMARY: BatchingStrategy.IMMEDIATE,
|
||||
NotificationType.MONTHLY_SUMMARY: BatchingStrategy.IMMEDIATE,
|
||||
}
|
||||
return BATCHING_RULES.get(self.notification_type, BatchingStrategy.HOURLY)
|
||||
|
||||
@property
|
||||
def template(self) -> str:
|
||||
"""Returns template name for this notification type"""
|
||||
return {
|
||||
NotificationType.AGENT_RUN: "agent_run.html",
|
||||
NotificationType.ZERO_BALANCE: "zero_balance.html",
|
||||
NotificationType.LOW_BALANCE: "low_balance.html",
|
||||
NotificationType.BLOCK_EXECUTION_FAILED: "block_failed.html",
|
||||
NotificationType.CONTINUOUS_AGENT_ERROR: "agent_error.html",
|
||||
NotificationType.DAILY_SUMMARY: "daily_summary.html",
|
||||
NotificationType.WEEKLY_SUMMARY: "weekly_summary.html",
|
||||
NotificationType.MONTHLY_SUMMARY: "monthly_summary.html",
|
||||
}[self.notification_type]
|
||||
|
||||
|
||||
class NotificationPreference(BaseModel):
|
||||
user_id: str
|
||||
email: EmailStr
|
||||
preferences: dict[NotificationType, bool] = Field(
|
||||
default_factory=dict, description="Which notifications the user wants"
|
||||
)
|
||||
daily_limit: int = 10 # Max emails per day
|
||||
emails_sent_today: int = 0
|
||||
last_reset_date: datetime = Field(default_factory=datetime.now)
|
||||
|
||||
|
||||
def get_batch_delay(notification_type: NotificationType) -> timedelta:
    """Return how long to wait before flushing a batch of this type.

    Types without an explicit entry (e.g. the summary types, which are
    sent immediately) fall back to an hourly delay instead of raising
    ``KeyError`` as the previous implementation did.
    """
    delays = {
        NotificationType.AGENT_RUN: timedelta(seconds=1),
        NotificationType.ZERO_BALANCE: timedelta(minutes=60),
        NotificationType.LOW_BALANCE: timedelta(minutes=60),
        NotificationType.BLOCK_EXECUTION_FAILED: timedelta(minutes=60),
        NotificationType.CONTINUOUS_AGENT_ERROR: timedelta(minutes=60),
    }
    # Hourly fallback mirrors the BatchingStrategy.HOURLY default used
    # by the strategy mapping.
    return delays.get(notification_type, timedelta(minutes=60))
|
||||
|
||||
|
||||
async def create_or_add_to_user_notification_batch(
    user_id: str,
    notification_type: NotificationType,
    data: str,  # type: 'NotificationEventModel'
) -> dict:
    """Append a notification event to the user's batch for this type,
    creating the batch first when none exists.

    Args:
        user_id: Owner of the batch.
        notification_type: Kind of notification being batched.
        data: JSON-serialized NotificationEventModel payload.

    Returns:
        The resulting batch (with its notifications relation) as a dict.

    Raises:
        DatabaseError: If validation or any database operation fails.
    """
    try:
        logger.info(
            f"Creating or adding to notification batch for {user_id} with type {notification_type} and data {data}"
        )

        # Re-validate the incoming JSON against the concrete event model
        # for this notification type.
        notification_data = NotificationEventModel[
            get_data_type(notification_type)
        ].model_validate_json(data)

        # Serialize the data
        json_data: Json = Json(notification_data.data.model_dump_json())

        # First try to find existing batch (unique on userId + type)
        existing_batch = await UserNotificationBatch.prisma().find_unique(
            where={
                "userId_type": {
                    "userId": user_id,
                    "type": notification_type,
                }
            },
            include={"notifications": True},
        )

        if not existing_batch:
            # Create the event and the batch atomically.
            async with transaction() as tx:
                notification_event = await tx.notificationevent.create(
                    data={
                        "type": notification_type,
                        "data": json_data,
                    }
                )

                # Create new batch
                resp = await tx.usernotificationbatch.create(
                    data={
                        "userId": user_id,
                        "type": notification_type,
                        "notifications": {"connect": [{"id": notification_event.id}]},
                    },
                    include={"notifications": True},
                )
                return resp.model_dump()
        else:
            # Create the event already linked to the batch, then refresh
            # the batch's relation — both inside one transaction.
            async with transaction() as tx:
                notification_event = await tx.notificationevent.create(
                    data={
                        "type": notification_type,
                        "data": json_data,
                        "UserNotificationBatch": {"connect": {"id": existing_batch.id}},
                    }
                )
                # Add to existing batch
                resp = await tx.usernotificationbatch.update(
                    where={"id": existing_batch.id},
                    data={
                        "notifications": {"connect": [{"id": notification_event.id}]}
                    },
                    include={"notifications": True},
                )
                if not resp:
                    raise DatabaseError(
                        f"Failed to add notification event {notification_event.id} to existing batch {existing_batch.id}"
                    )
                return resp.model_dump()
    except Exception as e:
        raise DatabaseError(
            f"Failed to create or add to notification batch for user {user_id} and type {notification_type}: {e}"
        ) from e
|
||||
|
||||
|
||||
async def get_user_notification_last_message_in_batch(
    user_id: str,
    notification_type: NotificationType,
) -> NotificationEvent | None:
    """Return the most recent notification event in the user's newest
    batch of the given type.

    Returns:
        The last event in the batch, or None when no batch exists or the
        batch holds no events.

    Raises:
        DatabaseError: If the lookup fails.
    """
    try:
        batch = await UserNotificationBatch.prisma().find_first(
            where={"userId": user_id, "type": notification_type},
            order={"createdAt": "desc"},
            # The relation must be fetched explicitly; without this include
            # `batch.notifications` was never populated, so the previous
            # implementation always returned None.
            include={"notifications": True},
        )
        if not batch or not batch.notifications:
            return None
        return batch.notifications[-1]
    except Exception as e:
        raise DatabaseError(
            f"Failed to get user notification last message in batch for user {user_id} and type {notification_type}: {e}"
        ) from e
|
||||
|
||||
|
||||
async def empty_user_notification_batch(
    user_id: str, notification_type: NotificationType
) -> None:
    """Delete the user's batch for this notification type along with all
    notification events attached to it.

    Both deletes run in a single transaction; events are removed first so
    the batch row never outlives references to deleted events.

    Raises:
        DatabaseError: If either delete fails.
    """
    try:
        async with transaction() as tx:
            # Remove the events belonging to this user's batch of this type.
            await tx.notificationevent.delete_many(
                where={
                    "UserNotificationBatch": {
                        "is": {"userId": user_id, "type": notification_type}
                    }
                }
            )

            # Then remove the (now empty) batch itself.
            await tx.usernotificationbatch.delete_many(
                where=UserNotificationBatchWhereInput(
                    userId=user_id,
                    type=notification_type,
                )
            )
    except Exception as e:
        raise DatabaseError(
            f"Failed to empty user notification batch for user {user_id} and type {notification_type}: {e}"
        ) from e
|
||||
|
||||
|
||||
async def get_user_notification_batch(
    user_id: str,
    notification_type: NotificationType,
) -> UserNotificationBatch | None:
    """Fetch the user's batch for the given notification type, with its
    events included, or None when no batch exists.

    Raises:
        DatabaseError: If the lookup fails.
    """
    try:
        batch = await UserNotificationBatch.prisma().find_first(
            where={"userId": user_id, "type": notification_type},
            include={"notifications": True},
        )
        return batch
    except Exception as e:
        raise DatabaseError(
            f"Failed to get user notification batch for user {user_id} and type {notification_type}: {e}"
        ) from e
|
||||
296
autogpt_platform/backend/backend/data/rabbitmq.py
Normal file
296
autogpt_platform/backend/backend/data/rabbitmq.py
Normal file
@@ -0,0 +1,296 @@
|
||||
import logging
|
||||
from abc import ABC, abstractmethod
|
||||
from enum import Enum
|
||||
from typing import Awaitable, Optional
|
||||
|
||||
import aio_pika
|
||||
import pika
|
||||
import pika.adapters.blocking_connection
|
||||
from pika.spec import BasicProperties
|
||||
from pydantic import BaseModel
|
||||
|
||||
from backend.util.retry import conn_retry
|
||||
from backend.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ExchangeType(str, Enum):
    """AMQP exchange types supported by RabbitMQ."""

    DIRECT = "direct"
    FANOUT = "fanout"
    TOPIC = "topic"
    HEADERS = "headers"
|
||||
|
||||
|
||||
class Exchange(BaseModel):
    """Declaration parameters for a RabbitMQ exchange."""

    name: str
    type: ExchangeType
    durable: bool = True  # Survive broker restarts
    auto_delete: bool = False  # Delete when the last queue unbinds
|
||||
|
||||
|
||||
class Queue(BaseModel):
    """Declaration parameters for a RabbitMQ queue, with an optional
    binding to an exchange."""

    name: str
    durable: bool = True  # Survive broker restarts
    auto_delete: bool = False  # Delete when the last consumer disconnects
    # Optional exchange binding configuration
    exchange: Optional[Exchange] = None
    routing_key: Optional[str] = None  # Defaults to the queue name at bind time
    arguments: Optional[dict] = None  # Extra AMQP queue arguments (e.g. TTLs)
|
||||
|
||||
|
||||
class RabbitMQConfig(BaseModel):
    """Configuration for a RabbitMQ service instance"""

    vhost: str = "/"
    exchanges: list[Exchange]  # Exchanges this service declares on connect
    queues: list[Queue]  # Queues (and bindings) this service declares on connect
|
||||
|
||||
|
||||
class RabbitMQBase(ABC):
    """Base class for RabbitMQ connections with shared configuration.

    Host/port/credentials come from application settings; only the
    vhost/exchange/queue topology comes from the per-service config.
    Subclasses provide sync or async connection lifecycles.
    """

    def __init__(self, config: RabbitMQConfig):
        settings = Settings()
        self.host = settings.config.rabbitmq_host
        self.port = settings.config.rabbitmq_port
        self.username = settings.secrets.rabbitmq_default_user
        self.password = settings.secrets.rabbitmq_default_pass
        self.config = config

        # Lazily established by connect(); cleared by disconnect().
        self._connection = None
        self._channel = None

    @property
    def is_connected(self) -> bool:
        """Check if we have a valid connection"""
        return bool(self._connection)

    @property
    def is_ready(self) -> bool:
        """Check if we have a valid channel"""
        return bool(self.is_connected and self._channel)

    @abstractmethod
    def connect(self) -> None | Awaitable[None]:
        """Establish connection to RabbitMQ"""
        pass

    @abstractmethod
    def disconnect(self) -> None | Awaitable[None]:
        """Close connection to RabbitMQ"""
        pass

    @abstractmethod
    def declare_infrastructure(self) -> None | Awaitable[None]:
        """Declare exchanges and queues for this service"""
        pass
|
||||
|
||||
|
||||
class SyncRabbitMQ(RabbitMQBase):
    """Synchronous RabbitMQ client"""

    @property
    def is_connected(self) -> bool:
        # Unlike the base class, also require the pika connection to
        # report itself open.
        return bool(self._connection and self._connection.is_open)

    @property
    def is_ready(self) -> bool:
        return bool(self.is_connected and self._channel and self._channel.is_open)

    @conn_retry("RabbitMQ", "Acquiring connection")
    def connect(self) -> None:
        """Open a blocking connection, create a channel, and declare the
        configured topology. No-op when already connected."""
        if self.is_connected:
            return

        credentials = pika.PlainCredentials(self.username, self.password)
        parameters = pika.ConnectionParameters(
            host=self.host,
            port=self.port,
            virtual_host=self.config.vhost,
            credentials=credentials,
            heartbeat=600,
            blocked_connection_timeout=300,
        )

        self._connection = pika.BlockingConnection(parameters)
        self._channel = self._connection.channel()
        # Deliver one unacked message at a time per consumer.
        self._channel.basic_qos(prefetch_count=1)

        self.declare_infrastructure()

    def disconnect(self) -> None:
        """Close channel then connection, tolerating already-closed state."""
        if self._channel:
            if self._channel.is_open:
                self._channel.close()
            self._channel = None
        if self._connection:
            if self._connection.is_open:
                self._connection.close()
            self._connection = None

    def declare_infrastructure(self) -> None:
        """Declare exchanges and queues for this service"""
        if not self.is_ready:
            self.connect()

        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")

        # Declare exchanges
        for exchange in self.config.exchanges:
            self._channel.exchange_declare(
                exchange=exchange.name,
                exchange_type=exchange.type.value,
                durable=exchange.durable,
                auto_delete=exchange.auto_delete,
            )

        # Declare queues and bind them to exchanges
        for queue in self.config.queues:
            self._channel.queue_declare(
                queue=queue.name,
                durable=queue.durable,
                auto_delete=queue.auto_delete,
                arguments=queue.arguments,
            )
            if queue.exchange:
                # Binding key defaults to the queue name when none given.
                self._channel.queue_bind(
                    queue=queue.name,
                    exchange=queue.exchange.name,
                    routing_key=queue.routing_key or queue.name,
                )

    def publish_message(
        self,
        routing_key: str,
        message: str,
        exchange: Optional[Exchange] = None,
        properties: Optional[BasicProperties] = None,
        mandatory: bool = True,
    ) -> None:
        """Publish `message`, reconnecting first if needed.

        Uses the default exchange when `exchange` is None; messages are
        persistent (delivery_mode=2) unless `properties` overrides it.
        """
        if not self.is_ready:
            self.connect()

        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")

        self._channel.basic_publish(
            exchange=exchange.name if exchange else "",
            routing_key=routing_key,
            body=message.encode(),
            properties=properties or BasicProperties(delivery_mode=2),
            mandatory=mandatory,
        )

    def get_channel(self) -> pika.adapters.blocking_connection.BlockingChannel:
        """Return the live channel, (re)connecting if necessary."""
        if not self.is_ready:
            self.connect()
        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")
        return self._channel
|
||||
|
||||
|
||||
class AsyncRabbitMQ(RabbitMQBase):
    """Asynchronous RabbitMQ client"""

    @property
    def is_connected(self) -> bool:
        # aio_pika exposes is_closed rather than is_open.
        return bool(self._connection and not self._connection.is_closed)

    @property
    def is_ready(self) -> bool:
        return bool(self.is_connected and self._channel and not self._channel.is_closed)

    @conn_retry("AsyncRabbitMQ", "Acquiring async connection")
    async def connect(self):
        """Open a robust (auto-reconnecting) connection, create a channel,
        and declare the configured topology. No-op when already connected."""
        if self.is_connected:
            return

        self._connection = await aio_pika.connect_robust(
            host=self.host,
            port=self.port,
            login=self.username,
            password=self.password,
            # NOTE(review): stripping the leading slash turns the default
            # vhost "/" into "" — confirm aio_pika treats "" as the default
            # vhost, otherwise this diverges from the sync client.
            virtualhost=self.config.vhost.lstrip("/"),
        )
        self._channel = await self._connection.channel()
        # Deliver one unacked message at a time per consumer.
        await self._channel.set_qos(prefetch_count=1)

        await self.declare_infrastructure()

    async def disconnect(self):
        """Close channel then connection."""
        if self._channel:
            await self._channel.close()
            self._channel = None
        if self._connection:
            await self._connection.close()
            self._connection = None

    async def declare_infrastructure(self):
        """Declare exchanges and queues for this service"""
        if not self.is_ready:
            await self.connect()

        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")

        # Declare exchanges
        for exchange in self.config.exchanges:
            await self._channel.declare_exchange(
                name=exchange.name,
                type=exchange.type.value,
                durable=exchange.durable,
                auto_delete=exchange.auto_delete,
            )

        # Declare queues and bind them to exchanges
        for queue in self.config.queues:
            queue_obj = await self._channel.declare_queue(
                name=queue.name,
                durable=queue.durable,
                auto_delete=queue.auto_delete,
                arguments=queue.arguments,
            )
            if queue.exchange:
                # Reuses (and shadows) the `exchange` name from the loop
                # above; safe because the exchange loop has finished.
                exchange = await self._channel.get_exchange(queue.exchange.name)
                await queue_obj.bind(
                    exchange, routing_key=queue.routing_key or queue.name
                )

    async def publish_message(
        self,
        routing_key: str,
        message: str,
        exchange: Optional[Exchange] = None,
        persistent: bool = True,
    ) -> None:
        """Publish `message`, reconnecting first if needed.

        Uses the default exchange when `exchange` is None; delivery mode
        is persistent unless `persistent` is False.
        """
        if not self.is_ready:
            await self.connect()

        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")

        if exchange:
            exchange_obj = await self._channel.get_exchange(exchange.name)
        else:
            exchange_obj = self._channel.default_exchange

        await exchange_obj.publish(
            aio_pika.Message(
                body=message.encode(),
                delivery_mode=(
                    aio_pika.DeliveryMode.PERSISTENT
                    if persistent
                    else aio_pika.DeliveryMode.NOT_PERSISTENT
                ),
            ),
            routing_key=routing_key,
        )

    async def get_channel(self) -> aio_pika.abc.AbstractChannel:
        """Return the live channel, (re)connecting if necessary."""
        if not self.is_ready:
            await self.connect()
        if self._channel is None:
            raise RuntimeError("Channel should be established after connect")
        return self._channel
|
||||
@@ -1,44 +1,54 @@
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, cast
|
||||
|
||||
from autogpt_libs.auth.models import DEFAULT_USER_ID
|
||||
from fastapi import HTTPException
|
||||
from prisma import Json
|
||||
from prisma.enums import NotificationType
|
||||
from prisma.models import User
|
||||
|
||||
from backend.data.db import prisma
|
||||
from backend.data.model import UserIntegrations, UserMetadata, UserMetadataRaw
|
||||
from backend.data.notifications import NotificationPreference
|
||||
from backend.server.v2.store.exceptions import DatabaseError
|
||||
from backend.util.encryption import JSONCryptor
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_or_create_user(user_data: dict) -> User:
    """Look up the user identified by the token claims in `user_data`,
    creating a user row on first sight.

    Args:
        user_data: Decoded token claims; must contain "sub" and "email".

    Returns:
        The existing or newly created User.

    Raises:
        HTTPException: 401 when "sub" or "email" is missing from the token.
        DatabaseError: If the lookup/creation fails.
    """
    try:
        user_id = user_data.get("sub")
        if not user_id:
            raise HTTPException(status_code=401, detail="User ID not found in token")

        user_email = user_data.get("email")
        if not user_email:
            raise HTTPException(status_code=401, detail="Email not found in token")

        user = await prisma.user.find_unique(where={"id": user_id})
        if not user:
            user = await prisma.user.create(
                data={
                    "id": user_id,
                    "email": user_email,
                    "name": user_data.get("user_metadata", {}).get("name"),
                }
            )

        return User.model_validate(user)
    except HTTPException:
        # Preserve auth failures; the blanket handler below previously
        # converted 401s into DatabaseError, hiding the real status code.
        raise
    except Exception as e:
        raise DatabaseError(f"Failed to get or create user {user_data}: {e}") from e
|
||||
|
||||
|
||||
async def get_user_by_id(user_id: str) -> Optional[User]:
    """Fetch a user by ID.

    Returns:
        The validated User, or None when no user with that ID exists.
    """
    # Note: an earlier duplicate definition of this function (left over
    # from a merge) referenced undefined names and has been removed; this
    # Optional-returning version is the one callers rely on.
    user = await prisma.user.find_unique(where={"id": user_id})
    return User.model_validate(user) if user else None
|
||||
|
||||
|
||||
async def create_default_user() -> Optional[User]:
|
||||
user = await prisma.user.find_unique(where={"id": DEFAULT_USER_ID})
|
||||
if not user:
|
||||
@@ -128,3 +138,70 @@ async def migrate_and_encrypt_user_integrations():
|
||||
where={"id": user.id},
|
||||
data={"metadata": Json(raw_metadata)},
|
||||
)
|
||||
|
||||
|
||||
async def get_active_user_ids_in_timerange(start_time: str, end_time: str) -> list[str]:
    """Return IDs of users with at least one agent graph execution created
    within [start_time, end_time].

    Args:
        start_time: ISO-8601 timestamp (inclusive lower bound).
        end_time: ISO-8601 timestamp (inclusive upper bound).

    Raises:
        DatabaseError: If parsing or the query fails.
    """
    try:
        window_start = datetime.fromisoformat(start_time)
        window_end = datetime.fromisoformat(end_time)
        active_users = await User.prisma().find_many(
            where={
                "AgentGraphExecutions": {
                    "some": {
                        "createdAt": {
                            "gte": window_start,
                            "lte": window_end,
                        }
                    }
                }
            },
        )
        return [active_user.id for active_user in active_users]

    except Exception as e:
        raise DatabaseError(
            f"Failed to get active user ids in timerange {start_time} to {end_time}: {e}"
        ) from e
|
||||
|
||||
|
||||
async def get_active_users_ids() -> list[str]:
    """Return IDs of users active (ran an agent graph) in the last 30 days.

    Raises:
        DatabaseError: Propagated from get_active_user_ids_in_timerange.
    """
    # Capture "now" once so both window bounds refer to the same instant;
    # the previous implementation called datetime.now() twice.
    now = datetime.now()
    return await get_active_user_ids_in_timerange(
        (now - timedelta(days=30)).isoformat(),
        now.isoformat(),
    )
|
||||
|
||||
|
||||
async def get_user_notification_preference(user_id: str) -> NotificationPreference:
    """Build the user's NotificationPreference from their User row.

    A NULL preference column defaults to enabled; an explicit False is
    respected.

    Raises:
        DatabaseError: If the user does not exist or the lookup fails.
    """
    try:
        user = await User.prisma().find_unique_or_raise(
            where={"id": user_id},
        )

        def _pref(value) -> bool:
            # Enable by default only when the column is NULL. The previous
            # expression `value or True` always evaluated to True, so a
            # user's explicit opt-out (False) was silently ignored.
            return True if value is None else bool(value)

        preferences: dict[NotificationType, bool] = {
            NotificationType.AGENT_RUN: _pref(user.notifyOnAgentRun),
            NotificationType.ZERO_BALANCE: _pref(user.notifyOnZeroBalance),
            NotificationType.LOW_BALANCE: _pref(user.notifyOnLowBalance),
            NotificationType.BLOCK_EXECUTION_FAILED: _pref(
                user.notifyOnBlockExecutionFailed
            ),
            NotificationType.CONTINUOUS_AGENT_ERROR: _pref(
                user.notifyOnContinuousAgentError
            ),
            NotificationType.DAILY_SUMMARY: _pref(user.notifyOnDailySummary),
            NotificationType.WEEKLY_SUMMARY: _pref(user.notifyOnWeeklySummary),
            NotificationType.MONTHLY_SUMMARY: _pref(user.notifyOnMonthlySummary),
        }
        daily_limit = user.maxEmailsPerDay or 3
        # Constructing the model already validates it; no extra
        # model_validate round-trip needed.
        return NotificationPreference(
            user_id=user.id,
            email=user.email,
            preferences=preferences,
            daily_limit=daily_limit,
            # TODO with other changes later, for now we just will email them
            emails_sent_today=0,
            last_reset_date=datetime.now(),
        )

    except Exception as e:
        raise DatabaseError(
            f"Failed to get user notification preference for user {user_id}: {e}"
        ) from e
|
||||
|
||||
@@ -8,6 +8,7 @@ from backend.data.execution import (
|
||||
RedisExecutionEventBus,
|
||||
create_graph_execution,
|
||||
get_execution_results,
|
||||
get_executions_in_timerange,
|
||||
get_incomplete_executions,
|
||||
get_latest_execution,
|
||||
update_execution_status,
|
||||
@@ -17,9 +18,19 @@ from backend.data.execution import (
|
||||
upsert_execution_output,
|
||||
)
|
||||
from backend.data.graph import get_graph, get_node
|
||||
from backend.data.notifications import (
|
||||
create_or_add_to_user_notification_batch,
|
||||
empty_user_notification_batch,
|
||||
get_user_notification_batch,
|
||||
get_user_notification_last_message_in_batch,
|
||||
)
|
||||
from backend.data.user import (
|
||||
get_active_user_ids_in_timerange,
|
||||
get_active_users_ids,
|
||||
get_user_by_id,
|
||||
get_user_integrations,
|
||||
get_user_metadata,
|
||||
get_user_notification_preference,
|
||||
update_user_integrations,
|
||||
update_user_metadata,
|
||||
)
|
||||
@@ -72,6 +83,7 @@ class DatabaseManager(AppService):
|
||||
update_node_execution_stats = exposed_run_and_wait(update_node_execution_stats)
|
||||
upsert_execution_input = exposed_run_and_wait(upsert_execution_input)
|
||||
upsert_execution_output = exposed_run_and_wait(upsert_execution_output)
|
||||
get_executions_in_timerange = exposed_run_and_wait(get_executions_in_timerange)
|
||||
|
||||
# Graphs
|
||||
get_node = exposed_run_and_wait(get_node)
|
||||
@@ -84,8 +96,26 @@ class DatabaseManager(AppService):
|
||||
exposed_run_and_wait(user_credit_model.spend_credits),
|
||||
)
|
||||
|
||||
# User + User Metadata + User Integrations
|
||||
# User + User Metadata + User Integrations + User Notification Preferences
|
||||
get_user_metadata = exposed_run_and_wait(get_user_metadata)
|
||||
update_user_metadata = exposed_run_and_wait(update_user_metadata)
|
||||
get_user_integrations = exposed_run_and_wait(get_user_integrations)
|
||||
update_user_integrations = exposed_run_and_wait(update_user_integrations)
|
||||
get_active_user_ids_in_timerange = exposed_run_and_wait(
|
||||
get_active_user_ids_in_timerange
|
||||
)
|
||||
get_user_by_id = exposed_run_and_wait(get_user_by_id)
|
||||
get_user_notification_preference = exposed_run_and_wait(
|
||||
get_user_notification_preference
|
||||
)
|
||||
get_active_users_ids = exposed_run_and_wait(get_active_users_ids)
|
||||
|
||||
# Notifications
|
||||
create_or_add_to_user_notification_batch = exposed_run_and_wait(
|
||||
create_or_add_to_user_notification_batch
|
||||
)
|
||||
get_user_notification_last_message_in_batch = exposed_run_and_wait(
|
||||
get_user_notification_last_message_in_batch
|
||||
)
|
||||
empty_user_notification_batch = exposed_run_and_wait(empty_user_notification_batch)
|
||||
get_user_notification_batch = exposed_run_and_wait(get_user_notification_batch)
|
||||
|
||||
@@ -163,6 +163,7 @@ def execute_node(
|
||||
# AgentExecutorBlock specially separate the node input_data & its input_default.
|
||||
if isinstance(node_block, AgentExecutorBlock):
|
||||
input_data = {**node.input_default, "data": input_data}
|
||||
data.data = input_data
|
||||
|
||||
# Execute the node
|
||||
input_data_str = json.dumps(input_data)
|
||||
@@ -192,6 +193,11 @@ def execute_node(
|
||||
|
||||
output_size = 0
|
||||
try:
|
||||
# Charge the user for the execution before running the block.
|
||||
# TODO: We assume the block is executed within 0 seconds.
|
||||
# This is fine because for now, there is no block that is charged by time.
|
||||
db_client.spend_credits(data, input_size + output_size, 0)
|
||||
|
||||
for output_name, output_data in node_block.execute(
|
||||
input_data, **extra_exec_kwargs
|
||||
):
|
||||
@@ -210,16 +216,7 @@ def execute_node(
|
||||
):
|
||||
yield execution
|
||||
|
||||
# Update execution status and spend credits
|
||||
res = update_execution(ExecutionStatus.COMPLETED)
|
||||
s = input_size + output_size
|
||||
t = (
|
||||
(res.end_time - res.start_time).total_seconds()
|
||||
if res.end_time and res.start_time
|
||||
else 0
|
||||
)
|
||||
data.data = input_data
|
||||
db_client.spend_credits(data, s, t)
|
||||
update_execution(ExecutionStatus.COMPLETED)
|
||||
|
||||
except Exception as e:
|
||||
error_msg = str(e)
|
||||
@@ -806,6 +803,7 @@ class ExecutionManager(AppService):
|
||||
data: BlockInput,
|
||||
user_id: str,
|
||||
graph_version: Optional[int] = None,
|
||||
preset_id: str | None = None,
|
||||
) -> GraphExecutionEntry:
|
||||
graph: GraphModel | None = self.db_client.get_graph(
|
||||
graph_id=graph_id, user_id=user_id, version=graph_version
|
||||
@@ -827,9 +825,9 @@ class ExecutionManager(AppService):
|
||||
|
||||
# Extract request input data, and assign it to the input pin.
|
||||
if block.block_type == BlockType.INPUT:
|
||||
name = node.input_default.get("name")
|
||||
if name in data.get("node_input", {}):
|
||||
input_data = {"value": data["node_input"][name]}
|
||||
input_name = node.input_default.get("name")
|
||||
if input_name and input_name in data:
|
||||
input_data = {"value": data[input_name]}
|
||||
|
||||
# Extract webhook payload, and assign it to the input pin
|
||||
webhook_payload_key = f"webhook_{node.webhook_id}_payload"
|
||||
@@ -854,6 +852,7 @@ class ExecutionManager(AppService):
|
||||
graph_version=graph.version,
|
||||
nodes_input=nodes_input,
|
||||
user_id=user_id,
|
||||
preset_id=preset_id,
|
||||
)
|
||||
|
||||
starting_node_execs = []
|
||||
@@ -873,6 +872,7 @@ class ExecutionManager(AppService):
|
||||
graph_exec = GraphExecutionEntry(
|
||||
user_id=user_id,
|
||||
graph_id=graph_id,
|
||||
graph_version=graph_version or 0,
|
||||
graph_exec_id=graph_exec_id,
|
||||
start_node_execs=starting_node_execs,
|
||||
)
|
||||
|
||||
@@ -130,6 +130,13 @@ nvidia_credentials = APIKeyCredentials(
|
||||
title="Use Credits for Nvidia",
|
||||
expires_at=None,
|
||||
)
|
||||
screenshotone_credentials = APIKeyCredentials(
|
||||
id="3b1bdd16-8818-4bc2-8cbb-b23f9a3439ed",
|
||||
provider="screenshotone",
|
||||
api_key=SecretStr(settings.secrets.screenshotone_api_key),
|
||||
title="Use Credits for ScreenshotOne",
|
||||
expires_at=None,
|
||||
)
|
||||
mem0_credentials = APIKeyCredentials(
|
||||
id="ed55ac19-356e-4243-a6cb-bc599e9b716f",
|
||||
provider="mem0",
|
||||
@@ -154,8 +161,9 @@ DEFAULT_CREDENTIALS = [
|
||||
fal_credentials,
|
||||
exa_credentials,
|
||||
e2b_credentials,
|
||||
nvidia_credentials,
|
||||
mem0_credentials,
|
||||
nvidia_credentials,
|
||||
screenshotone_credentials,
|
||||
]
|
||||
|
||||
|
||||
@@ -219,6 +227,8 @@ class IntegrationCredentialsStore:
|
||||
all_credentials.append(e2b_credentials)
|
||||
if settings.secrets.nvidia_api_key:
|
||||
all_credentials.append(nvidia_credentials)
|
||||
if settings.secrets.screenshotone_api_key:
|
||||
all_credentials.append(screenshotone_credentials)
|
||||
if settings.secrets.mem0_api_key:
|
||||
all_credentials.append(mem0_credentials)
|
||||
return all_credentials
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from backend.integrations.oauth.todoist import TodoistOAuthHandler
|
||||
|
||||
from .github import GitHubOAuthHandler
|
||||
from .google import GoogleOAuthHandler
|
||||
from .linear import LinearOAuthHandler
|
||||
@@ -19,6 +21,7 @@ HANDLERS_BY_NAME: dict["ProviderName", type["BaseOAuthHandler"]] = {
|
||||
NotionOAuthHandler,
|
||||
TwitterOAuthHandler,
|
||||
LinearOAuthHandler,
|
||||
TodoistOAuthHandler,
|
||||
]
|
||||
}
|
||||
# --8<-- [end:HANDLERS_BY_NAMEExample]
|
||||
|
||||
@@ -0,0 +1,81 @@
|
||||
import urllib.parse
|
||||
from typing import ClassVar, Optional
|
||||
|
||||
import requests
|
||||
|
||||
from backend.data.model import OAuth2Credentials, ProviderName
|
||||
from backend.integrations.oauth.base import BaseOAuthHandler
|
||||
|
||||
|
||||
class TodoistOAuthHandler(BaseOAuthHandler):
    """OAuth2 handler for Todoist.

    Todoist issues long-lived access tokens with no refresh flow, so
    `_refresh_tokens` and `revoke_tokens` are effectively no-ops.
    """

    PROVIDER_NAME = ProviderName.TODOIST
    # Scopes requested at authorization time.
    DEFAULT_SCOPES: ClassVar[list[str]] = [
        "task:add",
        "data:read",
        "data:read_write",
        "data:delete",
        "project:delete",
    ]

    AUTHORIZE_URL = "https://todoist.com/oauth/authorize"
    TOKEN_URL = "https://todoist.com/oauth/access_token"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri

    def get_login_url(
        self, scopes: list[str], state: str, code_challenge: Optional[str]
    ) -> str:
        """Build the Todoist authorization URL.

        NOTE(review): the `scopes` and `code_challenge` arguments are
        ignored — DEFAULT_SCOPES is always sent and PKCE is not applied;
        `redirect_uri` is also omitted from the query string. Confirm this
        matches Todoist's OAuth requirements.
        """
        params = {
            "client_id": self.client_id,
            "scope": ",".join(self.DEFAULT_SCOPES),
            "state": state,
        }

        return f"{self.AUTHORIZE_URL}?{urllib.parse.urlencode(params)}"

    def exchange_code_for_tokens(
        self, code: str, scopes: list[str], code_verifier: Optional[str]
    ) -> OAuth2Credentials:
        """Exchange authorization code for access tokens"""

        data = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "code": code,
            "redirect_uri": self.redirect_uri,
        }

        response = requests.post(self.TOKEN_URL, data=data)
        response.raise_for_status()

        tokens = response.json()

        # Fetch the user's email via the Sync API so the credentials can
        # be labelled with a username.
        response = requests.post(
            "https://api.todoist.com/sync/v9/sync",
            headers={"Authorization": f"Bearer {tokens['access_token']}"},
            data={"sync_token": "*", "resource_types": '["user"]'},
        )
        response.raise_for_status()
        user_info = response.json()
        user_email = user_info["user"].get("email")

        # Todoist tokens do not expire and cannot be refreshed, hence all
        # the None expiry/refresh fields below.
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=None,
            username=user_email,
            access_token=tokens["access_token"],
            refresh_token=None,
            access_token_expires_at=None,
            refresh_token_expires_at=None,
            scopes=scopes,
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Todoist does not support token refresh
        return credentials

    def revoke_tokens(self, credentials: OAuth2Credentials) -> bool:
        # No revocation endpoint is called; always reports "not revoked".
        return False
|
||||
@@ -30,8 +30,10 @@ class ProviderName(str, Enum):
|
||||
REDDIT = "reddit"
|
||||
REPLICATE = "replicate"
|
||||
REVID = "revid"
|
||||
SCREENSHOTONE = "screenshotone"
|
||||
SLANT3D = "slant3d"
|
||||
SMTP = "smtp"
|
||||
TWITTER = "twitter"
|
||||
TODOIST = "todoist"
|
||||
UNREAL_SPEECH = "unreal_speech"
|
||||
# --8<-- [end:ProviderName]
|
||||
|
||||
@@ -20,24 +20,28 @@ class ConnectionManager:
|
||||
for subscribers in self.subscriptions.values():
|
||||
subscribers.discard(websocket)
|
||||
|
||||
async def subscribe(self, graph_id: str, graph_version: int, websocket: WebSocket):
    """Subscribe a websocket to execution events for one graph version.

    Subscriptions are keyed by "{graph_id}_{graph_version}". (The diff
    fusion in the source interleaved the old graph_id-keyed version with
    this one; this is the new implementation.)
    """
    key = f"{graph_id}_{graph_version}"
    if key not in self.subscriptions:
        self.subscriptions[key] = set()
    self.subscriptions[key].add(websocket)
|
||||
|
||||
async def unsubscribe(
    self, graph_id: str, graph_version: int, websocket: WebSocket
):
    """Remove a websocket's subscription for one graph version, dropping
    the subscription set entirely once it becomes empty."""
    key = f"{graph_id}_{graph_version}"
    if key in self.subscriptions:
        self.subscriptions[key].discard(websocket)
        if not self.subscriptions[key]:
            del self.subscriptions[key]
|
||||
|
||||
async def send_execution_result(self, result: execution.ExecutionResult):
    """Broadcast an execution result to every websocket subscribed to the
    result's graph id + version channel."""
    key = f"{result.graph_id}_{result.graph_version}"
    if key in self.subscriptions:
        message = WsMessage(
            method=Methods.EXECUTION_EVENT,
            channel=key,
            data=result.model_dump(),
        ).model_dump_json()
        for connection in self.subscriptions[key]:
            await connection.send_text(message)
|
||||
|
||||
@@ -1,16 +1,18 @@
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from typing import Any, Sequence
|
||||
from typing import Annotated, Any, Dict, List, Optional, Sequence
|
||||
|
||||
from autogpt_libs.utils.cache import thread_cached
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from prisma.enums import APIKeyPermission
|
||||
from fastapi import APIRouter, Body, Depends, HTTPException
|
||||
from prisma.enums import AgentExecutionStatus, APIKeyPermission
|
||||
from typing_extensions import TypedDict
|
||||
|
||||
import backend.data.block
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.api_key import APIKey
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.data.execution import ExecutionResult
|
||||
from backend.executor import ExecutionManager
|
||||
from backend.server.external.middleware import require_permission
|
||||
from backend.util.service import get_service_client
|
||||
@@ -28,6 +30,40 @@ logger = logging.getLogger(__name__)
|
||||
v1_router = APIRouter()
|
||||
|
||||
|
||||
class NodeOutput(TypedDict):
|
||||
key: str
|
||||
value: Any
|
||||
|
||||
|
||||
class ExecutionNode(TypedDict):
|
||||
node_id: str
|
||||
input: Any
|
||||
output: Dict[str, Any]
|
||||
|
||||
|
||||
class ExecutionNodeOutput(TypedDict):
|
||||
node_id: str
|
||||
outputs: List[NodeOutput]
|
||||
|
||||
|
||||
class GraphExecutionResult(TypedDict):
|
||||
execution_id: str
|
||||
status: str
|
||||
nodes: List[ExecutionNode]
|
||||
output: Optional[List[Dict[str, str]]]
|
||||
|
||||
|
||||
def get_outputs_with_names(results: List[ExecutionResult]) -> List[Dict[str, str]]:
|
||||
outputs = []
|
||||
for result in results:
|
||||
if "output" in result.output_data:
|
||||
output_value = result.output_data["output"][0]
|
||||
name = result.output_data.get("name", [None])[0]
|
||||
if output_value and name:
|
||||
outputs.append({name: output_value})
|
||||
return outputs
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/blocks",
|
||||
tags=["blocks"],
|
||||
@@ -59,21 +95,25 @@ def execute_graph_block(
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/graphs/{graph_id}/execute",
|
||||
path="/graphs/{graph_id}/execute/{graph_version}",
|
||||
tags=["graphs"],
|
||||
)
|
||||
def execute_graph(
|
||||
graph_id: str,
|
||||
node_input: dict[Any, Any],
|
||||
graph_version: int,
|
||||
node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
|
||||
api_key: APIKey = Depends(require_permission(APIKeyPermission.EXECUTE_GRAPH)),
|
||||
) -> dict[str, Any]:
|
||||
try:
|
||||
graph_exec = execution_manager_client().add_execution(
|
||||
graph_id, node_input, user_id=api_key.user_id
|
||||
graph_id,
|
||||
graph_version=graph_version,
|
||||
data=node_input,
|
||||
user_id=api_key.user_id,
|
||||
)
|
||||
return {"id": graph_exec.graph_exec_id}
|
||||
except Exception as e:
|
||||
msg = e.__str__().encode().decode("unicode_escape")
|
||||
msg = str(e).encode().decode("unicode_escape")
|
||||
raise HTTPException(status_code=400, detail=msg)
|
||||
|
||||
|
||||
@@ -85,27 +125,28 @@ async def get_graph_execution_results(
|
||||
graph_id: str,
|
||||
graph_exec_id: str,
|
||||
api_key: APIKey = Depends(require_permission(APIKeyPermission.READ_GRAPH)),
|
||||
) -> dict:
|
||||
) -> GraphExecutionResult:
|
||||
graph = await graph_db.get_graph(graph_id, user_id=api_key.user_id)
|
||||
if not graph:
|
||||
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
|
||||
|
||||
results = await execution_db.get_execution_results(graph_exec_id)
|
||||
last_result = results[-1] if results else None
|
||||
execution_status = (
|
||||
last_result.status if last_result else AgentExecutionStatus.INCOMPLETE
|
||||
)
|
||||
outputs = get_outputs_with_names(results)
|
||||
|
||||
return {
|
||||
"execution_id": graph_exec_id,
|
||||
"nodes": [
|
||||
{
|
||||
"node_id": result.node_id,
|
||||
"input": (
|
||||
result.input_data.get("value")
|
||||
if "value" in result.input_data
|
||||
else result.input_data
|
||||
),
|
||||
"output": result.output_data.get(
|
||||
"response", result.output_data.get("result", [])
|
||||
),
|
||||
}
|
||||
return GraphExecutionResult(
|
||||
execution_id=graph_exec_id,
|
||||
status=execution_status,
|
||||
nodes=[
|
||||
ExecutionNode(
|
||||
node_id=result.node_id,
|
||||
input=result.input_data.get("value", result.input_data),
|
||||
output={k: v for k, v in result.output_data.items()},
|
||||
)
|
||||
for result in results
|
||||
],
|
||||
}
|
||||
output=outputs if execution_status == AgentExecutionStatus.COMPLETED else None,
|
||||
)
|
||||
|
||||
@@ -25,18 +25,15 @@ class WsMessage(pydantic.BaseModel):
|
||||
|
||||
class ExecutionSubscription(pydantic.BaseModel):
|
||||
graph_id: str
|
||||
graph_version: int
|
||||
|
||||
|
||||
class SubscriptionDetails(pydantic.BaseModel):
|
||||
event_type: str
|
||||
channel: str
|
||||
graph_id: str
|
||||
class ExecuteGraphResponse(pydantic.BaseModel):
|
||||
graph_exec_id: str
|
||||
|
||||
|
||||
class CreateGraph(pydantic.BaseModel):
|
||||
template_id: str | None = None
|
||||
template_version: int | None = None
|
||||
graph: backend.data.graph.Graph | None = None
|
||||
graph: backend.data.graph.Graph
|
||||
|
||||
|
||||
class CreateAPIKeyRequest(pydantic.BaseModel):
|
||||
@@ -58,6 +55,20 @@ class UpdatePermissionsRequest(pydantic.BaseModel):
|
||||
permissions: List[APIKeyPermission]
|
||||
|
||||
|
||||
class Pagination(pydantic.BaseModel):
|
||||
total_items: int = pydantic.Field(
|
||||
description="Total number of items.", examples=[42]
|
||||
)
|
||||
total_pages: int = pydantic.Field(
|
||||
description="Total number of pages.", examples=[2]
|
||||
)
|
||||
current_page: int = pydantic.Field(
|
||||
description="Current_page page number.", examples=[1]
|
||||
)
|
||||
page_size: int = pydantic.Field(
|
||||
description="Number of items per page.", examples=[25]
|
||||
)
|
||||
|
||||
|
||||
class RequestTopUp(pydantic.BaseModel):
|
||||
amount: int
|
||||
"""Amount of credits to top up."""
|
||||
credit_amount: int
|
||||
|
||||
@@ -17,6 +17,8 @@ import backend.data.db
|
||||
import backend.data.graph
|
||||
import backend.data.user
|
||||
import backend.server.routers.v1
|
||||
import backend.server.v2.library.db
|
||||
import backend.server.v2.library.model
|
||||
import backend.server.v2.library.routes
|
||||
import backend.server.v2.store.model
|
||||
import backend.server.v2.store.routes
|
||||
@@ -123,15 +125,15 @@ class AgentServer(backend.util.service.AppProcess):
|
||||
@staticmethod
|
||||
async def test_execute_graph(
|
||||
graph_id: str,
|
||||
node_input: dict[str, Any],
|
||||
user_id: str,
|
||||
graph_version: Optional[int] = None,
|
||||
node_input: Optional[dict[str, Any]] = None,
|
||||
):
|
||||
return backend.server.routers.v1.execute_graph(
|
||||
user_id=user_id,
|
||||
graph_id=graph_id,
|
||||
graph_version=graph_version,
|
||||
node_input=node_input,
|
||||
node_input=node_input or {},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
@@ -170,8 +172,64 @@ class AgentServer(backend.util.service.AppProcess):
|
||||
|
||||
@staticmethod
|
||||
async def test_delete_graph(graph_id: str, user_id: str):
|
||||
await backend.server.v2.library.db.delete_library_agent_by_graph_id(
|
||||
graph_id=graph_id, user_id=user_id
|
||||
)
|
||||
return await backend.server.routers.v1.delete_graph(graph_id, user_id)
|
||||
|
||||
@staticmethod
|
||||
async def test_get_presets(user_id: str, page: int = 1, page_size: int = 10):
|
||||
return await backend.server.v2.library.routes.presets.get_presets(
|
||||
user_id=user_id, page=page, page_size=page_size
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_get_preset(preset_id: str, user_id: str):
|
||||
return await backend.server.v2.library.routes.presets.get_preset(
|
||||
preset_id=preset_id, user_id=user_id
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_create_preset(
|
||||
preset: backend.server.v2.library.model.CreateLibraryAgentPresetRequest,
|
||||
user_id: str,
|
||||
):
|
||||
return await backend.server.v2.library.routes.presets.create_preset(
|
||||
preset=preset, user_id=user_id
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_update_preset(
|
||||
preset_id: str,
|
||||
preset: backend.server.v2.library.model.CreateLibraryAgentPresetRequest,
|
||||
user_id: str,
|
||||
):
|
||||
return await backend.server.v2.library.routes.presets.update_preset(
|
||||
preset_id=preset_id, preset=preset, user_id=user_id
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_delete_preset(preset_id: str, user_id: str):
|
||||
return await backend.server.v2.library.routes.presets.delete_preset(
|
||||
preset_id=preset_id, user_id=user_id
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_execute_preset(
|
||||
graph_id: str,
|
||||
graph_version: int,
|
||||
preset_id: str,
|
||||
user_id: str,
|
||||
node_input: Optional[dict[str, Any]] = None,
|
||||
):
|
||||
return await backend.server.v2.library.routes.presets.execute_preset(
|
||||
graph_id=graph_id,
|
||||
graph_version=graph_version,
|
||||
preset_id=preset_id,
|
||||
node_input=node_input or {},
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def test_create_store_listing(
|
||||
request: backend.server.v2.store.model.StoreSubmissionRequest, user_id: str
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from typing import TYPE_CHECKING, Annotated, Any, Sequence
|
||||
|
||||
import pydantic
|
||||
@@ -8,12 +9,13 @@ import stripe
|
||||
from autogpt_libs.auth.middleware import auth_middleware
|
||||
from autogpt_libs.feature_flag.client import feature_flag
|
||||
from autogpt_libs.utils.cache import thread_cached
|
||||
from fastapi import APIRouter, Depends, HTTPException, Request, Response
|
||||
from fastapi import APIRouter, Body, Depends, HTTPException, Request, Response
|
||||
from typing_extensions import Optional, TypedDict
|
||||
|
||||
import backend.data.block
|
||||
import backend.server.integrations.router
|
||||
import backend.server.routers.analytics
|
||||
import backend.server.v2.library.db as library_db
|
||||
from backend.data import execution as execution_db
|
||||
from backend.data import graph as graph_db
|
||||
from backend.data.api_key import (
|
||||
@@ -31,6 +33,7 @@ from backend.data.api_key import (
|
||||
from backend.data.block import BlockInput, CompletedBlockOutput
|
||||
from backend.data.credit import (
|
||||
AutoTopUpConfig,
|
||||
TransactionHistory,
|
||||
get_auto_top_up,
|
||||
get_block_costs,
|
||||
get_stripe_customer_id,
|
||||
@@ -48,6 +51,7 @@ from backend.server.model import (
|
||||
CreateAPIKeyRequest,
|
||||
CreateAPIKeyResponse,
|
||||
CreateGraph,
|
||||
ExecuteGraphResponse,
|
||||
RequestTopUp,
|
||||
SetGraphActiveVersion,
|
||||
UpdatePermissionsRequest,
|
||||
@@ -151,7 +155,9 @@ async def get_user_credits(
|
||||
async def request_top_up(
|
||||
request: RequestTopUp, user_id: Annotated[str, Depends(get_user_id)]
|
||||
):
|
||||
checkout_url = await _user_credit_model.top_up_intent(user_id, request.amount)
|
||||
checkout_url = await _user_credit_model.top_up_intent(
|
||||
user_id, request.credit_amount
|
||||
)
|
||||
return {"checkout_url": checkout_url}
|
||||
|
||||
|
||||
@@ -173,6 +179,8 @@ async def configure_user_auto_top_up(
|
||||
) -> str:
|
||||
if request.threshold < 0:
|
||||
raise ValueError("Threshold must be greater than 0")
|
||||
if request.amount < 500 and request.amount != 0:
|
||||
raise ValueError("Amount must be greater than or equal to 500")
|
||||
if request.amount < request.threshold:
|
||||
raise ValueError("Amount must be greater than or equal to threshold")
|
||||
|
||||
@@ -183,7 +191,9 @@ async def configure_user_auto_top_up(
|
||||
else:
|
||||
await _user_credit_model.top_up_credits(user_id, 0)
|
||||
|
||||
await set_auto_top_up(user_id, threshold=request.threshold, amount=request.amount)
|
||||
await set_auto_top_up(
|
||||
user_id, AutoTopUpConfig(threshold=request.threshold, amount=request.amount)
|
||||
)
|
||||
return "Auto top-up settings updated"
|
||||
|
||||
|
||||
@@ -233,7 +243,7 @@ async def manage_payment_method(
|
||||
) -> dict[str, str]:
|
||||
session = stripe.billing_portal.Session.create(
|
||||
customer=await get_stripe_customer_id(user_id),
|
||||
return_url=settings.config.platform_base_url + "/marketplace/credits",
|
||||
return_url=settings.config.frontend_base_url + "/marketplace/credits",
|
||||
)
|
||||
if not session:
|
||||
raise HTTPException(
|
||||
@@ -242,6 +252,22 @@ async def manage_payment_method(
|
||||
return {"url": session.url}
|
||||
|
||||
|
||||
@v1_router.get(path="/credits/transactions", dependencies=[Depends(auth_middleware)])
|
||||
async def get_credit_history(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
transaction_time: datetime | None = None,
|
||||
transaction_count_limit: int = 100,
|
||||
) -> TransactionHistory:
|
||||
if transaction_count_limit < 1 or transaction_count_limit > 1000:
|
||||
raise ValueError("Transaction count limit must be between 1 and 1000")
|
||||
|
||||
return await _user_credit_model.get_transaction_history(
|
||||
user_id=user_id,
|
||||
transaction_time=transaction_time or datetime.max,
|
||||
transaction_count_limit=transaction_count_limit,
|
||||
)
|
||||
|
||||
|
||||
########################################################
|
||||
##################### Graphs ###########################
|
||||
########################################################
|
||||
@@ -285,11 +311,6 @@ async def get_graph(
|
||||
tags=["graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
@v1_router.get(
|
||||
path="/templates/{graph_id}/versions",
|
||||
tags=["templates", "graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def get_graph_all_versions(
|
||||
graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> Sequence[graph_db.GraphModel]:
|
||||
@@ -305,41 +326,18 @@ async def get_graph_all_versions(
|
||||
async def create_new_graph(
|
||||
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> graph_db.GraphModel:
|
||||
return await do_create_graph(create_graph, is_template=False, user_id=user_id)
|
||||
|
||||
|
||||
async def do_create_graph(
|
||||
create_graph: CreateGraph,
|
||||
is_template: bool,
|
||||
# user_id doesn't have to be annotated like on other endpoints,
|
||||
# because create_graph isn't used directly as an endpoint
|
||||
user_id: str,
|
||||
) -> graph_db.GraphModel:
|
||||
if create_graph.graph:
|
||||
graph = graph_db.make_graph_model(create_graph.graph, user_id)
|
||||
elif create_graph.template_id:
|
||||
# Create a new graph from a template
|
||||
graph = await graph_db.get_graph(
|
||||
create_graph.template_id,
|
||||
create_graph.template_version,
|
||||
template=True,
|
||||
user_id=user_id,
|
||||
)
|
||||
if not graph:
|
||||
raise HTTPException(
|
||||
400, detail=f"Template #{create_graph.template_id} not found"
|
||||
)
|
||||
graph.version = 1
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Either graph or template_id must be provided."
|
||||
)
|
||||
|
||||
graph.is_template = is_template
|
||||
graph.is_active = not is_template
|
||||
graph = graph_db.make_graph_model(create_graph.graph, user_id)
|
||||
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
|
||||
|
||||
graph = await graph_db.create_graph(graph, user_id=user_id)
|
||||
|
||||
# Create a library agent for the new graph
|
||||
await library_db.create_library_agent(
|
||||
graph.id,
|
||||
graph.version,
|
||||
user_id,
|
||||
)
|
||||
|
||||
graph = await on_graph_activate(
|
||||
graph,
|
||||
get_credentials=lambda id: integration_creds_manager.get(user_id, id),
|
||||
@@ -366,11 +364,6 @@ async def delete_graph(
|
||||
@v1_router.put(
|
||||
path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
|
||||
)
|
||||
@v1_router.put(
|
||||
path="/templates/{graph_id}",
|
||||
tags=["templates", "graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def update_graph(
|
||||
graph_id: str,
|
||||
graph: graph_db.Graph,
|
||||
@@ -402,6 +395,10 @@ async def update_graph(
|
||||
new_graph_version = await graph_db.create_graph(graph, user_id=user_id)
|
||||
|
||||
if new_graph_version.is_active:
|
||||
# Keep the library agent up to date with the new active version
|
||||
await library_db.update_agent_version_in_library(
|
||||
user_id, graph.id, graph.version
|
||||
)
|
||||
|
||||
def get_credentials(credentials_id: str) -> "Credentials | None":
|
||||
return integration_creds_manager.get(user_id, credentials_id)
|
||||
@@ -458,6 +455,12 @@ async def set_graph_active_version(
|
||||
version=new_active_version,
|
||||
user_id=user_id,
|
||||
)
|
||||
|
||||
# Keep the library agent up to date with the new active version
|
||||
await library_db.update_agent_version_in_library(
|
||||
user_id, new_active_graph.id, new_active_graph.version
|
||||
)
|
||||
|
||||
if current_active_graph and current_active_graph.version != new_active_version:
|
||||
# Handle deactivation of the previously active version
|
||||
await on_graph_deactivate(
|
||||
@@ -467,23 +470,23 @@ async def set_graph_active_version(
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/graphs/{graph_id}/execute",
|
||||
path="/graphs/{graph_id}/execute/{graph_version}",
|
||||
tags=["graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
def execute_graph(
|
||||
graph_id: str,
|
||||
node_input: dict[Any, Any],
|
||||
node_input: Annotated[dict[str, Any], Body(..., embed=True, default_factory=dict)],
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
graph_version: Optional[int] = None,
|
||||
) -> dict[str, Any]: # FIXME: add proper return type
|
||||
) -> ExecuteGraphResponse:
|
||||
try:
|
||||
graph_exec = execution_manager_client().add_execution(
|
||||
graph_id, node_input, user_id=user_id, graph_version=graph_version
|
||||
)
|
||||
return {"id": graph_exec.graph_exec_id}
|
||||
return ExecuteGraphResponse(graph_exec_id=graph_exec.graph_exec_id)
|
||||
except Exception as e:
|
||||
msg = e.__str__().encode().decode("unicode_escape")
|
||||
msg = str(e).encode().decode("unicode_escape")
|
||||
raise HTTPException(status_code=400, detail=msg)
|
||||
|
||||
|
||||
@@ -534,47 +537,6 @@ async def get_graph_run_node_execution_results(
|
||||
return await execution_db.get_execution_results(graph_exec_id)
|
||||
|
||||
|
||||
########################################################
|
||||
##################### Templates ########################
|
||||
########################################################
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/templates",
|
||||
tags=["graphs", "templates"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def get_templates(
|
||||
user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> Sequence[graph_db.GraphModel]:
|
||||
return await graph_db.get_graphs(filter_by="template", user_id=user_id)
|
||||
|
||||
|
||||
@v1_router.get(
|
||||
path="/templates/{graph_id}",
|
||||
tags=["templates", "graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def get_template(
|
||||
graph_id: str, version: int | None = None
|
||||
) -> graph_db.GraphModel:
|
||||
graph = await graph_db.get_graph(graph_id, version, template=True)
|
||||
if not graph:
|
||||
raise HTTPException(status_code=404, detail=f"Template #{graph_id} not found.")
|
||||
return graph
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
path="/templates",
|
||||
tags=["templates", "graphs"],
|
||||
dependencies=[Depends(auth_middleware)],
|
||||
)
|
||||
async def create_new_template(
|
||||
create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
|
||||
) -> graph_db.GraphModel:
|
||||
return await do_create_graph(create_graph, is_template=True, user_id=user_id)
|
||||
|
||||
|
||||
########################################################
|
||||
##################### Schedules ########################
|
||||
########################################################
|
||||
@@ -584,6 +546,7 @@ class ScheduleCreationRequest(pydantic.BaseModel):
|
||||
cron: str
|
||||
input_data: dict[Any, Any]
|
||||
graph_id: str
|
||||
graph_version: int
|
||||
|
||||
|
||||
@v1_router.post(
|
||||
@@ -595,10 +558,13 @@ async def create_schedule(
|
||||
user_id: Annotated[str, Depends(get_user_id)],
|
||||
schedule: ScheduleCreationRequest,
|
||||
) -> scheduler.JobInfo:
|
||||
graph = await graph_db.get_graph(schedule.graph_id, user_id=user_id)
|
||||
graph = await graph_db.get_graph(
|
||||
schedule.graph_id, schedule.graph_version, user_id=user_id
|
||||
)
|
||||
if not graph:
|
||||
raise HTTPException(
|
||||
status_code=404, detail=f"Graph #{schedule.graph_id} not found."
|
||||
status_code=404,
|
||||
detail=f"Graph #{schedule.graph_id} v.{schedule.graph_version} not found.",
|
||||
)
|
||||
|
||||
return await asyncio.to_thread(
|
||||
|
||||
@@ -1,107 +1,210 @@
|
||||
import logging
|
||||
from typing import List
|
||||
|
||||
import fastapi
|
||||
import prisma.errors
|
||||
import prisma.fields
|
||||
import prisma.models
|
||||
import prisma.types
|
||||
|
||||
import backend.data.graph
|
||||
import backend.data.includes
|
||||
import backend.server.v2.library.model
|
||||
import backend.server.v2.store.exceptions
|
||||
import backend.server.model
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.server.v2.store.exceptions as store_exceptions
|
||||
import backend.server.v2.store.image_gen as store_image_gen
|
||||
import backend.server.v2.store.media as store_media
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def get_library_agents(
|
||||
user_id: str,
|
||||
) -> List[backend.server.v2.library.model.LibraryAgent]:
|
||||
"""
|
||||
Returns all agents (AgentGraph) that belong to the user and all agents in their library (UserAgent table)
|
||||
"""
|
||||
logger.debug(f"Getting library agents for user {user_id}")
|
||||
user_id: str, search_query: str | None = None
|
||||
) -> list[library_model.LibraryAgent]:
|
||||
logger.debug(
|
||||
f"Fetching library agents for user_id={user_id} search_query={search_query}"
|
||||
)
|
||||
|
||||
try:
|
||||
# Get agents created by user with nodes and links
|
||||
user_created = await prisma.models.AgentGraph.prisma().find_many(
|
||||
where=prisma.types.AgentGraphWhereInput(userId=user_id, isActive=True),
|
||||
include=backend.data.includes.AGENT_GRAPH_INCLUDE,
|
||||
)
|
||||
if search_query and len(search_query.strip()) > 100:
|
||||
logger.warning(f"Search query too long: {search_query}")
|
||||
raise store_exceptions.DatabaseError("Search query is too long.")
|
||||
|
||||
# Get agents in user's library with nodes and links
|
||||
library_agents = await prisma.models.UserAgent.prisma().find_many(
|
||||
where=prisma.types.UserAgentWhereInput(
|
||||
userId=user_id, isDeleted=False, isArchived=False
|
||||
),
|
||||
include={
|
||||
where_clause: prisma.types.LibraryAgentWhereInput = {
|
||||
"userId": user_id,
|
||||
"isDeleted": False,
|
||||
"isArchived": False,
|
||||
}
|
||||
|
||||
if search_query:
|
||||
where_clause["OR"] = [
|
||||
{
|
||||
"Agent": {
|
||||
"include": {
|
||||
"AgentNodes": {
|
||||
"include": {
|
||||
"Input": True,
|
||||
"Output": True,
|
||||
"Webhook": True,
|
||||
"AgentBlock": True,
|
||||
}
|
||||
}
|
||||
"is": {"name": {"contains": search_query, "mode": "insensitive"}}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Agent": {
|
||||
"is": {
|
||||
"description": {"contains": search_query, "mode": "insensitive"}
|
||||
}
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
try:
|
||||
library_agents = await prisma.models.LibraryAgent.prisma().find_many(
|
||||
where=where_clause,
|
||||
include={
|
||||
"Agent": {
|
||||
"include": {
|
||||
"AgentNodes": {"include": {"Input": True, "Output": True}},
|
||||
"AgentGraphExecution": {"where": {"userId": user_id}},
|
||||
}
|
||||
},
|
||||
"Creator": True,
|
||||
},
|
||||
order=[{"updatedAt": "desc"}],
|
||||
)
|
||||
logger.debug(f"Retrieved {len(library_agents)} agents for user_id={user_id}.")
|
||||
return [library_model.LibraryAgent.from_db(agent) for agent in library_agents]
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error fetching library agents: {e}")
|
||||
raise store_exceptions.DatabaseError("Failed to fetch library agents") from e
|
||||
|
||||
|
||||
async def create_library_agent(
|
||||
agent_id: str, agent_version: int, user_id: str
|
||||
) -> prisma.models.LibraryAgent:
|
||||
"""
|
||||
Adds an agent to the user's library (LibraryAgent table)
|
||||
"""
|
||||
|
||||
try:
|
||||
agent = await prisma.models.AgentGraph.prisma().find_unique(
|
||||
where={"id": agent_id, "version": agent_version}
|
||||
)
|
||||
|
||||
# Convert to Graph models first
|
||||
graphs = []
|
||||
|
||||
# Add user created agents
|
||||
for agent in user_created:
|
||||
try:
|
||||
graphs.append(backend.data.graph.GraphModel.from_db(agent))
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing user created agent {agent.id}: {e}")
|
||||
continue
|
||||
|
||||
# Add library agents
|
||||
for agent in library_agents:
|
||||
if agent.Agent:
|
||||
try:
|
||||
graphs.append(backend.data.graph.GraphModel.from_db(agent.Agent))
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing library agent {agent.agentId}: {e}")
|
||||
continue
|
||||
|
||||
# Convert Graph models to LibraryAgent models
|
||||
result = []
|
||||
for graph in graphs:
|
||||
result.append(
|
||||
backend.server.v2.library.model.LibraryAgent(
|
||||
id=graph.id,
|
||||
version=graph.version,
|
||||
is_active=graph.is_active,
|
||||
name=graph.name,
|
||||
description=graph.description,
|
||||
isCreatedByUser=any(a.id == graph.id for a in user_created),
|
||||
input_schema=graph.input_schema,
|
||||
output_schema=graph.output_schema,
|
||||
)
|
||||
if not agent:
|
||||
raise store_exceptions.AgentNotFoundError(
|
||||
f"Agent {agent_id} version {agent_version} not found"
|
||||
)
|
||||
try:
|
||||
# Use .jpeg here since we are generating JPEG images
|
||||
filename = f"agent_{agent_id}.jpeg"
|
||||
|
||||
logger.debug(f"Found {len(result)} library agents")
|
||||
return result
|
||||
image_url = await store_media.check_media_exists(user_id, filename)
|
||||
|
||||
if not image_url:
|
||||
# Generate agent image as JPEG
|
||||
image = await store_image_gen.generate_agent_image(agent=agent)
|
||||
|
||||
# Create UploadFile with the correct filename and content_type
|
||||
image_file = fastapi.UploadFile(
|
||||
file=image,
|
||||
filename=filename,
|
||||
)
|
||||
|
||||
image_url = await store_media.upload_media(
|
||||
user_id=user_id, file=image_file, use_file_name=True
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating agent image: {e}")
|
||||
raise store_exceptions.DatabaseError(
|
||||
"Failed to generate agent image"
|
||||
) from e
|
||||
|
||||
return await prisma.models.LibraryAgent.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"agentId": agent_id,
|
||||
"agentVersion": agent_version,
|
||||
"isCreatedByUser": False,
|
||||
"useGraphIsActiveVersion": True,
|
||||
"Creator": {"connect": {"id": agent.userId}},
|
||||
}
|
||||
)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error getting library agents: {str(e)}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch library agents"
|
||||
logger.error(f"Database error creating agent in library: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to create agent in library") from e
|
||||
|
||||
|
||||
async def update_agent_version_in_library(
|
||||
user_id: str, agent_id: str, agent_version: int
|
||||
) -> None:
|
||||
"""
|
||||
Updates the agent version in the library
|
||||
"""
|
||||
try:
|
||||
library_agent = await prisma.models.LibraryAgent.prisma().find_first_or_raise(
|
||||
where={
|
||||
"userId": user_id,
|
||||
"agentId": agent_id,
|
||||
"useGraphIsActiveVersion": True,
|
||||
},
|
||||
)
|
||||
await prisma.models.LibraryAgent.prisma().update(
|
||||
where={"id": library_agent.id},
|
||||
data={
|
||||
"Agent": {
|
||||
"connect": {
|
||||
"graphVersionId": {"id": agent_id, "version": agent_version}
|
||||
},
|
||||
},
|
||||
},
|
||||
)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error updating agent version in library: {str(e)}")
|
||||
raise store_exceptions.DatabaseError(
|
||||
"Failed to update agent version in library"
|
||||
) from e
|
||||
|
||||
|
||||
async def add_agent_to_library(store_listing_version_id: str, user_id: str) -> None:
|
||||
async def update_library_agent(
|
||||
library_agent_id: str,
|
||||
user_id: str,
|
||||
auto_update_version: bool = False,
|
||||
is_favorite: bool = False,
|
||||
is_archived: bool = False,
|
||||
is_deleted: bool = False,
|
||||
) -> None:
|
||||
"""
|
||||
Finds the agent from the store listing version and adds it to the user's library (UserAgent table)
|
||||
Updates the library agent with the given fields
|
||||
"""
|
||||
try:
|
||||
await prisma.models.LibraryAgent.prisma().update_many(
|
||||
where={"id": library_agent_id, "userId": user_id},
|
||||
data={
|
||||
"useGraphIsActiveVersion": auto_update_version,
|
||||
"isFavorite": is_favorite,
|
||||
"isArchived": is_archived,
|
||||
"isDeleted": is_deleted,
|
||||
},
|
||||
)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error updating library agent: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to update library agent") from e
|
||||
|
||||
|
||||
async def delete_library_agent_by_graph_id(graph_id: str, user_id: str) -> None:
|
||||
"""
|
||||
Deletes a library agent for the given user
|
||||
"""
|
||||
try:
|
||||
await prisma.models.LibraryAgent.prisma().delete_many(
|
||||
where={"agentId": graph_id, "userId": user_id}
|
||||
)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error deleting library agent: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to delete library agent") from e
|
||||
|
||||
|
||||
async def add_store_agent_to_library(
|
||||
store_listing_version_id: str, user_id: str
|
||||
) -> None:
|
||||
"""
|
||||
Finds the agent from the store listing version and adds it to the user's library (LibraryAgent table)
|
||||
if they don't already have it
|
||||
"""
|
||||
logger.debug(
|
||||
f"Adding agent from store listing version {store_listing_version_id} to library for user {user_id}"
|
||||
f"Adding agent from store listing version #{store_listing_version_id} "
|
||||
f"to library for user #{user_id}"
|
||||
)
|
||||
|
||||
try:
|
||||
@@ -116,22 +219,22 @@ async def add_agent_to_library(store_listing_version_id: str, user_id: str) -> N
|
||||
logger.warning(
|
||||
f"Store listing version not found: {store_listing_version_id}"
|
||||
)
|
||||
raise backend.server.v2.store.exceptions.AgentNotFoundError(
|
||||
raise store_exceptions.AgentNotFoundError(
|
||||
f"Store listing version {store_listing_version_id} not found"
|
||||
)
|
||||
|
||||
# We need the agent object to be able to check if
|
||||
# the user_id is the same as the agent's user_id
|
||||
agent = store_listing_version.Agent
|
||||
|
||||
if agent.userId == user_id:
|
||||
logger.warning(
|
||||
f"User {user_id} cannot add their own agent to their library"
|
||||
)
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Cannot add own agent to library"
|
||||
f"User #{user_id} cannot add their own agent to their library"
|
||||
)
|
||||
raise store_exceptions.DatabaseError("Cannot add own agent to library")
|
||||
|
||||
# Check if user already has this agent
|
||||
existing_user_agent = await prisma.models.UserAgent.prisma().find_first(
|
||||
existing_user_agent = await prisma.models.LibraryAgent.prisma().find_first(
|
||||
where={
|
||||
"userId": user_id,
|
||||
"agentId": agent.id,
|
||||
@@ -141,25 +244,138 @@ async def add_agent_to_library(store_listing_version_id: str, user_id: str) -> N
|
||||
|
||||
if existing_user_agent:
|
||||
logger.debug(
|
||||
f"User {user_id} already has agent {agent.id} in their library"
|
||||
f"User #{user_id} already has agent #{agent.id} in their library"
|
||||
)
|
||||
return
|
||||
|
||||
# Create UserAgent entry
|
||||
await prisma.models.UserAgent.prisma().create(
|
||||
data=prisma.types.UserAgentCreateInput(
|
||||
userId=user_id,
|
||||
agentId=agent.id,
|
||||
agentVersion=agent.version,
|
||||
isCreatedByUser=False,
|
||||
)
|
||||
# Create LibraryAgent entry
|
||||
await prisma.models.LibraryAgent.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"agentId": agent.id,
|
||||
"agentVersion": agent.version,
|
||||
"isCreatedByUser": False,
|
||||
}
|
||||
)
|
||||
logger.debug(f"Added agent {agent.id} to library for user {user_id}")
|
||||
logger.debug(f"Added agent #{agent.id} to library for user #{user_id}")
|
||||
|
||||
except backend.server.v2.store.exceptions.AgentNotFoundError:
|
||||
except store_exceptions.AgentNotFoundError:
|
||||
raise
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error adding agent to library: {str(e)}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to add agent to library"
|
||||
) from e
|
||||
raise store_exceptions.DatabaseError("Failed to add agent to library") from e
|
||||
|
||||
|
||||
##############################################
|
||||
########### Presets DB Functions #############
|
||||
##############################################
|
||||
|
||||
|
||||
async def get_presets(
|
||||
user_id: str, page: int, page_size: int
|
||||
) -> library_model.LibraryAgentPresetResponse:
|
||||
try:
|
||||
presets = await prisma.models.AgentPreset.prisma().find_many(
|
||||
where={"userId": user_id},
|
||||
skip=page * page_size,
|
||||
take=page_size,
|
||||
)
|
||||
|
||||
total_items = await prisma.models.AgentPreset.prisma().count(
|
||||
where={"userId": user_id},
|
||||
)
|
||||
total_pages = (total_items + page_size - 1) // page_size
|
||||
|
||||
presets = [
|
||||
library_model.LibraryAgentPreset.from_db(preset) for preset in presets
|
||||
]
|
||||
|
||||
return library_model.LibraryAgentPresetResponse(
|
||||
presets=presets,
|
||||
pagination=backend.server.model.Pagination(
|
||||
total_items=total_items,
|
||||
total_pages=total_pages,
|
||||
current_page=page,
|
||||
page_size=page_size,
|
||||
),
|
||||
)
|
||||
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error getting presets: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to fetch presets") from e
|
||||
|
||||
|
||||
async def get_preset(
|
||||
user_id: str, preset_id: str
|
||||
) -> library_model.LibraryAgentPreset | None:
|
||||
try:
|
||||
preset = await prisma.models.AgentPreset.prisma().find_unique(
|
||||
where={"id": preset_id}, include={"InputPresets": True}
|
||||
)
|
||||
if not preset or preset.userId != user_id:
|
||||
return None
|
||||
return library_model.LibraryAgentPreset.from_db(preset)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error getting preset: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to fetch preset") from e
|
||||
|
||||
|
||||
async def upsert_preset(
|
||||
user_id: str,
|
||||
preset: library_model.CreateLibraryAgentPresetRequest,
|
||||
preset_id: str | None = None,
|
||||
) -> library_model.LibraryAgentPreset:
|
||||
try:
|
||||
if preset_id:
|
||||
# Update existing preset
|
||||
new_preset = await prisma.models.AgentPreset.prisma().update(
|
||||
where={"id": preset_id},
|
||||
data={
|
||||
"name": preset.name,
|
||||
"description": preset.description,
|
||||
"isActive": preset.is_active,
|
||||
"InputPresets": {
|
||||
"create": [
|
||||
{"name": name, "data": prisma.fields.Json(data)}
|
||||
for name, data in preset.inputs.items()
|
||||
]
|
||||
},
|
||||
},
|
||||
include={"InputPresets": True},
|
||||
)
|
||||
if not new_preset:
|
||||
raise ValueError(f"AgentPreset #{preset_id} not found")
|
||||
else:
|
||||
# Create new preset
|
||||
new_preset = await prisma.models.AgentPreset.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"name": preset.name,
|
||||
"description": preset.description,
|
||||
"agentId": preset.agent_id,
|
||||
"agentVersion": preset.agent_version,
|
||||
"isActive": preset.is_active,
|
||||
"InputPresets": {
|
||||
"create": [
|
||||
{"name": name, "data": prisma.fields.Json(data)}
|
||||
for name, data in preset.inputs.items()
|
||||
]
|
||||
},
|
||||
},
|
||||
include={"InputPresets": True},
|
||||
)
|
||||
return library_model.LibraryAgentPreset.from_db(new_preset)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error creating preset: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to create preset") from e
|
||||
|
||||
|
||||
async def delete_preset(user_id: str, preset_id: str) -> None:
|
||||
try:
|
||||
await prisma.models.AgentPreset.prisma().update_many(
|
||||
where={"id": preset_id, "userId": user_id},
|
||||
data={"isDeleted": True},
|
||||
)
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error deleting preset: {str(e)}")
|
||||
raise store_exceptions.DatabaseError("Failed to delete preset") from e
|
||||
|
||||
@@ -5,7 +5,6 @@ import prisma.models
|
||||
import pytest
|
||||
from prisma import Prisma
|
||||
|
||||
import backend.data.includes
|
||||
import backend.server.v2.library.db as db
|
||||
import backend.server.v2.store.exceptions
|
||||
|
||||
@@ -37,7 +36,7 @@ async def test_get_library_agents(mocker):
|
||||
]
|
||||
|
||||
mock_library_agents = [
|
||||
prisma.models.UserAgent(
|
||||
prisma.models.LibraryAgent(
|
||||
id="ua1",
|
||||
userId="test-user",
|
||||
agentId="agent2",
|
||||
@@ -48,6 +47,7 @@ async def test_get_library_agents(mocker):
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
isFavorite=False,
|
||||
useGraphIsActiveVersion=True,
|
||||
Agent=prisma.models.AgentGraph(
|
||||
id="agent2",
|
||||
version=1,
|
||||
@@ -67,8 +67,8 @@ async def test_get_library_agents(mocker):
|
||||
return_value=mock_user_created
|
||||
)
|
||||
|
||||
mock_user_agent = mocker.patch("prisma.models.UserAgent.prisma")
|
||||
mock_user_agent.return_value.find_many = mocker.AsyncMock(
|
||||
mock_library_agent = mocker.patch("prisma.models.LibraryAgent.prisma")
|
||||
mock_library_agent.return_value.find_many = mocker.AsyncMock(
|
||||
return_value=mock_library_agents
|
||||
)
|
||||
|
||||
@@ -76,40 +76,14 @@ async def test_get_library_agents(mocker):
|
||||
result = await db.get_library_agents("test-user")
|
||||
|
||||
# Verify results
|
||||
assert len(result) == 2
|
||||
assert result[0].id == "agent1"
|
||||
assert result[0].name == "Test Agent 1"
|
||||
assert result[0].description == "Test Description 1"
|
||||
assert result[0].isCreatedByUser is True
|
||||
assert result[1].id == "agent2"
|
||||
assert result[1].name == "Test Agent 2"
|
||||
assert result[1].description == "Test Description 2"
|
||||
assert result[1].isCreatedByUser is False
|
||||
|
||||
# Verify mocks called correctly
|
||||
mock_agent_graph.return_value.find_many.assert_called_once_with(
|
||||
where=prisma.types.AgentGraphWhereInput(userId="test-user", isActive=True),
|
||||
include=backend.data.includes.AGENT_GRAPH_INCLUDE,
|
||||
)
|
||||
mock_user_agent.return_value.find_many.assert_called_once_with(
|
||||
where=prisma.types.UserAgentWhereInput(
|
||||
userId="test-user", isDeleted=False, isArchived=False
|
||||
),
|
||||
include={
|
||||
"Agent": {
|
||||
"include": {
|
||||
"AgentNodes": {
|
||||
"include": {
|
||||
"Input": True,
|
||||
"Output": True,
|
||||
"Webhook": True,
|
||||
"AgentBlock": True,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
assert len(result) == 1
|
||||
assert result[0].id == "ua1"
|
||||
assert result[0].name == "Test Agent 2"
|
||||
assert result[0].description == "Test Description 2"
|
||||
assert result[0].agent_id == "agent2"
|
||||
assert result[0].agent_version == 1
|
||||
assert result[0].can_access_graph is False
|
||||
assert result[0].is_latest_version is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@@ -152,26 +126,26 @@ async def test_add_agent_to_library(mocker):
|
||||
return_value=mock_store_listing
|
||||
)
|
||||
|
||||
mock_user_agent = mocker.patch("prisma.models.UserAgent.prisma")
|
||||
mock_user_agent.return_value.find_first = mocker.AsyncMock(return_value=None)
|
||||
mock_user_agent.return_value.create = mocker.AsyncMock()
|
||||
mock_library_agent = mocker.patch("prisma.models.LibraryAgent.prisma")
|
||||
mock_library_agent.return_value.find_first = mocker.AsyncMock(return_value=None)
|
||||
mock_library_agent.return_value.create = mocker.AsyncMock()
|
||||
|
||||
# Call function
|
||||
await db.add_agent_to_library("version123", "test-user")
|
||||
await db.add_store_agent_to_library("version123", "test-user")
|
||||
|
||||
# Verify mocks called correctly
|
||||
mock_store_listing_version.return_value.find_unique.assert_called_once_with(
|
||||
where={"id": "version123"}, include={"Agent": True}
|
||||
)
|
||||
mock_user_agent.return_value.find_first.assert_called_once_with(
|
||||
mock_library_agent.return_value.find_first.assert_called_once_with(
|
||||
where={
|
||||
"userId": "test-user",
|
||||
"agentId": "agent1",
|
||||
"agentVersion": 1,
|
||||
}
|
||||
)
|
||||
mock_user_agent.return_value.create.assert_called_once_with(
|
||||
data=prisma.types.UserAgentCreateInput(
|
||||
mock_library_agent.return_value.create.assert_called_once_with(
|
||||
data=prisma.types.LibraryAgentCreateInput(
|
||||
userId="test-user", agentId="agent1", agentVersion=1, isCreatedByUser=False
|
||||
)
|
||||
)
|
||||
@@ -189,7 +163,7 @@ async def test_add_agent_to_library_not_found(mocker):
|
||||
|
||||
# Call function and verify exception
|
||||
with pytest.raises(backend.server.v2.store.exceptions.AgentNotFoundError):
|
||||
await db.add_agent_to_library("version123", "test-user")
|
||||
await db.add_store_agent_to_library("version123", "test-user")
|
||||
|
||||
# Verify mock called correctly
|
||||
mock_store_listing_version.return_value.find_unique.assert_called_once_with(
|
||||
|
||||
@@ -1,16 +1,174 @@
|
||||
import typing
|
||||
import datetime
|
||||
from enum import Enum
|
||||
from typing import Any
|
||||
|
||||
import prisma.enums
|
||||
import prisma.models
|
||||
import pydantic
|
||||
|
||||
import backend.data.block as block_model
|
||||
import backend.data.graph as graph_model
|
||||
import backend.server.model as server_model
|
||||
|
||||
|
||||
class AgentStatus(str, Enum):
|
||||
# The agent has completed all runs
|
||||
COMPLETED = "COMPLETED"
|
||||
# An agent is running, but not all runs have completed
|
||||
HEALTHY = "HEALTHY"
|
||||
# An agent is waiting to start or waiting for another reason
|
||||
WAITING = "WAITING"
|
||||
# An agent is in an error state
|
||||
ERROR = "ERROR"
|
||||
|
||||
|
||||
class LibraryAgent(pydantic.BaseModel):
|
||||
id: str # Changed from agent_id to match GraphMeta
|
||||
version: int # Changed from agent_version to match GraphMeta
|
||||
is_active: bool # Added to match GraphMeta
|
||||
|
||||
agent_id: str
|
||||
agent_version: int # Changed from agent_version to match GraphMeta
|
||||
|
||||
image_url: str
|
||||
|
||||
creator_name: str # from profile
|
||||
creator_image_url: str # from profile
|
||||
|
||||
status: AgentStatus
|
||||
|
||||
updated_at: datetime.datetime
|
||||
|
||||
name: str # from graph
|
||||
description: str # from graph
|
||||
|
||||
# Made input_schema and output_schema match GraphMeta's type
|
||||
input_schema: dict[str, Any] # Should be BlockIOObjectSubSchema in frontend
|
||||
|
||||
new_output: bool
|
||||
can_access_graph: bool
|
||||
|
||||
is_latest_version: bool
|
||||
|
||||
@staticmethod
|
||||
def from_db(agent: prisma.models.LibraryAgent):
|
||||
if not agent.Agent:
|
||||
raise ValueError("AgentGraph is required")
|
||||
|
||||
graph = graph_model.GraphModel.from_db(agent.Agent)
|
||||
|
||||
agent_updated_at = agent.Agent.updatedAt
|
||||
lib_agent_updated_at = agent.updatedAt
|
||||
|
||||
name = graph.name
|
||||
description = graph.description
|
||||
image_url = agent.image_url if agent.image_url else ""
|
||||
if agent.Creator:
|
||||
creator_name = agent.Creator.name
|
||||
creator_image_url = (
|
||||
agent.Creator.avatarUrl if agent.Creator.avatarUrl else ""
|
||||
)
|
||||
else:
|
||||
creator_name = "Unknown"
|
||||
creator_image_url = ""
|
||||
|
||||
# Take the latest updated_at timestamp either when the graph was updated or the library agent was updated
|
||||
updated_at = (
|
||||
max(agent_updated_at, lib_agent_updated_at)
|
||||
if agent_updated_at
|
||||
else lib_agent_updated_at
|
||||
)
|
||||
|
||||
# Getting counts as expecting more refined logic for determining status
|
||||
status_counts = {status: 0 for status in prisma.enums.AgentExecutionStatus}
|
||||
new_output = False
|
||||
|
||||
runs_since = datetime.datetime.now(datetime.UTC) - datetime.timedelta(days=7)
|
||||
if not agent.Agent.AgentGraphExecution:
|
||||
status = AgentStatus.COMPLETED
|
||||
else:
|
||||
for execution in agent.Agent.AgentGraphExecution:
|
||||
if runs_since > execution.createdAt:
|
||||
if (
|
||||
execution.executionStatus
|
||||
== prisma.enums.AgentExecutionStatus.COMPLETED
|
||||
):
|
||||
new_output = True
|
||||
status_counts[execution.executionStatus] += 1
|
||||
|
||||
if status_counts[prisma.enums.AgentExecutionStatus.FAILED] > 0:
|
||||
status = AgentStatus.ERROR
|
||||
elif status_counts[prisma.enums.AgentExecutionStatus.QUEUED] > 0:
|
||||
status = AgentStatus.WAITING
|
||||
elif status_counts[prisma.enums.AgentExecutionStatus.RUNNING] > 0:
|
||||
status = AgentStatus.HEALTHY
|
||||
else:
|
||||
status = AgentStatus.COMPLETED
|
||||
|
||||
return LibraryAgent(
|
||||
id=agent.id,
|
||||
agent_id=agent.agentId,
|
||||
agent_version=agent.agentVersion,
|
||||
image_url=image_url,
|
||||
creator_name=creator_name,
|
||||
creator_image_url=creator_image_url,
|
||||
name=name,
|
||||
description=description,
|
||||
status=status,
|
||||
updated_at=updated_at,
|
||||
input_schema=graph.input_schema,
|
||||
new_output=new_output,
|
||||
can_access_graph=agent.Agent.userId == agent.userId,
|
||||
# TODO: work out how to calculate this efficiently
|
||||
is_latest_version=True,
|
||||
)
|
||||
|
||||
|
||||
class LibraryAgentResponse:
|
||||
agents: list[LibraryAgent]
|
||||
pagination: server_model.Pagination
|
||||
|
||||
|
||||
class LibraryAgentPreset(pydantic.BaseModel):
|
||||
id: str
|
||||
updated_at: datetime.datetime
|
||||
|
||||
agent_id: str
|
||||
agent_version: int
|
||||
|
||||
name: str
|
||||
description: str
|
||||
|
||||
isCreatedByUser: bool
|
||||
# Made input_schema and output_schema match GraphMeta's type
|
||||
input_schema: dict[str, typing.Any] # Should be BlockIOObjectSubSchema in frontend
|
||||
output_schema: dict[str, typing.Any] # Should be BlockIOObjectSubSchema in frontend
|
||||
is_active: bool
|
||||
|
||||
inputs: block_model.BlockInput
|
||||
|
||||
@staticmethod
|
||||
def from_db(preset: prisma.models.AgentPreset):
|
||||
input_data: block_model.BlockInput = {}
|
||||
|
||||
for preset_input in preset.InputPresets or []:
|
||||
input_data[preset_input.name] = preset_input.data
|
||||
|
||||
return LibraryAgentPreset(
|
||||
id=preset.id,
|
||||
updated_at=preset.updatedAt,
|
||||
agent_id=preset.agentId,
|
||||
agent_version=preset.agentVersion,
|
||||
name=preset.name,
|
||||
description=preset.description,
|
||||
is_active=preset.isActive,
|
||||
inputs=input_data,
|
||||
)
|
||||
|
||||
|
||||
class LibraryAgentPresetResponse(pydantic.BaseModel):
|
||||
presets: list[LibraryAgentPreset]
|
||||
pagination: server_model.Pagination
|
||||
|
||||
|
||||
class CreateLibraryAgentPresetRequest(pydantic.BaseModel):
|
||||
name: str
|
||||
description: str
|
||||
inputs: block_model.BlockInput
|
||||
agent_id: str
|
||||
agent_version: int
|
||||
is_active: bool
|
||||
|
||||
@@ -1,43 +1,40 @@
|
||||
import backend.server.v2.library.model
|
||||
import datetime
|
||||
|
||||
import prisma.fields
|
||||
import prisma.models
|
||||
|
||||
import backend.server.v2.library.model as library_model
|
||||
|
||||
|
||||
def test_library_agent():
|
||||
agent = backend.server.v2.library.model.LibraryAgent(
|
||||
def test_agent_preset_from_db():
|
||||
# Create mock DB agent
|
||||
db_agent = prisma.models.AgentPreset(
|
||||
id="test-agent-123",
|
||||
version=1,
|
||||
is_active=True,
|
||||
createdAt=datetime.datetime.now(),
|
||||
updatedAt=datetime.datetime.now(),
|
||||
agentId="agent-123",
|
||||
agentVersion=1,
|
||||
name="Test Agent",
|
||||
description="Test description",
|
||||
isCreatedByUser=False,
|
||||
input_schema={"type": "object", "properties": {}},
|
||||
output_schema={"type": "object", "properties": {}},
|
||||
description="Test agent description",
|
||||
isActive=True,
|
||||
userId="test-user-123",
|
||||
isDeleted=False,
|
||||
InputPresets=[
|
||||
prisma.models.AgentNodeExecutionInputOutput(
|
||||
id="input-123",
|
||||
time=datetime.datetime.now(),
|
||||
name="input1",
|
||||
data=prisma.fields.Json({"type": "string", "value": "test value"}),
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
# Convert to LibraryAgentPreset
|
||||
agent = library_model.LibraryAgentPreset.from_db(db_agent)
|
||||
|
||||
assert agent.id == "test-agent-123"
|
||||
assert agent.version == 1
|
||||
assert agent.agent_version == 1
|
||||
assert agent.is_active is True
|
||||
assert agent.name == "Test Agent"
|
||||
assert agent.description == "Test description"
|
||||
assert agent.isCreatedByUser is False
|
||||
assert agent.input_schema == {"type": "object", "properties": {}}
|
||||
assert agent.output_schema == {"type": "object", "properties": {}}
|
||||
|
||||
|
||||
def test_library_agent_with_user_created():
|
||||
agent = backend.server.v2.library.model.LibraryAgent(
|
||||
id="user-agent-456",
|
||||
version=2,
|
||||
is_active=True,
|
||||
name="User Created Agent",
|
||||
description="An agent created by the user",
|
||||
isCreatedByUser=True,
|
||||
input_schema={"type": "object", "properties": {}},
|
||||
output_schema={"type": "object", "properties": {}},
|
||||
)
|
||||
assert agent.id == "user-agent-456"
|
||||
assert agent.version == 2
|
||||
assert agent.is_active is True
|
||||
assert agent.name == "User Created Agent"
|
||||
assert agent.description == "An agent created by the user"
|
||||
assert agent.isCreatedByUser is True
|
||||
assert agent.input_schema == {"type": "object", "properties": {}}
|
||||
assert agent.output_schema == {"type": "object", "properties": {}}
|
||||
assert agent.description == "Test agent description"
|
||||
assert agent.inputs == {"input1": {"type": "string", "value": "test value"}}
|
||||
|
||||
@@ -1,123 +0,0 @@
|
||||
import logging
|
||||
import typing
|
||||
|
||||
import autogpt_libs.auth.depends
|
||||
import autogpt_libs.auth.middleware
|
||||
import fastapi
|
||||
import prisma
|
||||
|
||||
import backend.data.graph
|
||||
import backend.integrations.creds_manager
|
||||
import backend.integrations.webhooks.graph_lifecycle_hooks
|
||||
import backend.server.v2.library.db
|
||||
import backend.server.v2.library.model
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = fastapi.APIRouter()
|
||||
integration_creds_manager = (
|
||||
backend.integrations.creds_manager.IntegrationCredentialsManager()
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/agents",
|
||||
tags=["library", "private"],
|
||||
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
|
||||
)
|
||||
async def get_library_agents(
|
||||
user_id: typing.Annotated[
|
||||
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
|
||||
]
|
||||
) -> typing.Sequence[backend.server.v2.library.model.LibraryAgent]:
|
||||
"""
|
||||
Get all agents in the user's library, including both created and saved agents.
|
||||
"""
|
||||
try:
|
||||
agents = await backend.server.v2.library.db.get_library_agents(user_id)
|
||||
return agents
|
||||
except Exception:
|
||||
logger.exception("Exception occurred whilst getting library agents")
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to get library agents"
|
||||
)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/agents/{store_listing_version_id}",
|
||||
tags=["library", "private"],
|
||||
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
|
||||
status_code=201,
|
||||
)
|
||||
async def add_agent_to_library(
|
||||
store_listing_version_id: str,
|
||||
user_id: typing.Annotated[
|
||||
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
|
||||
],
|
||||
) -> fastapi.Response:
|
||||
"""
|
||||
Add an agent from the store to the user's library.
|
||||
|
||||
Args:
|
||||
store_listing_version_id (str): ID of the store listing version to add
|
||||
user_id (str): ID of the authenticated user
|
||||
|
||||
Returns:
|
||||
fastapi.Response: 201 status code on success
|
||||
|
||||
Raises:
|
||||
HTTPException: If there is an error adding the agent to the library
|
||||
"""
|
||||
try:
|
||||
# Get the graph from the store listing
|
||||
store_listing_version = (
|
||||
await prisma.models.StoreListingVersion.prisma().find_unique(
|
||||
where={"id": store_listing_version_id}, include={"Agent": True}
|
||||
)
|
||||
)
|
||||
|
||||
if not store_listing_version or not store_listing_version.Agent:
|
||||
raise fastapi.HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Store listing version {store_listing_version_id} not found",
|
||||
)
|
||||
|
||||
agent = store_listing_version.Agent
|
||||
|
||||
if agent.userId == user_id:
|
||||
raise fastapi.HTTPException(
|
||||
status_code=400, detail="Cannot add own agent to library"
|
||||
)
|
||||
|
||||
# Create a new graph from the template
|
||||
graph = await backend.data.graph.get_graph(
|
||||
agent.id, agent.version, user_id=user_id
|
||||
)
|
||||
|
||||
if not graph:
|
||||
raise fastapi.HTTPException(
|
||||
status_code=404, detail=f"Agent {agent.id} not found"
|
||||
)
|
||||
|
||||
# Create a deep copy with new IDs
|
||||
graph.version = 1
|
||||
graph.is_template = False
|
||||
graph.is_active = True
|
||||
graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
|
||||
|
||||
# Save the new graph
|
||||
graph = await backend.data.graph.create_graph(graph, user_id=user_id)
|
||||
graph = (
|
||||
await backend.integrations.webhooks.graph_lifecycle_hooks.on_graph_activate(
|
||||
graph,
|
||||
get_credentials=lambda id: integration_creds_manager.get(user_id, id),
|
||||
)
|
||||
)
|
||||
|
||||
return fastapi.Response(status_code=201)
|
||||
|
||||
except Exception:
|
||||
logger.exception("Exception occurred whilst adding agent to library")
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to add agent to library"
|
||||
)
|
||||
@@ -0,0 +1,9 @@
|
||||
import fastapi
|
||||
|
||||
from .agents import router as agents_router
|
||||
from .presets import router as presets_router
|
||||
|
||||
router = fastapi.APIRouter()
|
||||
|
||||
router.include_router(presets_router)
|
||||
router.include_router(agents_router)
|
||||
@@ -0,0 +1,138 @@
|
||||
import logging
|
||||
from typing import Annotated, Sequence
|
||||
|
||||
import autogpt_libs.auth as autogpt_auth_lib
|
||||
import fastapi
|
||||
|
||||
import backend.server.v2.library.db as library_db
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.server.v2.store.exceptions as store_exceptions
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = fastapi.APIRouter()
|
||||
|
||||
|
||||
@router.get(
|
||||
"/agents",
|
||||
tags=["library", "private"],
|
||||
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
|
||||
)
|
||||
async def get_library_agents(
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)]
|
||||
) -> Sequence[library_model.LibraryAgent]:
|
||||
"""
|
||||
Get all agents in the user's library, including both created and saved agents.
|
||||
"""
|
||||
try:
|
||||
agents = await library_db.get_library_agents(user_id)
|
||||
return agents
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst getting library agents: {e}")
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to get library agents"
|
||||
)
|
||||
|
||||
|
||||
@router.post(
|
||||
"/agents/{store_listing_version_id}",
|
||||
tags=["library", "private"],
|
||||
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
|
||||
status_code=201,
|
||||
)
|
||||
async def add_agent_to_library(
|
||||
store_listing_version_id: str,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
) -> fastapi.Response:
|
||||
"""
|
||||
Add an agent from the store to the user's library.
|
||||
|
||||
Args:
|
||||
store_listing_version_id (str): ID of the store listing version to add
|
||||
user_id (str): ID of the authenticated user
|
||||
|
||||
Returns:
|
||||
fastapi.Response: 201 status code on success
|
||||
|
||||
Raises:
|
||||
HTTPException: If there is an error adding the agent to the library
|
||||
"""
|
||||
try:
|
||||
# Use the database function to add the agent to the library
|
||||
await library_db.add_store_agent_to_library(store_listing_version_id, user_id)
|
||||
return fastapi.Response(status_code=201)
|
||||
|
||||
except store_exceptions.AgentNotFoundError:
|
||||
raise fastapi.HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Store listing version {store_listing_version_id} not found",
|
||||
)
|
||||
except store_exceptions.DatabaseError as e:
|
||||
logger.exception(f"Database error occurred whilst adding agent to library: {e}")
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to add agent to library"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Unexpected exception occurred whilst adding agent to library: {e}"
|
||||
)
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to add agent to library"
|
||||
)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/agents/{library_agent_id}",
|
||||
tags=["library", "private"],
|
||||
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
|
||||
status_code=204,
|
||||
)
|
||||
async def update_library_agent(
|
||||
library_agent_id: str,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
auto_update_version: bool = False,
|
||||
is_favorite: bool = False,
|
||||
is_archived: bool = False,
|
||||
is_deleted: bool = False,
|
||||
) -> fastapi.Response:
|
||||
"""
|
||||
Update the library agent with the given fields.
|
||||
|
||||
Args:
|
||||
library_agent_id (str): ID of the library agent to update
|
||||
user_id (str): ID of the authenticated user
|
||||
auto_update_version (bool): Whether to auto-update the agent version
|
||||
is_favorite (bool): Whether the agent is marked as favorite
|
||||
is_archived (bool): Whether the agent is archived
|
||||
is_deleted (bool): Whether the agent is deleted
|
||||
|
||||
Returns:
|
||||
fastapi.Response: 204 status code on success
|
||||
|
||||
Raises:
|
||||
HTTPException: If there is an error updating the library agent
|
||||
"""
|
||||
try:
|
||||
# Use the database function to update the library agent
|
||||
await library_db.update_library_agent(
|
||||
library_agent_id,
|
||||
user_id,
|
||||
auto_update_version,
|
||||
is_favorite,
|
||||
is_archived,
|
||||
is_deleted,
|
||||
)
|
||||
return fastapi.Response(status_code=204)
|
||||
|
||||
except store_exceptions.DatabaseError as e:
|
||||
logger.exception(f"Database error occurred whilst updating library agent: {e}")
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to update library agent"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Unexpected exception occurred whilst updating library agent: {e}"
|
||||
)
|
||||
raise fastapi.HTTPException(
|
||||
status_code=500, detail="Failed to update library agent"
|
||||
)
|
||||
@@ -0,0 +1,128 @@
|
||||
import logging
|
||||
from typing import Annotated, Any
|
||||
|
||||
import autogpt_libs.auth as autogpt_auth_lib
|
||||
import autogpt_libs.utils.cache
|
||||
import fastapi
|
||||
|
||||
import backend.executor
|
||||
import backend.server.v2.library.db as library_db
|
||||
import backend.server.v2.library.model as library_model
|
||||
import backend.util.service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = fastapi.APIRouter()
|
||||
|
||||
|
||||
@autogpt_libs.utils.cache.thread_cached
|
||||
def execution_manager_client() -> backend.executor.ExecutionManager:
|
||||
return backend.util.service.get_service_client(backend.executor.ExecutionManager)
|
||||
|
||||
|
||||
@router.get("/presets")
|
||||
async def get_presets(
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
page: int = 1,
|
||||
page_size: int = 10,
|
||||
) -> library_model.LibraryAgentPresetResponse:
|
||||
try:
|
||||
presets = await library_db.get_presets(user_id, page, page_size)
|
||||
return presets
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst getting presets: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="Failed to get presets")
|
||||
|
||||
|
||||
@router.get("/presets/{preset_id}")
|
||||
async def get_preset(
|
||||
preset_id: str,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
) -> library_model.LibraryAgentPreset:
|
||||
try:
|
||||
preset = await library_db.get_preset(user_id, preset_id)
|
||||
if not preset:
|
||||
raise fastapi.HTTPException(
|
||||
status_code=404,
|
||||
detail=f"Preset {preset_id} not found",
|
||||
)
|
||||
return preset
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst getting preset: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="Failed to get preset")
|
||||
|
||||
|
||||
@router.post("/presets")
|
||||
async def create_preset(
|
||||
preset: library_model.CreateLibraryAgentPresetRequest,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
) -> library_model.LibraryAgentPreset:
|
||||
try:
|
||||
return await library_db.upsert_preset(user_id, preset)
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst creating preset: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="Failed to create preset")
|
||||
|
||||
|
||||
@router.put("/presets/{preset_id}")
|
||||
async def update_preset(
|
||||
preset_id: str,
|
||||
preset: library_model.CreateLibraryAgentPresetRequest,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
) -> library_model.LibraryAgentPreset:
|
||||
try:
|
||||
return await library_db.upsert_preset(user_id, preset, preset_id)
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst updating preset: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="Failed to update preset")
|
||||
|
||||
|
||||
@router.delete("/presets/{preset_id}")
|
||||
async def delete_preset(
|
||||
preset_id: str,
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
):
|
||||
try:
|
||||
await library_db.delete_preset(user_id, preset_id)
|
||||
return fastapi.Response(status_code=204)
|
||||
except Exception as e:
|
||||
logger.exception(f"Exception occurred whilst deleting preset: {e}")
|
||||
raise fastapi.HTTPException(status_code=500, detail="Failed to delete preset")
|
||||
|
||||
|
||||
@router.post(
|
||||
path="/presets/{preset_id}/execute",
|
||||
tags=["presets"],
|
||||
dependencies=[fastapi.Depends(autogpt_auth_lib.auth_middleware)],
|
||||
)
|
||||
async def execute_preset(
|
||||
graph_id: str,
|
||||
graph_version: int,
|
||||
preset_id: str,
|
||||
node_input: Annotated[
|
||||
dict[str, Any], fastapi.Body(..., embed=True, default_factory=dict)
|
||||
],
|
||||
user_id: Annotated[str, fastapi.Depends(autogpt_auth_lib.depends.get_user_id)],
|
||||
) -> dict[str, Any]: # FIXME: add proper return type
|
||||
try:
|
||||
preset = await library_db.get_preset(user_id, preset_id)
|
||||
if not preset:
|
||||
raise fastapi.HTTPException(status_code=404, detail="Preset not found")
|
||||
|
||||
# Merge input overrides with preset inputs
|
||||
merged_node_input = preset.inputs | node_input
|
||||
|
||||
execution = execution_manager_client().add_execution(
|
||||
graph_id=graph_id,
|
||||
graph_version=graph_version,
|
||||
data=merged_node_input,
|
||||
user_id=user_id,
|
||||
preset_id=preset_id,
|
||||
)
|
||||
|
||||
logger.debug(f"Execution added: {execution} with input: {merged_node_input}")
|
||||
|
||||
return {"id": execution.graph_exec_id}
|
||||
except Exception as e:
|
||||
msg = str(e).encode().decode("unicode_escape")
|
||||
raise fastapi.HTTPException(status_code=400, detail=msg)
|
||||
@@ -1,16 +1,16 @@
|
||||
import autogpt_libs.auth.depends
|
||||
import autogpt_libs.auth.middleware
|
||||
import datetime
|
||||
|
||||
import autogpt_libs.auth as autogpt_auth_lib
|
||||
import fastapi
|
||||
import fastapi.testclient
|
||||
import pytest
|
||||
import pytest_mock
|
||||
|
||||
import backend.server.v2.library.db
|
||||
import backend.server.v2.library.model
|
||||
import backend.server.v2.library.routes
|
||||
import backend.server.v2.library.model as library_model
|
||||
from backend.server.v2.library.routes import router as library_router
|
||||
|
||||
app = fastapi.FastAPI()
|
||||
app.include_router(backend.server.v2.library.routes.router)
|
||||
app.include_router(library_router)
|
||||
|
||||
client = fastapi.testclient.TestClient(app)
|
||||
|
||||
@@ -25,33 +25,43 @@ def override_get_user_id():
|
||||
return "test-user-id"
|
||||
|
||||
|
||||
app.dependency_overrides[autogpt_libs.auth.middleware.auth_middleware] = (
|
||||
override_auth_middleware
|
||||
)
|
||||
app.dependency_overrides[autogpt_libs.auth.depends.get_user_id] = override_get_user_id
|
||||
app.dependency_overrides[autogpt_auth_lib.auth_middleware] = override_auth_middleware
|
||||
app.dependency_overrides[autogpt_auth_lib.depends.get_user_id] = override_get_user_id
|
||||
|
||||
|
||||
def test_get_library_agents_success(mocker: pytest_mock.MockFixture):
|
||||
mocked_value = [
|
||||
backend.server.v2.library.model.LibraryAgent(
|
||||
library_model.LibraryAgent(
|
||||
id="test-agent-1",
|
||||
version=1,
|
||||
is_active=True,
|
||||
agent_id="test-agent-1",
|
||||
agent_version=1,
|
||||
name="Test Agent 1",
|
||||
description="Test Description 1",
|
||||
isCreatedByUser=True,
|
||||
image_url="",
|
||||
creator_name="Test Creator",
|
||||
creator_image_url="",
|
||||
input_schema={"type": "object", "properties": {}},
|
||||
output_schema={"type": "object", "properties": {}},
|
||||
status=library_model.AgentStatus.COMPLETED,
|
||||
new_output=False,
|
||||
can_access_graph=True,
|
||||
is_latest_version=True,
|
||||
updated_at=datetime.datetime(2023, 1, 1, 0, 0, 0),
|
||||
),
|
||||
backend.server.v2.library.model.LibraryAgent(
|
||||
library_model.LibraryAgent(
|
||||
id="test-agent-2",
|
||||
version=1,
|
||||
is_active=True,
|
||||
agent_id="test-agent-2",
|
||||
agent_version=1,
|
||||
name="Test Agent 2",
|
||||
description="Test Description 2",
|
||||
isCreatedByUser=False,
|
||||
image_url="",
|
||||
creator_name="Test Creator",
|
||||
creator_image_url="",
|
||||
input_schema={"type": "object", "properties": {}},
|
||||
output_schema={"type": "object", "properties": {}},
|
||||
status=library_model.AgentStatus.COMPLETED,
|
||||
new_output=False,
|
||||
can_access_graph=False,
|
||||
is_latest_version=True,
|
||||
updated_at=datetime.datetime(2023, 1, 1, 0, 0, 0),
|
||||
),
|
||||
]
|
||||
mock_db_call = mocker.patch("backend.server.v2.library.db.get_library_agents")
|
||||
@@ -61,14 +71,13 @@ def test_get_library_agents_success(mocker: pytest_mock.MockFixture):
|
||||
assert response.status_code == 200
|
||||
|
||||
data = [
|
||||
backend.server.v2.library.model.LibraryAgent.model_validate(agent)
|
||||
for agent in response.json()
|
||||
library_model.LibraryAgent.model_validate(agent) for agent in response.json()
|
||||
]
|
||||
assert len(data) == 2
|
||||
assert data[0].id == "test-agent-1"
|
||||
assert data[0].isCreatedByUser is True
|
||||
assert data[1].id == "test-agent-2"
|
||||
assert data[1].isCreatedByUser is False
|
||||
assert data[0].agent_id == "test-agent-1"
|
||||
assert data[0].can_access_graph is True
|
||||
assert data[1].agent_id == "test-agent-2"
|
||||
assert data[1].can_access_graph is False
|
||||
mock_db_call.assert_called_once_with("test-user-id")
|
||||
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import logging
|
||||
import random
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
@@ -17,6 +16,25 @@ from backend.data.graph import GraphModel
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def sanitize_query(query: str | None) -> str | None:
|
||||
if query is None:
|
||||
return query
|
||||
query = query.strip()[:100]
|
||||
return (
|
||||
query.replace("\\", "\\\\")
|
||||
.replace("%", "\\%")
|
||||
.replace("_", "\\_")
|
||||
.replace("[", "\\[")
|
||||
.replace("]", "\\]")
|
||||
.replace("'", "\\'")
|
||||
.replace('"', '\\"')
|
||||
.replace(";", "\\;")
|
||||
.replace("--", "\\--")
|
||||
.replace("/*", "\\/*")
|
||||
.replace("*/", "\\*/")
|
||||
)
|
||||
|
||||
|
||||
async def get_store_agents(
|
||||
featured: bool = False,
|
||||
creator: str | None = None,
|
||||
@@ -29,29 +47,7 @@ async def get_store_agents(
|
||||
logger.debug(
|
||||
f"Getting store agents. featured={featured}, creator={creator}, sorted_by={sorted_by}, search={search_query}, category={category}, page={page}"
|
||||
)
|
||||
sanitized_query = None
|
||||
# Sanitize and validate search query by escaping special characters
|
||||
if search_query is not None:
|
||||
sanitized_query = search_query.strip()
|
||||
if not sanitized_query or len(sanitized_query) > 100: # Reasonable length limit
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
f"Invalid search query: len({len(sanitized_query)}) query: {search_query}"
|
||||
)
|
||||
|
||||
# Escape special SQL characters
|
||||
sanitized_query = (
|
||||
sanitized_query.replace("\\", "\\\\")
|
||||
.replace("%", "\\%")
|
||||
.replace("_", "\\_")
|
||||
.replace("[", "\\[")
|
||||
.replace("]", "\\]")
|
||||
.replace("'", "\\'")
|
||||
.replace('"', '\\"')
|
||||
.replace(";", "\\;")
|
||||
.replace("--", "\\--")
|
||||
.replace("/*", "\\/*")
|
||||
.replace("*/", "\\*/")
|
||||
)
|
||||
sanitized_query = sanitize_query(search_query)
|
||||
|
||||
where_clause = {}
|
||||
if featured:
|
||||
@@ -93,8 +89,8 @@ async def get_store_agents(
|
||||
slug=agent.slug,
|
||||
agent_name=agent.agent_name,
|
||||
agent_image=agent.agent_image[0] if agent.agent_image else "",
|
||||
creator=agent.creator_username,
|
||||
creator_avatar=agent.creator_avatar,
|
||||
creator=agent.creator_username or "Needs Profile",
|
||||
creator_avatar=agent.creator_avatar or "",
|
||||
sub_heading=agent.sub_heading,
|
||||
description=agent.description,
|
||||
runs=agent.runs,
|
||||
@@ -114,7 +110,7 @@ async def get_store_agents(
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting store agents: {str(e)}")
|
||||
logger.error(f"Error getting store agents: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch store agents"
|
||||
) from e
|
||||
@@ -156,7 +152,7 @@ async def get_store_agent_details(
|
||||
except backend.server.v2.store.exceptions.AgentNotFoundError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting store agent details: {str(e)}")
|
||||
logger.error(f"Error getting store agent details: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch agent details"
|
||||
) from e
|
||||
@@ -270,7 +266,7 @@ async def get_store_creators(
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting store creators: {str(e)}")
|
||||
logger.error(f"Error getting store creators: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch store creators"
|
||||
) from e
|
||||
@@ -307,7 +303,7 @@ async def get_store_creator_details(
|
||||
except backend.server.v2.store.exceptions.CreatorNotFoundError:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting store creator details: {str(e)}")
|
||||
logger.error(f"Error getting store creator details: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch creator details"
|
||||
) from e
|
||||
@@ -366,7 +362,7 @@ async def get_store_submissions(
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching store submissions: {str(e)}")
|
||||
logger.error(f"Error fetching store submissions: {e}")
|
||||
# Return empty response rather than exposing internal errors
|
||||
return backend.server.v2.store.model.StoreSubmissionsResponse(
|
||||
submissions=[],
|
||||
@@ -416,7 +412,7 @@ async def delete_store_submission(
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting store submission: {str(e)}")
|
||||
logger.error(f"Error deleting store submission: {e}")
|
||||
return False
|
||||
|
||||
|
||||
@@ -539,7 +535,7 @@ async def create_store_submission(
|
||||
):
|
||||
raise
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error creating store submission: {str(e)}")
|
||||
logger.error(f"Database error creating store submission: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to create store submission"
|
||||
) from e
|
||||
@@ -579,7 +575,7 @@ async def create_store_review(
|
||||
)
|
||||
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error creating store review: {str(e)}")
|
||||
logger.error(f"Database error creating store review: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to create store review"
|
||||
) from e
|
||||
@@ -587,7 +583,7 @@ async def create_store_review(
|
||||
|
||||
async def get_user_profile(
|
||||
user_id: str,
|
||||
) -> backend.server.v2.store.model.ProfileDetails:
|
||||
) -> backend.server.v2.store.model.ProfileDetails | None:
|
||||
logger.debug(f"Getting user profile for {user_id}")
|
||||
|
||||
try:
|
||||
@@ -596,25 +592,7 @@ async def get_user_profile(
|
||||
)
|
||||
|
||||
if not profile:
|
||||
logger.warning(f"Profile not found for user {user_id}")
|
||||
new_profile = await prisma.models.Profile.prisma().create(
|
||||
data=prisma.types.ProfileCreateInput(
|
||||
userId=user_id,
|
||||
name="No Profile Data",
|
||||
username=f"{random.choice(['happy', 'clever', 'swift', 'bright', 'wise'])}-{random.choice(['fox', 'wolf', 'bear', 'eagle', 'owl'])}_{random.randint(1000,9999)}".lower(),
|
||||
description="No Profile Data",
|
||||
links=[],
|
||||
avatarUrl="",
|
||||
)
|
||||
)
|
||||
return backend.server.v2.store.model.ProfileDetails(
|
||||
name=new_profile.name,
|
||||
username=new_profile.username,
|
||||
description=new_profile.description,
|
||||
links=new_profile.links,
|
||||
avatar_url=new_profile.avatarUrl,
|
||||
)
|
||||
|
||||
return None
|
||||
return backend.server.v2.store.model.ProfileDetails(
|
||||
name=profile.name,
|
||||
username=profile.username,
|
||||
@@ -623,115 +601,90 @@ async def get_user_profile(
|
||||
avatar_url=profile.avatarUrl,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting user profile: {str(e)}")
|
||||
return backend.server.v2.store.model.ProfileDetails(
|
||||
name="No Profile Data",
|
||||
username="No Profile Data",
|
||||
description="No Profile Data",
|
||||
links=[],
|
||||
avatar_url="",
|
||||
)
|
||||
logger.error("Error getting user profile: %s", e)
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to get user profile"
|
||||
) from e
|
||||
|
||||
|
||||
async def update_or_create_profile(
|
||||
async def update_profile(
|
||||
user_id: str, profile: backend.server.v2.store.model.Profile
|
||||
) -> backend.server.v2.store.model.CreatorDetails:
|
||||
"""
|
||||
Update the store profile for a user. Creates a new profile if one doesn't exist.
|
||||
Only allows updating if the user_id matches the owning user.
|
||||
If a field is None, it will not overwrite the existing value in the case of an update.
|
||||
|
||||
Update the store profile for a user or create a new one if it doesn't exist.
|
||||
Args:
|
||||
user_id: ID of the authenticated user
|
||||
profile: Updated profile details
|
||||
|
||||
Returns:
|
||||
CreatorDetails: The updated profile
|
||||
|
||||
CreatorDetails: The updated or created profile details
|
||||
Raises:
|
||||
HTTPException: If user is not authorized to update this profile
|
||||
DatabaseError: If profile cannot be updated due to database issues
|
||||
DatabaseError: If there's an issue updating or creating the profile
|
||||
"""
|
||||
logger.info(f"Updating profile for user {user_id} data: {profile}")
|
||||
|
||||
logger.info("Updating profile for user %s with data: %s", user_id, profile)
|
||||
try:
|
||||
# Sanitize username to only allow letters and hyphens
|
||||
# Sanitize username to allow only letters, numbers, and hyphens
|
||||
username = "".join(
|
||||
c if c.isalpha() or c == "-" or c.isnumeric() else ""
|
||||
for c in profile.username
|
||||
).lower()
|
||||
|
||||
# Check if profile exists for the given user_id
|
||||
existing_profile = await prisma.models.Profile.prisma().find_first(
|
||||
where={"userId": user_id}
|
||||
)
|
||||
|
||||
# If no profile exists, create a new one
|
||||
if not existing_profile:
|
||||
logger.debug(
|
||||
f"No existing profile found. Creating new profile for user {user_id}"
|
||||
)
|
||||
# Create new profile since one doesn't exist
|
||||
new_profile = await prisma.models.Profile.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"name": profile.name,
|
||||
"username": username,
|
||||
"description": profile.description,
|
||||
"links": profile.links or [],
|
||||
"avatarUrl": profile.avatar_url,
|
||||
"isFeatured": False,
|
||||
}
|
||||
raise backend.server.v2.store.exceptions.ProfileNotFoundError(
|
||||
f"Profile not found for user {user_id}. This should not be possible."
|
||||
)
|
||||
|
||||
return backend.server.v2.store.model.CreatorDetails(
|
||||
name=new_profile.name,
|
||||
username=new_profile.username,
|
||||
description=new_profile.description,
|
||||
links=new_profile.links,
|
||||
avatar_url=new_profile.avatarUrl or "",
|
||||
agent_rating=0.0,
|
||||
agent_runs=0,
|
||||
top_categories=[],
|
||||
# Verify that the user is authorized to update this profile
|
||||
if existing_profile.userId != user_id:
|
||||
logger.error(
|
||||
"Unauthorized update attempt for profile %s by user %s",
|
||||
existing_profile.userId,
|
||||
user_id,
|
||||
)
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
f"Unauthorized update attempt for profile {existing_profile.id} by user {user_id}"
|
||||
)
|
||||
else:
|
||||
logger.debug(f"Updating existing profile for user {user_id}")
|
||||
# Update only provided fields for the existing profile
|
||||
update_data = {}
|
||||
if profile.name is not None:
|
||||
update_data["name"] = profile.name
|
||||
if profile.username is not None:
|
||||
update_data["username"] = username
|
||||
if profile.description is not None:
|
||||
update_data["description"] = profile.description
|
||||
if profile.links is not None:
|
||||
update_data["links"] = profile.links
|
||||
if profile.avatar_url is not None:
|
||||
update_data["avatarUrl"] = profile.avatar_url
|
||||
|
||||
# Update the existing profile
|
||||
updated_profile = await prisma.models.Profile.prisma().update(
|
||||
where={"id": existing_profile.id},
|
||||
data=prisma.types.ProfileUpdateInput(**update_data),
|
||||
)
|
||||
if updated_profile is None:
|
||||
logger.error(f"Failed to update profile for user {user_id}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to update profile"
|
||||
)
|
||||
logger.debug("Updating existing profile for user %s", user_id)
|
||||
# Prepare update data, only including non-None values
|
||||
update_data = {}
|
||||
if profile.name is not None:
|
||||
update_data["name"] = profile.name
|
||||
if profile.username is not None:
|
||||
update_data["username"] = username
|
||||
if profile.description is not None:
|
||||
update_data["description"] = profile.description
|
||||
if profile.links is not None:
|
||||
update_data["links"] = profile.links
|
||||
if profile.avatar_url is not None:
|
||||
update_data["avatarUrl"] = profile.avatar_url
|
||||
|
||||
return backend.server.v2.store.model.CreatorDetails(
|
||||
name=updated_profile.name,
|
||||
username=updated_profile.username,
|
||||
description=updated_profile.description,
|
||||
links=updated_profile.links,
|
||||
avatar_url=updated_profile.avatarUrl or "",
|
||||
agent_rating=0.0,
|
||||
agent_runs=0,
|
||||
top_categories=[],
|
||||
# Update the existing profile
|
||||
updated_profile = await prisma.models.Profile.prisma().update(
|
||||
where={"id": existing_profile.id},
|
||||
data=prisma.types.ProfileUpdateInput(**update_data),
|
||||
)
|
||||
if updated_profile is None:
|
||||
logger.error("Failed to update profile for user %s", user_id)
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to update profile"
|
||||
)
|
||||
|
||||
return backend.server.v2.store.model.CreatorDetails(
|
||||
name=updated_profile.name,
|
||||
username=updated_profile.username,
|
||||
description=updated_profile.description,
|
||||
links=updated_profile.links,
|
||||
avatar_url=updated_profile.avatarUrl or "",
|
||||
agent_rating=0.0,
|
||||
agent_runs=0,
|
||||
top_categories=[],
|
||||
)
|
||||
|
||||
except prisma.errors.PrismaError as e:
|
||||
logger.error(f"Database error updating profile: {str(e)}")
|
||||
logger.error("Database error updating profile: %s", e)
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to update profile"
|
||||
) from e
|
||||
@@ -796,7 +749,7 @@ async def get_my_agents(
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting my agents: {str(e)}")
|
||||
logger.error(f"Error getting my agents: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch my agents"
|
||||
) from e
|
||||
@@ -809,7 +762,7 @@ async def get_agent(
|
||||
try:
|
||||
store_listing_version = (
|
||||
await prisma.models.StoreListingVersion.prisma().find_unique(
|
||||
where={"id": store_listing_version_id}
|
||||
where={"id": store_listing_version_id}, include={"Agent": True}
|
||||
)
|
||||
)
|
||||
|
||||
@@ -840,7 +793,7 @@ async def get_agent(
|
||||
return graph
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting agent: {str(e)}")
|
||||
logger.error(f"Error getting agent: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to fetch agent"
|
||||
) from e
|
||||
@@ -905,7 +858,7 @@ async def review_store_submission(
|
||||
return submission
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Could not create store submission review: {str(e)}")
|
||||
logger.error(f"Could not create store submission review: {e}")
|
||||
raise backend.server.v2.store.exceptions.DatabaseError(
|
||||
"Failed to create store submission review"
|
||||
) from e
|
||||
|
||||
@@ -158,6 +158,26 @@ async def test_create_store_submission(mocker):
|
||||
agentId="agent-id",
|
||||
agentVersion=1,
|
||||
owningUserId="user-id",
|
||||
StoreListingVersions=[
|
||||
prisma.models.StoreListingVersion(
|
||||
id="version-id",
|
||||
agentId="agent-id",
|
||||
agentVersion=1,
|
||||
slug="test-agent",
|
||||
name="Test Agent",
|
||||
description="Test description",
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
subHeading="Test heading",
|
||||
imageUrls=["image.jpg"],
|
||||
categories=["test"],
|
||||
isFeatured=False,
|
||||
isDeleted=False,
|
||||
version=1,
|
||||
isAvailable=True,
|
||||
isApproved=False,
|
||||
)
|
||||
],
|
||||
)
|
||||
|
||||
# Mock prisma calls
|
||||
@@ -181,6 +201,7 @@ async def test_create_store_submission(mocker):
|
||||
# Verify results
|
||||
assert result.name == "Test Agent"
|
||||
assert result.description == "Test description"
|
||||
assert result.store_listing_version_id == "version-id"
|
||||
|
||||
# Verify mocks called correctly
|
||||
mock_agent_graph.return_value.find_first.assert_called_once()
|
||||
@@ -195,6 +216,7 @@ async def test_update_profile(mocker):
|
||||
id="profile-id",
|
||||
name="Test Creator",
|
||||
username="creator",
|
||||
userId="user-id",
|
||||
description="Test description",
|
||||
links=["link1"],
|
||||
avatarUrl="avatar.jpg",
|
||||
@@ -221,7 +243,7 @@ async def test_update_profile(mocker):
|
||||
)
|
||||
|
||||
# Call function
|
||||
result = await db.update_or_create_profile("user-id", profile)
|
||||
result = await db.update_profile("user-id", profile)
|
||||
|
||||
# Verify results
|
||||
assert result.username == "creator"
|
||||
@@ -237,7 +259,7 @@ async def test_get_user_profile(mocker):
|
||||
# Mock data
|
||||
mock_profile = prisma.models.Profile(
|
||||
id="profile-id",
|
||||
name="No Profile Data",
|
||||
name="Test User",
|
||||
username="testuser",
|
||||
description="Test description",
|
||||
links=["link1", "link2"],
|
||||
@@ -245,20 +267,22 @@ async def test_get_user_profile(mocker):
|
||||
isFeatured=False,
|
||||
createdAt=datetime.now(),
|
||||
updatedAt=datetime.now(),
|
||||
userId="user-id",
|
||||
)
|
||||
|
||||
# Mock prisma calls
|
||||
mock_profile_db = mocker.patch("prisma.models.Profile.prisma")
|
||||
mock_profile_db.return_value.find_unique = mocker.AsyncMock(
|
||||
mock_profile_db.return_value.find_first = mocker.AsyncMock(
|
||||
return_value=mock_profile
|
||||
)
|
||||
|
||||
# Call function
|
||||
result = await db.get_user_profile("user-id")
|
||||
|
||||
assert result is not None
|
||||
# Verify results
|
||||
assert result.name == "No Profile Data"
|
||||
assert result.username == "No Profile Data"
|
||||
assert result.description == "No Profile Data"
|
||||
assert result.links == []
|
||||
assert result.avatar_url == ""
|
||||
assert result.name == "Test User"
|
||||
assert result.username == "testuser"
|
||||
assert result.description == "Test description"
|
||||
assert result.links == ["link1", "link2"]
|
||||
assert result.avatar_url == "avatar.jpg"
|
||||
|
||||
@@ -5,6 +5,7 @@ from enum import Enum
|
||||
import replicate
|
||||
import replicate.exceptions
|
||||
import requests
|
||||
from prisma.models import AgentGraph
|
||||
from replicate.helpers import FileOutput
|
||||
|
||||
from backend.data.graph import Graph
|
||||
@@ -21,7 +22,7 @@ class ImageStyle(str, Enum):
|
||||
DIGITAL_ART = "digital art"
|
||||
|
||||
|
||||
async def generate_agent_image(agent: Graph) -> io.BytesIO:
|
||||
async def generate_agent_image(agent: Graph | AgentGraph) -> io.BytesIO:
|
||||
"""
|
||||
Generate an image for an agent using Flux model via Replicate API.
|
||||
|
||||
|
||||
@@ -42,6 +42,11 @@ async def get_profile(
|
||||
"""
|
||||
try:
|
||||
profile = await backend.server.v2.store.db.get_user_profile(user_id)
|
||||
if profile is None:
|
||||
return fastapi.responses.JSONResponse(
|
||||
status_code=404,
|
||||
content={"detail": "Profile not found"},
|
||||
)
|
||||
return profile
|
||||
except Exception:
|
||||
logger.exception("Exception occurred whilst getting user profile")
|
||||
@@ -77,7 +82,7 @@ async def update_or_create_profile(
|
||||
HTTPException: If there is an error updating the profile
|
||||
"""
|
||||
try:
|
||||
updated_profile = await backend.server.v2.store.db.update_or_create_profile(
|
||||
updated_profile = await backend.server.v2.store.db.update_profile(
|
||||
user_id=user_id, profile=profile
|
||||
)
|
||||
return updated_profile
|
||||
|
||||
@@ -86,13 +86,13 @@ async def handle_subscribe(
|
||||
)
|
||||
else:
|
||||
ex_sub = ExecutionSubscription.model_validate(message.data)
|
||||
await manager.subscribe(ex_sub.graph_id, websocket)
|
||||
await manager.subscribe(ex_sub.graph_id, ex_sub.graph_version, websocket)
|
||||
logger.debug(f"New execution subscription for graph {ex_sub.graph_id}")
|
||||
await websocket.send_text(
|
||||
WsMessage(
|
||||
method=Methods.SUBSCRIBE,
|
||||
success=True,
|
||||
channel=ex_sub.graph_id,
|
||||
channel=f"{ex_sub.graph_id}_{ex_sub.graph_version}",
|
||||
).model_dump_json()
|
||||
)
|
||||
|
||||
@@ -110,13 +110,13 @@ async def handle_unsubscribe(
|
||||
)
|
||||
else:
|
||||
ex_sub = ExecutionSubscription.model_validate(message.data)
|
||||
await manager.unsubscribe(ex_sub.graph_id, websocket)
|
||||
await manager.unsubscribe(ex_sub.graph_id, ex_sub.graph_version, websocket)
|
||||
logger.debug(f"Removed execution subscription for graph {ex_sub.graph_id}")
|
||||
await websocket.send_text(
|
||||
WsMessage(
|
||||
method=Methods.UNSUBSCRIBE,
|
||||
success=True,
|
||||
channel=ex_sub.graph_id,
|
||||
channel=f"{ex_sub.graph_id}_{ex_sub.graph_version}",
|
||||
).model_dump_json()
|
||||
)
|
||||
|
||||
|
||||
@@ -253,12 +253,14 @@ async def block_autogen_agent():
|
||||
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
|
||||
input_data = {"input": "Write me a block that writes a string into a file."}
|
||||
response = await server.agent_server.test_execute_graph(
|
||||
test_graph.id, input_data, test_user.id
|
||||
graph_id=test_graph.id,
|
||||
user_id=test_user.id,
|
||||
node_input=input_data,
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(
|
||||
graph_id=test_graph.id,
|
||||
graph_exec_id=response["id"],
|
||||
graph_exec_id=response.graph_exec_id,
|
||||
timeout=1200,
|
||||
user_id=test_user.id,
|
||||
)
|
||||
|
||||
@@ -157,10 +157,14 @@ async def reddit_marketing_agent():
|
||||
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
|
||||
input_data = {"subreddit": "AutoGPT"}
|
||||
response = await server.agent_server.test_execute_graph(
|
||||
test_graph.id, input_data, test_user.id
|
||||
graph_id=test_graph.id,
|
||||
user_id=test_user.id,
|
||||
node_input=input_data,
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(test_user.id, test_graph.id, response["id"], 120)
|
||||
result = await wait_execution(
|
||||
test_user.id, test_graph.id, response.graph_exec_id, 120
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
|
||||
@@ -86,10 +86,14 @@ async def sample_agent():
|
||||
test_graph = await create_graph(create_test_graph(), test_user.id)
|
||||
input_data = {"input_1": "Hello", "input_2": "World"}
|
||||
response = await server.agent_server.test_execute_graph(
|
||||
test_graph.id, input_data, test_user.id
|
||||
graph_id=test_graph.id,
|
||||
user_id=test_user.id,
|
||||
node_input=input_data,
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(test_user.id, test_graph.id, response["id"], 10)
|
||||
result = await wait_execution(
|
||||
test_user.id, test_graph.id, response.graph_exec_id, 10
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
|
||||
@@ -18,6 +18,7 @@ from typing import (
|
||||
FrozenSet,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Set,
|
||||
Tuple,
|
||||
Type,
|
||||
@@ -33,7 +34,7 @@ from pydantic import BaseModel
|
||||
from Pyro5 import api as pyro
|
||||
from Pyro5 import config as pyro_config
|
||||
|
||||
from backend.data import db, redis
|
||||
from backend.data import db, rabbitmq, redis
|
||||
from backend.util.process import AppProcess
|
||||
from backend.util.retry import conn_retry
|
||||
from backend.util.settings import Config, Secrets
|
||||
@@ -61,7 +62,7 @@ def expose(func: C) -> C:
|
||||
try:
|
||||
return func(*args, **kwargs)
|
||||
except Exception as e:
|
||||
msg = f"Error in {func.__name__}: {e.__str__()}"
|
||||
msg = f"Error in {func.__name__}: {e}"
|
||||
if isinstance(e, ValueError):
|
||||
logger.warning(msg)
|
||||
else:
|
||||
@@ -79,7 +80,7 @@ def register_pydantic_serializers(func: Callable):
|
||||
try:
|
||||
pydantic_types = _pydantic_models_from_type_annotation(annotation)
|
||||
except Exception as e:
|
||||
raise TypeError(f"Error while exposing {func.__name__}: {e.__str__()}")
|
||||
raise TypeError(f"Error while exposing {func.__name__}: {e}")
|
||||
|
||||
for model in pydantic_types:
|
||||
logger.debug(
|
||||
@@ -116,6 +117,9 @@ class AppService(AppProcess, ABC):
|
||||
shared_event_loop: asyncio.AbstractEventLoop
|
||||
use_db: bool = False
|
||||
use_redis: bool = False
|
||||
use_async: bool = False
|
||||
use_rabbitmq: Optional[rabbitmq.RabbitMQConfig] = None
|
||||
rabbitmq_service: Optional[rabbitmq.SyncRabbitMQ | rabbitmq.AsyncRabbitMQ] = None
|
||||
use_supabase: bool = False
|
||||
|
||||
def __init__(self):
|
||||
@@ -130,6 +134,20 @@ class AppService(AppProcess, ABC):
|
||||
def get_host(cls) -> str:
|
||||
return os.environ.get(f"{cls.service_name.upper()}_HOST", config.pyro_host)
|
||||
|
||||
@property
|
||||
def rabbit(self) -> rabbitmq.SyncRabbitMQ | rabbitmq.AsyncRabbitMQ:
|
||||
"""Access the RabbitMQ service. Will raise if not configured."""
|
||||
if not self.rabbitmq_service:
|
||||
raise RuntimeError("RabbitMQ not configured for this service")
|
||||
return self.rabbitmq_service
|
||||
|
||||
@property
|
||||
def rabbit_config(self) -> rabbitmq.RabbitMQConfig:
|
||||
"""Access the RabbitMQ config. Will raise if not configured."""
|
||||
if not self.use_rabbitmq:
|
||||
raise RuntimeError("RabbitMQ not configured for this service")
|
||||
return self.use_rabbitmq
|
||||
|
||||
def run_service(self) -> None:
|
||||
while True:
|
||||
time.sleep(10)
|
||||
@@ -147,6 +165,16 @@ class AppService(AppProcess, ABC):
|
||||
self.shared_event_loop.run_until_complete(db.connect())
|
||||
if self.use_redis:
|
||||
redis.connect()
|
||||
if self.use_rabbitmq:
|
||||
logger.info(f"[{self.__class__.__name__}] ⏳ Configuring RabbitMQ...")
|
||||
if self.use_async:
|
||||
self.rabbitmq_service = rabbitmq.AsyncRabbitMQ(self.use_rabbitmq)
|
||||
self.shared_event_loop.run_until_complete(
|
||||
self.rabbitmq_service.connect()
|
||||
)
|
||||
else:
|
||||
self.rabbitmq_service = rabbitmq.SyncRabbitMQ(self.use_rabbitmq)
|
||||
self.rabbitmq_service.connect()
|
||||
if self.use_supabase:
|
||||
from supabase import create_client
|
||||
|
||||
@@ -175,6 +203,8 @@ class AppService(AppProcess, ABC):
|
||||
if self.use_redis:
|
||||
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting Redis...")
|
||||
redis.disconnect()
|
||||
if self.use_rabbitmq:
|
||||
logger.info(f"[{self.__class__.__name__}] ⏳ Disconnecting RabbitMQ...")
|
||||
|
||||
@conn_retry("Pyro", "Starting Pyro Service")
|
||||
def __start_pyro(self):
|
||||
|
||||
@@ -167,6 +167,20 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
|
||||
description="The pool size for the scheduler database connection pool",
|
||||
)
|
||||
|
||||
rabbitmq_host: str = Field(
|
||||
default="localhost",
|
||||
description="The host for the RabbitMQ server",
|
||||
)
|
||||
rabbitmq_port: int = Field(
|
||||
default=5672,
|
||||
description="The port for the RabbitMQ server",
|
||||
)
|
||||
|
||||
rabbitmq_vhost: str = Field(
|
||||
default="/",
|
||||
description="The vhost for the RabbitMQ server",
|
||||
)
|
||||
|
||||
@field_validator("platform_base_url", "frontend_base_url")
|
||||
@classmethod
|
||||
def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
|
||||
@@ -258,6 +272,11 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||
|
||||
encryption_key: str = Field(default="", description="Encryption key")
|
||||
|
||||
rabbitmq_default_user: str = Field(default="", description="RabbitMQ default user")
|
||||
rabbitmq_default_pass: str = Field(
|
||||
default="", description="RabbitMQ default password"
|
||||
)
|
||||
|
||||
# OAuth server credentials for integrations
|
||||
# --8<-- [start:OAuthServerCredentialsExample]
|
||||
github_client_id: str = Field(default="", description="GitHub OAuth client ID")
|
||||
@@ -320,9 +339,14 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
|
||||
linear_client_id: str = Field(default="", description="Linear client ID")
|
||||
linear_client_secret: str = Field(default="", description="Linear client secret")
|
||||
|
||||
todoist_client_id: str = Field(default="", description="Todoist client ID")
|
||||
todoist_client_secret: str = Field(default="", description="Todoist client secret")
|
||||
|
||||
stripe_api_key: str = Field(default="", description="Stripe API Key")
|
||||
stripe_webhook_secret: str = Field(default="", description="Stripe Webhook Secret")
|
||||
|
||||
screenshotone_api_key: str = Field(default="", description="ScreenshotOne API Key")
|
||||
|
||||
# Add more secret fields as needed
|
||||
|
||||
model_config = SettingsConfigDict(
|
||||
|
||||
@@ -106,11 +106,11 @@ def execute_block_test(block: Block):
|
||||
|
||||
# Populate credentials argument(s)
|
||||
extra_exec_kwargs: dict = {
|
||||
"graph_id": uuid.uuid4(),
|
||||
"node_id": uuid.uuid4(),
|
||||
"graph_exec_id": uuid.uuid4(),
|
||||
"node_exec_id": uuid.uuid4(),
|
||||
"user_id": uuid.uuid4(),
|
||||
"graph_id": str(uuid.uuid4()),
|
||||
"node_id": str(uuid.uuid4()),
|
||||
"graph_exec_id": str(uuid.uuid4()),
|
||||
"node_exec_id": str(uuid.uuid4()),
|
||||
"user_id": str(uuid.uuid4()),
|
||||
}
|
||||
input_model = cast(type[BlockSchema], block.input_schema)
|
||||
credentials_input_fields = input_model.get_credentials_fields()
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
import json
|
||||
from typing import Any, Type, TypeVar, cast, get_args, get_origin
|
||||
|
||||
from prisma import Json as PrismaJson
|
||||
|
||||
|
||||
class ConversionError(ValueError):
|
||||
pass
|
||||
@@ -188,6 +190,8 @@ def type_match(value: Any, target_type: Type[T]) -> T:
|
||||
|
||||
def convert(value: Any, target_type: Type[T]) -> T:
|
||||
try:
|
||||
if isinstance(value, PrismaJson):
|
||||
value = value.data
|
||||
return cast(T, _try_convert(value, target_type, raise_on_mismatch=False))
|
||||
except Exception as e:
|
||||
raise ConversionError(f"Failed to convert {value} to {target_type}") from e
|
||||
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "AgentPreset" ADD COLUMN "isDeleted" BOOLEAN NOT NULL DEFAULT false;
|
||||
@@ -0,0 +1,46 @@
|
||||
/*
|
||||
Warnings:
|
||||
|
||||
- You are about to drop the `UserAgent` table. If the table is not empty, all the data it contains will be lost.
|
||||
|
||||
*/
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "UserAgent" DROP CONSTRAINT "UserAgent_agentId_agentVersion_fkey";
|
||||
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "UserAgent" DROP CONSTRAINT "UserAgent_agentPresetId_fkey";
|
||||
|
||||
-- DropForeignKey
|
||||
ALTER TABLE "UserAgent" DROP CONSTRAINT "UserAgent_userId_fkey";
|
||||
|
||||
-- DropTable
|
||||
DROP TABLE "UserAgent";
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "LibraryAgent" (
|
||||
"id" TEXT NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"userId" TEXT NOT NULL,
|
||||
"agentId" TEXT NOT NULL,
|
||||
"agentVersion" INTEGER NOT NULL,
|
||||
"agentPresetId" TEXT,
|
||||
"isFavorite" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isCreatedByUser" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isArchived" BOOLEAN NOT NULL DEFAULT false,
|
||||
"isDeleted" BOOLEAN NOT NULL DEFAULT false,
|
||||
|
||||
CONSTRAINT "LibraryAgent_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE INDEX "LibraryAgent_userId_idx" ON "LibraryAgent"("userId");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_agentId_agentVersion_fkey" FOREIGN KEY ("agentId", "agentVersion") REFERENCES "AgentGraph"("id", "version") ON DELETE RESTRICT ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_agentPresetId_fkey" FOREIGN KEY ("agentPresetId") REFERENCES "AgentPreset"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,2 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "LibraryAgent" ADD COLUMN "useGraphIsActiveVersion" BOOLEAN NOT NULL DEFAULT false;
|
||||
@@ -0,0 +1,6 @@
|
||||
-- AlterTable
|
||||
ALTER TABLE "LibraryAgent" ADD COLUMN "creatorId" TEXT,
|
||||
ADD COLUMN "image_url" TEXT;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_creatorId_fkey" FOREIGN KEY ("creatorId") REFERENCES "Profile"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,109 @@
|
||||
CREATE OR REPLACE FUNCTION generate_username()
|
||||
RETURNS TEXT AS $$
|
||||
DECLARE
|
||||
-- Random username generation
|
||||
selected_adjective TEXT;
|
||||
selected_animal TEXT;
|
||||
random_int INT;
|
||||
generated_username TEXT;
|
||||
BEGIN
|
||||
FOR i IN 1..10 LOOP
|
||||
SELECT unnest
|
||||
INTO selected_adjective
|
||||
FROM (VALUES ('happy'), ('clever'), ('swift'), ('bright'), ('wise'), ('funny'), ('cool'), ('awesome'), ('amazing'), ('fantastic'), ('wonderful')) AS t(unnest)
|
||||
ORDER BY random()
|
||||
LIMIT 1;
|
||||
|
||||
SELECT unnest
|
||||
INTO selected_animal
|
||||
FROM (VALUES ('fox'), ('wolf'), ('bear'), ('eagle'), ('owl'), ('tiger'), ('lion'), ('elephant'), ('giraffe'), ('zebra')) AS t(unnest)
|
||||
ORDER BY random()
|
||||
LIMIT 1;
|
||||
|
||||
SELECT floor(random() * (99999 - 10000 + 1) + 10000)::int
|
||||
INTO random_int;
|
||||
|
||||
generated_username := lower(selected_adjective || '-' || selected_animal || '-' || random_int);
|
||||
|
||||
-- Check if username is already taken
|
||||
IF NOT EXISTS (
|
||||
SELECT 1 FROM platform."Profile" WHERE username = generated_username
|
||||
) THEN
|
||||
-- Username is unique, exit the loop
|
||||
EXIT;
|
||||
END IF;
|
||||
|
||||
-- If we've tried 10 times and still haven't found a unique username
|
||||
IF i = 10 THEN
|
||||
RAISE EXCEPTION 'Unable to generate unique username after 10 attempts';
|
||||
END IF;
|
||||
END LOOP;
|
||||
RETURN generated_username;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql SECURITY DEFINER;
|
||||
|
||||
CREATE OR REPLACE FUNCTION add_user_and_profile_to_platform()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
-- Exit early if NEW.id is null to prevent constraint violations
|
||||
IF NEW.id IS NULL THEN
|
||||
RAISE EXCEPTION 'Cannot create user/profile: id is null';
|
||||
END IF;
|
||||
|
||||
/*
|
||||
1) Insert into platform."User"
|
||||
(If you already have such a row or want different columns, adjust below.)
|
||||
*/
|
||||
INSERT INTO platform."User" (id, email, "updatedAt")
|
||||
VALUES (NEW.id, NEW.email, now());
|
||||
|
||||
/*
|
||||
2) Insert into platform."Profile"
|
||||
Adjust columns/types depending on how your "Profile" schema is defined:
|
||||
- "links" might be text[], jsonb, or something else in your table.
|
||||
- "avatarUrl" and "description" can be defaulted as well.
|
||||
*/
|
||||
INSERT INTO platform."Profile"
|
||||
("id", "userId", name, username, description, links, "avatarUrl", "updatedAt")
|
||||
VALUES
|
||||
(
|
||||
NEW.id,
|
||||
NEW.id,
|
||||
COALESCE(split_part(NEW.email, '@', 1), 'user'), -- handle null email
|
||||
platform.generate_username(),
|
||||
'I''m new here',
|
||||
'{}', -- empty array or empty JSON, depending on your column definition
|
||||
'',
|
||||
now()
|
||||
);
|
||||
|
||||
RETURN NEW;
|
||||
EXCEPTION
|
||||
WHEN OTHERS THEN
|
||||
-- Log the error details
|
||||
RAISE NOTICE 'Error in add_user_and_profile_to_platform: %', SQLERRM;
|
||||
-- Re-raise the error
|
||||
RAISE;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql SECURITY DEFINER;
|
||||
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
-- Check if the auth schema and users table exist
|
||||
IF EXISTS (
|
||||
SELECT 1
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'auth'
|
||||
AND table_name = 'users'
|
||||
) THEN
|
||||
-- Drop the trigger if it exists
|
||||
DROP TRIGGER IF EXISTS user_added_to_platform ON auth.users;
|
||||
|
||||
-- Create the trigger
|
||||
CREATE TRIGGER user_added_to_platform
|
||||
AFTER INSERT ON auth.users
|
||||
FOR EACH ROW EXECUTE FUNCTION add_user_and_profile_to_platform();
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
@@ -0,0 +1,29 @@
|
||||
DO $$
|
||||
BEGIN
|
||||
IF EXISTS (
|
||||
SELECT 1
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'platform'
|
||||
AND table_name = 'User'
|
||||
) AND EXISTS (
|
||||
SELECT 1
|
||||
FROM information_schema.tables
|
||||
WHERE table_schema = 'platform'
|
||||
AND table_name = 'Profile'
|
||||
) THEN
|
||||
INSERT INTO platform."Profile"
|
||||
("id", "userId", name, username, description, links, "avatarUrl", "updatedAt")
|
||||
SELECT
|
||||
u.id,
|
||||
u.id,
|
||||
COALESCE(split_part(u.email, '@', 1), 'user'),
|
||||
platform.generate_username(),
|
||||
'I''m new here',
|
||||
'{}',
|
||||
'',
|
||||
now()
|
||||
FROM platform."User" u
|
||||
LEFT JOIN platform."Profile" p ON u.id = p."userId"
|
||||
WHERE p.id IS NULL;
|
||||
END IF;
|
||||
END $$;
|
||||
@@ -0,0 +1,45 @@
|
||||
-- CreateEnum
|
||||
CREATE TYPE "NotificationType" AS ENUM ('AGENT_RUN', 'ZERO_BALANCE', 'LOW_BALANCE', 'BLOCK_EXECUTION_FAILED', 'CONTINUOUS_AGENT_ERROR', 'DAILY_SUMMARY', 'WEEKLY_SUMMARY', 'MONTHLY_SUMMARY');
|
||||
|
||||
-- AlterTable
|
||||
ALTER TABLE "User" ADD COLUMN "maxEmailsPerDay" INTEGER NOT NULL DEFAULT 3,
|
||||
ADD COLUMN "notifyOnAgentRun" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnBlockExecutionFailed" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnContinuousAgentError" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnDailySummary" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnLowBalance" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnMonthlySummary" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnWeeklySummary" BOOLEAN NOT NULL DEFAULT true,
|
||||
ADD COLUMN "notifyOnZeroBalance" BOOLEAN NOT NULL DEFAULT true;
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "NotificationEvent" (
|
||||
"id" TEXT NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"userNotificationBatchId" TEXT,
|
||||
"type" "NotificationType" NOT NULL,
|
||||
"data" JSONB NOT NULL,
|
||||
|
||||
CONSTRAINT "NotificationEvent_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateTable
|
||||
CREATE TABLE "UserNotificationBatch" (
|
||||
"id" TEXT NOT NULL,
|
||||
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
|
||||
"userId" TEXT NOT NULL,
|
||||
"type" "NotificationType" NOT NULL,
|
||||
|
||||
CONSTRAINT "UserNotificationBatch_pkey" PRIMARY KEY ("id")
|
||||
);
|
||||
|
||||
-- CreateIndex
|
||||
CREATE UNIQUE INDEX "UserNotificationBatch_userId_type_key" ON "UserNotificationBatch"("userId", "type");
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "NotificationEvent" ADD CONSTRAINT "NotificationEvent_userNotificationBatchId_fkey" FOREIGN KEY ("userNotificationBatchId") REFERENCES "UserNotificationBatch"("id") ON DELETE SET NULL ON UPDATE CASCADE;
|
||||
|
||||
-- AddForeignKey
|
||||
ALTER TABLE "UserNotificationBatch" ADD CONSTRAINT "UserNotificationBatch_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
|
||||
@@ -0,0 +1,77 @@
|
||||
CREATE OR REPLACE FUNCTION migrate_text_column_to_json(
|
||||
p_table text, -- Table name, e.g. 'AgentNodeExecution'
|
||||
p_col text, -- Column name to convert, e.g. 'executionData'
|
||||
p_default json DEFAULT '{}'::json, -- Fallback value when original value is NULL.
|
||||
-- Pass NULL here if you prefer to leave NULLs.
|
||||
p_set_nullable boolean DEFAULT true -- If false, the new column will be NOT NULL.
|
||||
) RETURNS void AS $$
|
||||
DECLARE
|
||||
full_table text;
|
||||
tmp_col text;
|
||||
BEGIN
|
||||
-- Build a fully qualified table name using the current schema.
|
||||
full_table := format('%I.%I', current_schema(), p_table);
|
||||
tmp_col := p_col || '_tmp';
|
||||
|
||||
-- 0. Skip the migration if the column is already of type jsonb.
|
||||
IF EXISTS (
|
||||
SELECT 1
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema = current_schema()
|
||||
AND table_name = p_table
|
||||
AND column_name = p_col
|
||||
AND data_type = 'jsonb'
|
||||
) THEN
|
||||
RAISE NOTICE 'Column %I.%I is already of type jsonb, skipping migration.', full_table, p_col;
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
-- 1. Cleanup the original column from invalid JSON characters.
|
||||
EXECUTE format('UPDATE %s SET %I = replace(%I, E''\\u0000'', '''') WHERE %I LIKE ''%%\\u0000%%'';', full_table, p_col, p_col, p_col);
|
||||
|
||||
-- 2. Add the temporary column of type JSON.
|
||||
EXECUTE format('ALTER TABLE %s ADD COLUMN %I jsonb;', full_table, tmp_col);
|
||||
|
||||
-- 3. Convert the data:
|
||||
-- - If p_default IS NOT NULL, use it as the fallback value.
|
||||
-- - Otherwise, keep NULL.
|
||||
IF p_default IS NULL THEN
|
||||
EXECUTE format(
|
||||
'UPDATE %s SET %I = CASE WHEN %I IS NULL THEN NULL ELSE %I::json END;',
|
||||
full_table, tmp_col, p_col, p_col
|
||||
);
|
||||
ELSE
|
||||
EXECUTE format(
|
||||
'UPDATE %s SET %I = CASE WHEN %I IS NULL THEN %L::json ELSE %I::json END;',
|
||||
full_table, tmp_col, p_col, p_default::text, p_col
|
||||
);
|
||||
END IF;
|
||||
|
||||
-- 4. Drop the original text column.
|
||||
EXECUTE format('ALTER TABLE %s DROP COLUMN %I;', full_table, p_col);
|
||||
|
||||
-- 5. Rename the temporary column to the original column name.
|
||||
EXECUTE format('ALTER TABLE %s RENAME COLUMN %I TO %I;', full_table, tmp_col, p_col);
|
||||
|
||||
-- 6. Optionally set a DEFAULT for future inserts if a fallback is provided.
|
||||
IF p_default IS NOT NULL THEN
|
||||
EXECUTE format('ALTER TABLE %s ALTER COLUMN %I SET DEFAULT %L::json;',
|
||||
full_table, p_col, p_default::text);
|
||||
END IF;
|
||||
|
||||
-- 7. Optionally mark the column as NOT NULL.
|
||||
IF NOT p_set_nullable THEN
|
||||
EXECUTE format('ALTER TABLE %s ALTER COLUMN %I SET NOT NULL;', full_table, p_col);
|
||||
END IF;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
|
||||
BEGIN;
|
||||
SELECT migrate_text_column_to_json('AgentGraphExecution', 'stats', NULL, true);
|
||||
SELECT migrate_text_column_to_json('AgentNodeExecution', 'stats', NULL, true);
|
||||
SELECT migrate_text_column_to_json('AgentNodeExecution', 'executionData', NULL, true);
|
||||
SELECT migrate_text_column_to_json('AgentNode', 'constantInput', '{}'::json, false);
|
||||
SELECT migrate_text_column_to_json('AgentNode', 'metadata', '{}'::json, false);
|
||||
SELECT migrate_text_column_to_json('AgentNodeExecutionInputOutput', 'data', NULL, false);
|
||||
COMMIT;
|
||||
186
autogpt_platform/backend/poetry.lock
generated
186
autogpt_platform/backend/poetry.lock
generated
@@ -173,14 +173,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "anthropic"
|
||||
version = "0.40.0"
|
||||
version = "0.45.2"
|
||||
description = "The official Python library for the anthropic API"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "anthropic-0.40.0-py3-none-any.whl", hash = "sha256:442028ae8790ff9e3b6f8912043918755af1230d193904ae2ef78cc22995280c"},
|
||||
{file = "anthropic-0.40.0.tar.gz", hash = "sha256:3efeca6d9e97813f93ed34322c6c7ea2279bf0824cd0aa71b59ce222665e2b87"},
|
||||
{file = "anthropic-0.45.2-py3-none-any.whl", hash = "sha256:ecd746f7274451dfcb7e1180571ead624c7e1195d1d46cb7c70143d2aedb4d35"},
|
||||
{file = "anthropic-0.45.2.tar.gz", hash = "sha256:32a18b9ecd12c91b2be4cae6ca2ab46a06937b5aa01b21308d97a6d29794fb5e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -190,7 +190,7 @@ httpx = ">=0.23.0,<1"
|
||||
jiter = ">=0.4.0,<1"
|
||||
pydantic = ">=1.9.0,<3"
|
||||
sniffio = "*"
|
||||
typing-extensions = ">=4.7,<5"
|
||||
typing-extensions = ">=4.10,<5"
|
||||
|
||||
[package.extras]
|
||||
bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"]
|
||||
@@ -293,14 +293,14 @@ develop = true
|
||||
[package.dependencies]
|
||||
colorama = "^0.4.6"
|
||||
expiringdict = "^1.2.2"
|
||||
google-cloud-logging = "^3.11.3"
|
||||
pydantic = "^2.10.5"
|
||||
google-cloud-logging = "^3.11.4"
|
||||
pydantic = "^2.10.6"
|
||||
pydantic-settings = "^2.7.1"
|
||||
pyjwt = "^2.10.1"
|
||||
pytest-asyncio = "^0.25.2"
|
||||
pytest-asyncio = "^0.25.3"
|
||||
pytest-mock = "^3.14.0"
|
||||
python-dotenv = "^1.0.1"
|
||||
supabase = "^2.11.0"
|
||||
supabase = "^2.13.0"
|
||||
|
||||
[package.source]
|
||||
type = "directory"
|
||||
@@ -324,7 +324,7 @@ version = "24.10.0"
|
||||
description = "The uncompromising code formatter."
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["dev"]
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"},
|
||||
{file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"},
|
||||
@@ -740,6 +740,27 @@ files = [
|
||||
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dnspython"
|
||||
version = "2.7.0"
|
||||
description = "DNS toolkit"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
|
||||
{file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
|
||||
dnssec = ["cryptography (>=43)"]
|
||||
doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
|
||||
doq = ["aioquic (>=1.0.0)"]
|
||||
idna = ["idna (>=3.7)"]
|
||||
trio = ["trio (>=0.23)"]
|
||||
wmi = ["wmi (>=1.5.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "e2b"
|
||||
version = "1.0.5"
|
||||
@@ -763,14 +784,14 @@ typing-extensions = ">=4.1.0"
|
||||
|
||||
[[package]]
|
||||
name = "e2b-code-interpreter"
|
||||
version = "1.0.4"
|
||||
version = "1.0.5"
|
||||
description = "E2B Code Interpreter - Stateful code execution"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "e2b_code_interpreter-1.0.4-py3-none-any.whl", hash = "sha256:e8cea4946b3457072a524250aee712f7f8d44834b91cd9c13da3bdf96eda1a6e"},
|
||||
{file = "e2b_code_interpreter-1.0.4.tar.gz", hash = "sha256:fec5651d98ca0d03dd038c5df943a0beaeb59c6d422112356f55f2b662d8dea1"},
|
||||
{file = "e2b_code_interpreter-1.0.5-py3-none-any.whl", hash = "sha256:4c7814e9eabba58097bf5e4019d327b3a82fab0813eafca4311b29ca6ea0639d"},
|
||||
{file = "e2b_code_interpreter-1.0.5.tar.gz", hash = "sha256:e7f70b039e6a70f8e592f90f806d696dc1056919414daabeb89e86c9b650a987"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -778,6 +799,22 @@ attrs = ">=21.3.0"
|
||||
e2b = ">=1.0.4,<2.0.0"
|
||||
httpx = ">=0.20.0,<1.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "email-validator"
|
||||
version = "2.2.0"
|
||||
description = "A robust email address syntax and deliverability validation library."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
|
||||
{file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
dnspython = ">=2.0.0"
|
||||
idna = ">=2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.2.2"
|
||||
@@ -827,14 +864,14 @@ typing-extensions = "*"
|
||||
|
||||
[[package]]
|
||||
name = "fastapi"
|
||||
version = "0.115.7"
|
||||
version = "0.115.8"
|
||||
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "fastapi-0.115.7-py3-none-any.whl", hash = "sha256:eb6a8c8bf7f26009e8147111ff15b5177a0e19bb4a45bc3486ab14804539d21e"},
|
||||
{file = "fastapi-0.115.7.tar.gz", hash = "sha256:0f106da6c01d88a6786b3248fb4d7a940d071f6f488488898ad5d354b25ed015"},
|
||||
{file = "fastapi-0.115.8-py3-none-any.whl", hash = "sha256:753a96dd7e036b34eeef8babdfcfe3f28ff79648f86551eb36bfc1b0bf4a8cbf"},
|
||||
{file = "fastapi-0.115.8.tar.gz", hash = "sha256:0ce9111231720190473e222cdf0f07f7206ad7e53ea02beb1d2dc36e2f0741e9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1015,14 +1052,14 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-api-python-client"
|
||||
version = "2.159.0"
|
||||
version = "2.160.0"
|
||||
description = "Google API Client Library for Python"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "google_api_python_client-2.159.0-py2.py3-none-any.whl", hash = "sha256:baef0bb631a60a0bd7c0bf12a5499e3a40cd4388484de7ee55c1950bf820a0cf"},
|
||||
{file = "google_api_python_client-2.159.0.tar.gz", hash = "sha256:55197f430f25c907394b44fa078545ffef89d33fd4dca501b7db9f0d8e224bd6"},
|
||||
{file = "google_api_python_client-2.160.0-py2.py3-none-any.whl", hash = "sha256:63d61fb3e4cf3fb31a70a87f45567c22f6dfe87bbfa27252317e3e2c42900db4"},
|
||||
{file = "google_api_python_client-2.160.0.tar.gz", hash = "sha256:a8ccafaecfa42d15d5b5c3134ced8de08380019717fc9fb1ed510ca58eca3b7e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1147,14 +1184,14 @@ grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"]
|
||||
|
||||
[[package]]
|
||||
name = "google-cloud-logging"
|
||||
version = "3.11.3"
|
||||
version = "3.11.4"
|
||||
description = "Stackdriver Logging API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "google_cloud_logging-3.11.3-py2.py3-none-any.whl", hash = "sha256:b8ec23f2998f76a58f8492db26a0f4151dd500425c3f08448586b85972f3c494"},
|
||||
{file = "google_cloud_logging-3.11.3.tar.gz", hash = "sha256:0a73cd94118875387d4535371d9e9426861edef8e44fba1261e86782d5b8d54f"},
|
||||
{file = "google_cloud_logging-3.11.4-py2.py3-none-any.whl", hash = "sha256:1d465ac62df29fb94bba4d6b4891035e57d573d84541dd8a40eebbc74422b2f0"},
|
||||
{file = "google_cloud_logging-3.11.4.tar.gz", hash = "sha256:32305d989323f3c58603044e2ac5d9cf23e9465ede511bbe90b4309270d3195c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1173,14 +1210,14 @@ protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4
|
||||
|
||||
[[package]]
|
||||
name = "google-cloud-storage"
|
||||
version = "2.19.0"
|
||||
version = "3.0.0"
|
||||
description = "Google Cloud Storage API client library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"},
|
||||
{file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"},
|
||||
{file = "google_cloud_storage-3.0.0-py2.py3-none-any.whl", hash = "sha256:f85fd059650d2dbb0ac158a9a6b304b66143b35ed2419afec2905ca522eb2c6a"},
|
||||
{file = "google_cloud_storage-3.0.0.tar.gz", hash = "sha256:2accb3e828e584888beff1165e5f3ac61aa9088965eb0165794a82d8c7f95297"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1303,6 +1340,23 @@ files = [
|
||||
httpx = {version = ">=0.26,<0.29", extras = ["http2"]}
|
||||
pydantic = ">=1.10,<3"
|
||||
|
||||
[[package]]
|
||||
name = "gravitasml"
|
||||
version = "0.1.3"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.10"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "gravitasml-0.1.3-py3-none-any.whl", hash = "sha256:51ff98b4564b7a61f7796f18d5f2558b919d30b3722579296089645b7bc18b85"},
|
||||
{file = "gravitasml-0.1.3.tar.gz", hash = "sha256:04d240b9fa35878252d57a36032130b6516487468847fcdced1022c032a20f57"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
black = ">=24.10.0,<25.0.0"
|
||||
pydantic = ">=2.9.2,<3.0.0"
|
||||
pytest = ">=8.2.1,<9.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "greenlet"
|
||||
version = "3.1.1"
|
||||
@@ -1393,14 +1447,14 @@ test = ["objgraph", "psutil"]
|
||||
|
||||
[[package]]
|
||||
name = "groq"
|
||||
version = "0.13.1"
|
||||
version = "0.18.0"
|
||||
description = "The official Python library for the groq API"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "groq-0.13.1-py3-none-any.whl", hash = "sha256:0c5d1d6df93de55de705fe73729b79baaa0c871f7575d6aa64b2962b56101b3e"},
|
||||
{file = "groq-0.13.1.tar.gz", hash = "sha256:588fd5bee984f4eb46ec89552778d5698b9e9614435defef868645c19463cbcc"},
|
||||
{file = "groq-0.18.0-py3-none-any.whl", hash = "sha256:81d5ac00057a45d8ce559d23ab5d3b3893011d1f12c35187ab35a9182d826ea6"},
|
||||
{file = "groq-0.18.0.tar.gz", hash = "sha256:8e2ccfea406d68b3525af4b7c0e321fcb3d2a73fc60bb70b4156e6cd88c72f03"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2151,14 +2205,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "mem0ai"
|
||||
version = "0.1.44"
|
||||
version = "0.1.48"
|
||||
description = "Long-term memory for AI Agents"
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "mem0ai-0.1.44-py3-none-any.whl", hash = "sha256:32260a2cd935035a1b16ce04ad2e4510a5bd97618709466e2d06303e0eb8d9d4"},
|
||||
{file = "mem0ai-0.1.44.tar.gz", hash = "sha256:93214272915d94f673d370bb8fe7a8bfc21806267e65700b471bec454dcdfa5c"},
|
||||
{file = "mem0ai-0.1.48-py3-none-any.whl", hash = "sha256:23d1bd591c36da9e1f9f013d6f87a79ef9eb1495ac27b1e380af7f819b07fee0"},
|
||||
{file = "mem0ai-0.1.48.tar.gz", hash = "sha256:f5cceb768fa2898e59d55d3d472ccb983e3d9ae82ccba1d435545e16853dbeb6"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2321,7 +2375,7 @@ version = "1.0.0"
|
||||
description = "Type system extensions for programs checked with the mypy type checker."
|
||||
optional = false
|
||||
python-versions = ">=3.5"
|
||||
groups = ["dev"]
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
|
||||
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
|
||||
@@ -2439,14 +2493,14 @@ pydantic = ">=2.9.0,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "openai"
|
||||
version = "1.60.0"
|
||||
version = "1.61.1"
|
||||
description = "The official Python library for the openai API"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "openai-1.60.0-py3-none-any.whl", hash = "sha256:df06c43be8018274980ac363da07d4b417bd835ead1c66e14396f6f15a0d5dda"},
|
||||
{file = "openai-1.60.0.tar.gz", hash = "sha256:7fa536cd4b644718645b874d2706e36dbbef38b327e42ca0623275da347ee1a9"},
|
||||
{file = "openai-1.61.1-py3-none-any.whl", hash = "sha256:72b0826240ce26026ac2cd17951691f046e5be82ad122d20a8e1b30ca18bd11e"},
|
||||
{file = "openai-1.61.1.tar.gz", hash = "sha256:ce1851507218209961f89f3520e06726c0aa7d0512386f0f977e3ac3e4f2472e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -2525,12 +2579,29 @@ version = "0.12.1"
|
||||
description = "Utility library for gitignore style pattern matching of file paths."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
|
||||
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pika"
|
||||
version = "1.3.2"
|
||||
description = "Pika Python AMQP Client Library"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pika-1.3.2-py3-none-any.whl", hash = "sha256:0779a7c1fafd805672796085560d290213a465e4f6f76a6fb19e378d8041a14f"},
|
||||
{file = "pika-1.3.2.tar.gz", hash = "sha256:b2a327ddddf8570b4965b3576ac77091b850262d34ce8c1d8cb4e4146aa4145f"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
gevent = ["gevent"]
|
||||
tornado = ["tornado"]
|
||||
twisted = ["twisted"]
|
||||
|
||||
[[package]]
|
||||
name = "pillow"
|
||||
version = "10.4.0"
|
||||
@@ -2689,7 +2760,7 @@ version = "4.3.6"
|
||||
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
groups = ["dev"]
|
||||
groups = ["main", "dev"]
|
||||
files = [
|
||||
{file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
|
||||
{file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
|
||||
@@ -3192,6 +3263,7 @@ files = [
|
||||
|
||||
[package.dependencies]
|
||||
annotated-types = ">=0.6.0"
|
||||
email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""}
|
||||
pydantic-core = "2.27.2"
|
||||
typing-extensions = ">=4.12.2"
|
||||
|
||||
@@ -3451,14 +3523,14 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments
|
||||
|
||||
[[package]]
|
||||
name = "pytest-asyncio"
|
||||
version = "0.25.2"
|
||||
version = "0.25.3"
|
||||
description = "Pytest support for asyncio"
|
||||
optional = false
|
||||
python-versions = ">=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"},
|
||||
{file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"},
|
||||
{file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"},
|
||||
{file = "pytest_asyncio-0.25.3.tar.gz", hash = "sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -3959,14 +4031,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "sentry-sdk"
|
||||
version = "2.19.2"
|
||||
version = "2.20.0"
|
||||
description = "Python client for Sentry (https://sentry.io)"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "sentry_sdk-2.19.2-py2.py3-none-any.whl", hash = "sha256:ebdc08228b4d131128e568d696c210d846e5b9d70aa0327dec6b1272d9d40b84"},
|
||||
{file = "sentry_sdk-2.19.2.tar.gz", hash = "sha256:467df6e126ba242d39952375dd816fbee0f217d119bf454a8ce74cf1e7909e8d"},
|
||||
{file = "sentry_sdk-2.20.0-py2.py3-none-any.whl", hash = "sha256:c359a1edf950eb5e80cffd7d9111f3dbeef57994cb4415df37d39fda2cf22364"},
|
||||
{file = "sentry_sdk-2.20.0.tar.gz", hash = "sha256:afa82713a92facf847df3c6f63cec71eb488d826a50965def3d7722aa6f0fdab"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4011,6 +4083,7 @@ sqlalchemy = ["sqlalchemy (>=1.2)"]
|
||||
starlette = ["starlette (>=0.19.1)"]
|
||||
starlite = ["starlite (>=1.48)"]
|
||||
tornado = ["tornado (>=6)"]
|
||||
unleash = ["UnleashClient (>=6.0.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "serpent"
|
||||
@@ -4229,14 +4302,14 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "stripe"
|
||||
version = "11.4.1"
|
||||
version = "11.5.0"
|
||||
description = "Python bindings for the Stripe API"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "stripe-11.4.1-py2.py3-none-any.whl", hash = "sha256:8aa47a241de0355c383c916c4ef7273ab666f096a44ee7081e357db4a36f0cce"},
|
||||
{file = "stripe-11.4.1.tar.gz", hash = "sha256:7ddd251b622d490fe57d78487855dc9f4d95b1bb113607e81fd377037a133d5a"},
|
||||
{file = "stripe-11.5.0-py2.py3-none-any.whl", hash = "sha256:3b2cd47ed3002328249bff5cacaee38d5e756c3899ab425d3bd07acdaf32534a"},
|
||||
{file = "stripe-11.5.0.tar.gz", hash = "sha256:bc3e0358ffc23d5ecfa8aafec1fa4f048ee8107c3237bcb00003e68c8c96fa02"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4245,14 +4318,14 @@ typing-extensions = {version = ">=4.5.0", markers = "python_version >= \"3.7\""}
|
||||
|
||||
[[package]]
|
||||
name = "supabase"
|
||||
version = "2.11.0"
|
||||
version = "2.13.0"
|
||||
description = "Supabase client for Python."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "supabase-2.11.0-py3-none-any.whl", hash = "sha256:67a0da498895f4cd6554935e2854b4c41f87b297b78fb9c9414902a382041406"},
|
||||
{file = "supabase-2.11.0.tar.gz", hash = "sha256:2a906f7909fd9a50f944cd9332ce66c684e2d37c0864284d34c5815e6c63cc01"},
|
||||
{file = "supabase-2.13.0-py3-none-any.whl", hash = "sha256:6cfccc055be21dab311afc5e9d5b37f3a4966f8394703763fbc8f8e86f36eaa6"},
|
||||
{file = "supabase-2.13.0.tar.gz", hash = "sha256:452574d34bd978c8d11b5f02b0182b48e8854e511c969483c83875ec01495f11"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -4295,6 +4368,21 @@ files = [
|
||||
doc = ["reno", "sphinx"]
|
||||
test = ["pytest", "tornado (>=4.5)", "typeguard"]
|
||||
|
||||
[[package]]
|
||||
name = "todoist-api-python"
|
||||
version = "2.1.7"
|
||||
description = "Official Python SDK for the Todoist REST API."
|
||||
optional = false
|
||||
python-versions = "<4.0,>=3.9"
|
||||
groups = ["main"]
|
||||
files = [
|
||||
{file = "todoist_api_python-2.1.7-py3-none-any.whl", hash = "sha256:278bfe851b9bd19bde5ff5de09d813d671ef7310ba55e1962131fca5b59bb735"},
|
||||
{file = "todoist_api_python-2.1.7.tar.gz", hash = "sha256:84934a19ccd83fb61010a8126362a5d7d6486c92454c111307ba55bc74903f5c"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
requests = ">=2.32.3,<3.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.2.1"
|
||||
@@ -5034,4 +5122,4 @@ type = ["pytest-mypy"]
|
||||
[metadata]
|
||||
lock-version = "2.1"
|
||||
python-versions = ">=3.10,<3.13"
|
||||
content-hash = "38a5c750ddca1a6264fd98b7ee74d199c2bbf57d3acc189264bd9f8ec90febc2"
|
||||
content-hash = "4052d96f95ad3dbf8bef4d651168f6df1ef21c506f152ddca119ad8f23caf159"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[tool.poetry]
|
||||
name = "autogpt-platform-backend"
|
||||
version = "0.3.4"
|
||||
version = "0.4.9"
|
||||
description = "A platform for building AI-powered agentic workflows"
|
||||
authors = ["AutoGPT <info@agpt.co>"]
|
||||
readme = "README.md"
|
||||
@@ -10,66 +10,71 @@ packages = [{ include = "backend", format = "sdist" }]
|
||||
[tool.poetry.dependencies]
|
||||
python = ">=3.10,<3.13"
|
||||
aio-pika = "^9.5.4"
|
||||
anthropic = "^0.40.0"
|
||||
anthropic = "^0.45.2"
|
||||
apscheduler = "^3.11.0"
|
||||
autogpt-libs = { path = "../autogpt_libs", develop = true }
|
||||
click = "^8.1.7"
|
||||
cryptography = "^43.0"
|
||||
discord-py = "^2.4.0"
|
||||
e2b-code-interpreter = "^1.0.1"
|
||||
fastapi = "^0.115.5"
|
||||
e2b-code-interpreter = "^1.0.5"
|
||||
fastapi = "^0.115.8"
|
||||
feedparser = "^6.0.11"
|
||||
flake8 = "^7.0.0"
|
||||
google-api-python-client = "^2.154.0"
|
||||
google-api-python-client = "^2.160.0"
|
||||
google-auth-oauthlib = "^1.2.1"
|
||||
groq = "^0.13.1"
|
||||
google-cloud-storage = "^3.0.0"
|
||||
googlemaps = "^4.10.0"
|
||||
gravitasml = "^0.1.3"
|
||||
groq = "^0.18.0"
|
||||
jinja2 = "^3.1.4"
|
||||
jsonref = "^1.1.0"
|
||||
jsonschema = "^4.22.0"
|
||||
launchdarkly-server-sdk = "^9.8.0"
|
||||
mem0ai = "^0.1.48"
|
||||
moviepy = "^2.1.2"
|
||||
ollama = "^0.4.1"
|
||||
openai = "^1.57.4"
|
||||
openai = "^1.61.1"
|
||||
pika = "^1.3.2"
|
||||
pinecone = "^5.3.1"
|
||||
praw = "~7.8.1"
|
||||
prisma = "^0.15.0"
|
||||
psutil = "^6.1.0"
|
||||
pydantic = "^2.9.2"
|
||||
psycopg2-binary = "^2.9.10"
|
||||
pydantic = {extras = ["email"], version = "^2.10.6"}
|
||||
pydantic-settings = "^2.3.4"
|
||||
pyro5 = "^5.15"
|
||||
pytest = "^8.2.1"
|
||||
pytest-asyncio = "^0.25.0"
|
||||
pytest-asyncio = "^0.25.3"
|
||||
python-dotenv = "^1.0.1"
|
||||
python-multipart = "^0.0.20"
|
||||
redis = "^5.2.0"
|
||||
sentry-sdk = "2.19.2"
|
||||
replicate = "^1.0.4"
|
||||
sentry-sdk = "2.20.0"
|
||||
sqlalchemy = "^2.0.36"
|
||||
strenum = "^0.4.9"
|
||||
stripe = "^11.3.0"
|
||||
supabase = "2.11.0"
|
||||
stripe = "^11.5.0"
|
||||
supabase = "2.13.0"
|
||||
tenacity = "^9.0.0"
|
||||
todoist-api-python = "^2.1.7"
|
||||
tweepy = "^4.14.0"
|
||||
uvicorn = { extras = ["standard"], version = "^0.34.0" }
|
||||
websockets = "^13.1"
|
||||
youtube-transcript-api = "^0.6.2"
|
||||
googlemaps = "^4.10.0"
|
||||
replicate = "^1.0.4"
|
||||
pinecone = "^5.3.1"
|
||||
cryptography = "^43.0"
|
||||
python-multipart = "^0.0.20"
|
||||
sqlalchemy = "^2.0.36"
|
||||
psycopg2-binary = "^2.9.10"
|
||||
google-cloud-storage = "^2.18.2"
|
||||
launchdarkly-server-sdk = "^9.8.0"
|
||||
mem0ai = "^0.1.44"
|
||||
moviepy = "^2.1.2"
|
||||
# NOTE: please insert new dependencies in their alphabetical location
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
poethepoet = "^0.32.1"
|
||||
aiohappyeyeballs = "^2.4.4"
|
||||
black = "^24.10.0"
|
||||
faker = "^33.3.1"
|
||||
httpx = "^0.27.0"
|
||||
isort = "^5.13.2"
|
||||
poethepoet = "^0.32.1"
|
||||
pyright = "^1.1.392"
|
||||
pytest-mock = "^3.14.0"
|
||||
pytest-watcher = "^0.4.2"
|
||||
requests = "^2.32.3"
|
||||
ruff = "^0.9.2"
|
||||
pyright = "^1.1.392"
|
||||
isort = "^5.13.2"
|
||||
black = "^24.10.0"
|
||||
aiohappyeyeballs = "^2.4.4"
|
||||
pytest-mock = "^3.14.0"
|
||||
faker = "^33.3.1"
|
||||
# NOTE: please insert new dependencies in their alphabetical location
|
||||
|
||||
[build-system]
|
||||
requires = ["poetry-core"]
|
||||
|
||||
@@ -23,15 +23,26 @@ model User {
|
||||
stripeCustomerId String?
|
||||
topUpConfig Json?
|
||||
|
||||
maxEmailsPerDay Int @default(3)
|
||||
notifyOnAgentRun Boolean @default(true)
|
||||
notifyOnZeroBalance Boolean @default(true)
|
||||
notifyOnLowBalance Boolean @default(true)
|
||||
notifyOnBlockExecutionFailed Boolean @default(true)
|
||||
notifyOnContinuousAgentError Boolean @default(true)
|
||||
notifyOnDailySummary Boolean @default(true)
|
||||
notifyOnWeeklySummary Boolean @default(true)
|
||||
notifyOnMonthlySummary Boolean @default(true)
|
||||
|
||||
// Relations
|
||||
|
||||
AgentGraphs AgentGraph[]
|
||||
AgentGraphExecutions AgentGraphExecution[]
|
||||
AnalyticsDetails AnalyticsDetails[]
|
||||
AnalyticsMetrics AnalyticsMetrics[]
|
||||
CreditTransaction CreditTransaction[]
|
||||
|
||||
AgentPreset AgentPreset[]
|
||||
UserAgent UserAgent[]
|
||||
AgentPreset AgentPreset[]
|
||||
LibraryAgent LibraryAgent[]
|
||||
|
||||
Profile Profile[]
|
||||
StoreListing StoreListing[]
|
||||
@@ -39,6 +50,7 @@ model User {
|
||||
StoreListingSubmission StoreListingSubmission[]
|
||||
APIKeys APIKey[]
|
||||
IntegrationWebhooks IntegrationWebhook[]
|
||||
UserNotificationBatch UserNotificationBatch[]
|
||||
|
||||
@@index([id])
|
||||
@@index([email])
|
||||
@@ -53,6 +65,7 @@ model AgentGraph {
|
||||
|
||||
name String?
|
||||
description String?
|
||||
|
||||
isActive Boolean @default(true)
|
||||
isTemplate Boolean @default(false)
|
||||
|
||||
@@ -66,7 +79,7 @@ model AgentGraph {
|
||||
AgentGraphExecution AgentGraphExecution[]
|
||||
|
||||
AgentPreset AgentPreset[]
|
||||
UserAgent UserAgent[]
|
||||
LibraryAgent LibraryAgent[]
|
||||
StoreListing StoreListing[]
|
||||
StoreListingVersion StoreListingVersion?
|
||||
|
||||
@@ -104,15 +117,56 @@ model AgentPreset {
|
||||
Agent AgentGraph @relation(fields: [agentId, agentVersion], references: [id, version], onDelete: Cascade)
|
||||
|
||||
InputPresets AgentNodeExecutionInputOutput[] @relation("AgentPresetsInputData")
|
||||
UserAgents UserAgent[]
|
||||
LibraryAgents LibraryAgent[]
|
||||
AgentExecution AgentGraphExecution[]
|
||||
|
||||
isDeleted Boolean @default(false)
|
||||
|
||||
@@index([userId])
|
||||
}
|
||||
|
||||
enum NotificationType {
|
||||
AGENT_RUN
|
||||
ZERO_BALANCE
|
||||
LOW_BALANCE
|
||||
BLOCK_EXECUTION_FAILED
|
||||
CONTINUOUS_AGENT_ERROR
|
||||
DAILY_SUMMARY
|
||||
WEEKLY_SUMMARY
|
||||
MONTHLY_SUMMARY
|
||||
}
|
||||
|
||||
model NotificationEvent {
|
||||
id String @id @default(uuid())
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
|
||||
UserNotificationBatch UserNotificationBatch? @relation(fields: [userNotificationBatchId], references: [id])
|
||||
userNotificationBatchId String?
|
||||
|
||||
type NotificationType
|
||||
data Json
|
||||
}
|
||||
|
||||
model UserNotificationBatch {
|
||||
id String @id @default(uuid())
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
|
||||
userId String
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
type NotificationType
|
||||
|
||||
notifications NotificationEvent[]
|
||||
|
||||
// Each user can only have one batch of a notification type at a time
|
||||
@@unique([userId, type])
|
||||
}
|
||||
|
||||
// For the library page
|
||||
// It is a user controlled list of agents, that they will see in there library
|
||||
model UserAgent {
|
||||
model LibraryAgent {
|
||||
id String @id @default(uuid())
|
||||
createdAt DateTime @default(now())
|
||||
updatedAt DateTime @default(now()) @updatedAt
|
||||
@@ -120,6 +174,8 @@ model UserAgent {
|
||||
userId String
|
||||
User User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
image_url String?
|
||||
|
||||
agentId String
|
||||
agentVersion Int
|
||||
Agent AgentGraph @relation(fields: [agentId, agentVersion], references: [id, version])
|
||||
@@ -127,6 +183,11 @@ model UserAgent {
|
||||
agentPresetId String?
|
||||
AgentPreset AgentPreset? @relation(fields: [agentPresetId], references: [id])
|
||||
|
||||
creatorId String?
|
||||
Creator Profile? @relation(fields: [creatorId], references: [id])
|
||||
|
||||
useGraphIsActiveVersion Boolean @default(false)
|
||||
|
||||
isFavorite Boolean @default(false)
|
||||
isCreatedByUser Boolean @default(false)
|
||||
isArchived Boolean @default(false)
|
||||
@@ -158,15 +219,13 @@ model AgentNode {
|
||||
// List of produced output, that the child node should be executed.
|
||||
Output AgentNodeLink[] @relation("AgentNodeSource")
|
||||
|
||||
// JSON serialized dict[str, str] containing predefined input values.
|
||||
constantInput String @default("{}")
|
||||
constantInput Json @default("{}")
|
||||
|
||||
// For webhook-triggered blocks: reference to the webhook that triggers the node
|
||||
webhookId String?
|
||||
Webhook IntegrationWebhook? @relation(fields: [webhookId], references: [id])
|
||||
|
||||
// JSON serialized dict[str, str] containing the node metadata.
|
||||
metadata String @default("{}")
|
||||
metadata Json @default("{}")
|
||||
|
||||
ExecutionHistory AgentNodeExecution[]
|
||||
|
||||
@@ -239,7 +298,7 @@ model AgentGraphExecution {
|
||||
userId String
|
||||
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
|
||||
|
||||
stats String? // JSON serialized object
|
||||
stats Json?
|
||||
AgentPreset AgentPreset? @relation(fields: [agentPresetId], references: [id])
|
||||
agentPresetId String?
|
||||
|
||||
@@ -261,14 +320,13 @@ model AgentNodeExecution {
|
||||
Output AgentNodeExecutionInputOutput[] @relation("AgentNodeExecutionOutput")
|
||||
|
||||
executionStatus AgentExecutionStatus @default(COMPLETED)
|
||||
// Final JSON serialized input data for the node execution.
|
||||
executionData String?
|
||||
executionData Json?
|
||||
addedTime DateTime @default(now())
|
||||
queuedTime DateTime?
|
||||
startedTime DateTime?
|
||||
endedTime DateTime?
|
||||
|
||||
stats String? // JSON serialized object
|
||||
stats Json?
|
||||
|
||||
@@index([agentGraphExecutionId])
|
||||
@@index([agentNodeId])
|
||||
@@ -279,7 +337,7 @@ model AgentNodeExecutionInputOutput {
|
||||
id String @id @default(uuid())
|
||||
|
||||
name String
|
||||
data String
|
||||
data Json
|
||||
time DateTime @default(now())
|
||||
|
||||
// Prisma requires explicit back-references.
|
||||
@@ -425,6 +483,8 @@ model Profile {
|
||||
|
||||
isFeatured Boolean @default(false)
|
||||
|
||||
LibraryAgent LibraryAgent[]
|
||||
|
||||
@@index([username])
|
||||
@@index([userId])
|
||||
}
|
||||
|
||||
@@ -5,8 +5,9 @@ import fastapi.responses
|
||||
import pytest
|
||||
from prisma.models import User
|
||||
|
||||
import backend.server.v2.library.model
|
||||
import backend.server.v2.store.model
|
||||
from backend.blocks.basic import FindInDictionaryBlock, StoreValueBlock
|
||||
from backend.blocks.basic import AgentInputBlock, FindInDictionaryBlock, StoreValueBlock
|
||||
from backend.blocks.maths import CalculatorBlock, Operation
|
||||
from backend.data import execution, graph
|
||||
from backend.server.model import CreateGraph
|
||||
@@ -39,7 +40,7 @@ async def execute_graph(
|
||||
graph_version=test_graph.version,
|
||||
node_input=input_data,
|
||||
)
|
||||
graph_exec_id = response["id"]
|
||||
graph_exec_id = response.graph_exec_id
|
||||
logger.info(f"Created execution with ID: {graph_exec_id}")
|
||||
|
||||
# Execution queue should be empty
|
||||
@@ -131,7 +132,7 @@ async def test_agent_execution(server: SpinTestServer):
|
||||
logger.info("Starting test_agent_execution")
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(server, create_test_graph(), test_user)
|
||||
data = {"node_input": {"input_1": "Hello", "input_2": "World"}}
|
||||
data = {"input_1": "Hello", "input_2": "World"}
|
||||
graph_exec_id = await execute_graph(
|
||||
server.agent_server,
|
||||
test_graph,
|
||||
@@ -295,6 +296,192 @@ async def test_static_input_link_on_graph(server: SpinTestServer):
|
||||
logger.info("Completed test_static_input_link_on_graph")
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_execute_preset(server: SpinTestServer):
|
||||
"""
|
||||
Test executing a preset.
|
||||
|
||||
This test ensures that:
|
||||
1. A preset can be successfully executed
|
||||
2. The execution results are correct
|
||||
|
||||
Args:
|
||||
server (SpinTestServer): The test server instance.
|
||||
"""
|
||||
# Create test graph and user
|
||||
nodes = [
|
||||
graph.Node( # 0
|
||||
block_id=AgentInputBlock().id,
|
||||
input_default={"name": "dictionary"},
|
||||
),
|
||||
graph.Node( # 1
|
||||
block_id=AgentInputBlock().id,
|
||||
input_default={"name": "selected_value"},
|
||||
),
|
||||
graph.Node( # 2
|
||||
block_id=StoreValueBlock().id,
|
||||
input_default={"input": {"key1": "Hi", "key2": "Everyone"}},
|
||||
),
|
||||
graph.Node( # 3
|
||||
block_id=FindInDictionaryBlock().id,
|
||||
input_default={"key": "", "input": {}},
|
||||
),
|
||||
]
|
||||
links = [
|
||||
graph.Link(
|
||||
source_id=nodes[0].id,
|
||||
sink_id=nodes[2].id,
|
||||
source_name="result",
|
||||
sink_name="input",
|
||||
),
|
||||
graph.Link(
|
||||
source_id=nodes[1].id,
|
||||
sink_id=nodes[3].id,
|
||||
source_name="result",
|
||||
sink_name="key",
|
||||
),
|
||||
graph.Link(
|
||||
source_id=nodes[2].id,
|
||||
sink_id=nodes[3].id,
|
||||
source_name="output",
|
||||
sink_name="input",
|
||||
),
|
||||
]
|
||||
test_graph = graph.Graph(
|
||||
name="TestGraph",
|
||||
description="Test graph",
|
||||
nodes=nodes,
|
||||
links=links,
|
||||
)
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(server, test_graph, test_user)
|
||||
|
||||
# Create preset with initial values
|
||||
preset = backend.server.v2.library.model.CreateLibraryAgentPresetRequest(
|
||||
name="Test Preset With Clash",
|
||||
description="Test preset with clashing input values",
|
||||
agent_id=test_graph.id,
|
||||
agent_version=test_graph.version,
|
||||
inputs={
|
||||
"dictionary": {"key1": "Hello", "key2": "World"},
|
||||
"selected_value": "key2",
|
||||
},
|
||||
is_active=True,
|
||||
)
|
||||
created_preset = await server.agent_server.test_create_preset(preset, test_user.id)
|
||||
|
||||
# Execute preset with overriding values
|
||||
result = await server.agent_server.test_execute_preset(
|
||||
graph_id=test_graph.id,
|
||||
graph_version=test_graph.version,
|
||||
preset_id=created_preset.id,
|
||||
user_id=test_user.id,
|
||||
)
|
||||
|
||||
# Verify execution
|
||||
assert result is not None
|
||||
graph_exec_id = result["id"]
|
||||
|
||||
# Wait for execution to complete
|
||||
executions = await wait_execution(test_user.id, test_graph.id, graph_exec_id)
|
||||
assert len(executions) == 4
|
||||
|
||||
# FindInDictionaryBlock should wait for the input pin to be provided,
|
||||
# Hence executing extraction of "key" from {"key1": "value1", "key2": "value2"}
|
||||
assert executions[3].status == execution.ExecutionStatus.COMPLETED
|
||||
assert executions[3].output_data == {"output": ["World"]}
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_execute_preset_with_clash(server: SpinTestServer):
|
||||
"""
|
||||
Test executing a preset with clashing input data.
|
||||
"""
|
||||
# Create test graph and user
|
||||
nodes = [
|
||||
graph.Node( # 0
|
||||
block_id=AgentInputBlock().id,
|
||||
input_default={"name": "dictionary"},
|
||||
),
|
||||
graph.Node( # 1
|
||||
block_id=AgentInputBlock().id,
|
||||
input_default={"name": "selected_value"},
|
||||
),
|
||||
graph.Node( # 2
|
||||
block_id=StoreValueBlock().id,
|
||||
input_default={"input": {"key1": "Hi", "key2": "Everyone"}},
|
||||
),
|
||||
graph.Node( # 3
|
||||
block_id=FindInDictionaryBlock().id,
|
||||
input_default={"key": "", "input": {}},
|
||||
),
|
||||
]
|
||||
links = [
|
||||
graph.Link(
|
||||
source_id=nodes[0].id,
|
||||
sink_id=nodes[2].id,
|
||||
source_name="result",
|
||||
sink_name="input",
|
||||
),
|
||||
graph.Link(
|
||||
source_id=nodes[1].id,
|
||||
sink_id=nodes[3].id,
|
||||
source_name="result",
|
||||
sink_name="key",
|
||||
),
|
||||
graph.Link(
|
||||
source_id=nodes[2].id,
|
||||
sink_id=nodes[3].id,
|
||||
source_name="output",
|
||||
sink_name="input",
|
||||
),
|
||||
]
|
||||
test_graph = graph.Graph(
|
||||
name="TestGraph",
|
||||
description="Test graph",
|
||||
nodes=nodes,
|
||||
links=links,
|
||||
)
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(server, test_graph, test_user)
|
||||
|
||||
# Create preset with initial values
|
||||
preset = backend.server.v2.library.model.CreateLibraryAgentPresetRequest(
|
||||
name="Test Preset With Clash",
|
||||
description="Test preset with clashing input values",
|
||||
agent_id=test_graph.id,
|
||||
agent_version=test_graph.version,
|
||||
inputs={
|
||||
"dictionary": {"key1": "Hello", "key2": "World"},
|
||||
"selected_value": "key2",
|
||||
},
|
||||
is_active=True,
|
||||
)
|
||||
created_preset = await server.agent_server.test_create_preset(preset, test_user.id)
|
||||
|
||||
# Execute preset with overriding values
|
||||
result = await server.agent_server.test_execute_preset(
|
||||
graph_id=test_graph.id,
|
||||
graph_version=test_graph.version,
|
||||
preset_id=created_preset.id,
|
||||
node_input={"selected_value": "key1"},
|
||||
user_id=test_user.id,
|
||||
)
|
||||
|
||||
# Verify execution
|
||||
assert result is not None, "Result must not be None"
|
||||
graph_exec_id = result["id"]
|
||||
|
||||
# Wait for execution to complete
|
||||
executions = await wait_execution(test_user.id, test_graph.id, graph_exec_id)
|
||||
assert len(executions) == 4
|
||||
|
||||
# FindInDictionaryBlock should wait for the input pin to be provided,
|
||||
# Hence executing extraction of "key" from {"key1": "value1", "key2": "value2"}
|
||||
assert executions[3].status == execution.ExecutionStatus.COMPLETED
|
||||
assert executions[3].output_data == {"output": ["Hello"]}
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
|
||||
async def test_store_listing_graph(server: SpinTestServer):
|
||||
logger.info("Starting test_agent_execution")
|
||||
@@ -344,7 +531,7 @@ async def test_store_listing_graph(server: SpinTestServer):
|
||||
)
|
||||
alt_test_user = admin_user
|
||||
|
||||
data = {"node_input": {"input_1": "Hello", "input_2": "World"}}
|
||||
data = {"input_1": "Hello", "input_2": "World"}
|
||||
graph_exec_id = await execute_graph(
|
||||
server.agent_server,
|
||||
test_graph,
|
||||
|
||||
@@ -34,29 +34,29 @@ def test_disconnect(
|
||||
connection_manager: ConnectionManager, mock_websocket: AsyncMock
|
||||
) -> None:
|
||||
connection_manager.active_connections.add(mock_websocket)
|
||||
connection_manager.subscriptions["test_graph"] = {mock_websocket}
|
||||
connection_manager.subscriptions["test_graph_1"] = {mock_websocket}
|
||||
|
||||
connection_manager.disconnect(mock_websocket)
|
||||
|
||||
assert mock_websocket not in connection_manager.active_connections
|
||||
assert mock_websocket not in connection_manager.subscriptions["test_graph"]
|
||||
assert mock_websocket not in connection_manager.subscriptions["test_graph_1"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_subscribe(
|
||||
connection_manager: ConnectionManager, mock_websocket: AsyncMock
|
||||
) -> None:
|
||||
await connection_manager.subscribe("test_graph", mock_websocket)
|
||||
assert mock_websocket in connection_manager.subscriptions["test_graph"]
|
||||
await connection_manager.subscribe("test_graph", 1, mock_websocket)
|
||||
assert mock_websocket in connection_manager.subscriptions["test_graph_1"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_unsubscribe(
|
||||
connection_manager: ConnectionManager, mock_websocket: AsyncMock
|
||||
) -> None:
|
||||
connection_manager.subscriptions["test_graph"] = {mock_websocket}
|
||||
connection_manager.subscriptions["test_graph_1"] = {mock_websocket}
|
||||
|
||||
await connection_manager.unsubscribe("test_graph", mock_websocket)
|
||||
await connection_manager.unsubscribe("test_graph", 1, mock_websocket)
|
||||
|
||||
assert "test_graph" not in connection_manager.subscriptions
|
||||
|
||||
@@ -65,7 +65,7 @@ async def test_unsubscribe(
|
||||
async def test_send_execution_result(
|
||||
connection_manager: ConnectionManager, mock_websocket: AsyncMock
|
||||
) -> None:
|
||||
connection_manager.subscriptions["test_graph"] = {mock_websocket}
|
||||
connection_manager.subscriptions["test_graph_1"] = {mock_websocket}
|
||||
result: ExecutionResult = ExecutionResult(
|
||||
graph_id="test_graph",
|
||||
graph_version=1,
|
||||
@@ -87,7 +87,7 @@ async def test_send_execution_result(
|
||||
mock_websocket.send_text.assert_called_once_with(
|
||||
WsMessage(
|
||||
method=Methods.EXECUTION_EVENT,
|
||||
channel="test_graph",
|
||||
channel="test_graph_1",
|
||||
data=result.model_dump(),
|
||||
).model_dump_json()
|
||||
)
|
||||
|
||||
@@ -30,7 +30,8 @@ async def test_websocket_router_subscribe(
|
||||
) -> None:
|
||||
mock_websocket.receive_text.side_effect = [
|
||||
WsMessage(
|
||||
method=Methods.SUBSCRIBE, data={"graph_id": "test_graph"}
|
||||
method=Methods.SUBSCRIBE,
|
||||
data={"graph_id": "test_graph", "graph_version": 1},
|
||||
).model_dump_json(),
|
||||
WebSocketDisconnect(),
|
||||
]
|
||||
@@ -40,7 +41,7 @@ async def test_websocket_router_subscribe(
|
||||
)
|
||||
|
||||
mock_manager.connect.assert_called_once_with(mock_websocket)
|
||||
mock_manager.subscribe.assert_called_once_with("test_graph", mock_websocket)
|
||||
mock_manager.subscribe.assert_called_once_with("test_graph", 1, mock_websocket)
|
||||
mock_websocket.send_text.assert_called_once()
|
||||
assert '"method":"subscribe"' in mock_websocket.send_text.call_args[0][0]
|
||||
assert '"success":true' in mock_websocket.send_text.call_args[0][0]
|
||||
@@ -53,7 +54,8 @@ async def test_websocket_router_unsubscribe(
|
||||
) -> None:
|
||||
mock_websocket.receive_text.side_effect = [
|
||||
WsMessage(
|
||||
method=Methods.UNSUBSCRIBE, data={"graph_id": "test_graph"}
|
||||
method=Methods.UNSUBSCRIBE,
|
||||
data={"graph_id": "test_graph", "graph_version": 1},
|
||||
).model_dump_json(),
|
||||
WebSocketDisconnect(),
|
||||
]
|
||||
@@ -63,7 +65,7 @@ async def test_websocket_router_unsubscribe(
|
||||
)
|
||||
|
||||
mock_manager.connect.assert_called_once_with(mock_websocket)
|
||||
mock_manager.unsubscribe.assert_called_once_with("test_graph", mock_websocket)
|
||||
mock_manager.unsubscribe.assert_called_once_with("test_graph", 1, mock_websocket)
|
||||
mock_websocket.send_text.assert_called_once()
|
||||
assert '"method":"unsubscribe"' in mock_websocket.send_text.call_args[0][0]
|
||||
assert '"success":true' in mock_websocket.send_text.call_args[0][0]
|
||||
@@ -94,13 +96,15 @@ async def test_websocket_router_invalid_method(
|
||||
async def test_handle_subscribe_success(
|
||||
mock_websocket: AsyncMock, mock_manager: AsyncMock
|
||||
) -> None:
|
||||
message = WsMessage(method=Methods.SUBSCRIBE, data={"graph_id": "test_graph"})
|
||||
message = WsMessage(
|
||||
method=Methods.SUBSCRIBE, data={"graph_id": "test_graph", "graph_version": 1}
|
||||
)
|
||||
|
||||
await handle_subscribe(
|
||||
cast(WebSocket, mock_websocket), cast(ConnectionManager, mock_manager), message
|
||||
)
|
||||
|
||||
mock_manager.subscribe.assert_called_once_with("test_graph", mock_websocket)
|
||||
mock_manager.subscribe.assert_called_once_with("test_graph", 1, mock_websocket)
|
||||
mock_websocket.send_text.assert_called_once()
|
||||
assert '"method":"subscribe"' in mock_websocket.send_text.call_args[0][0]
|
||||
assert '"success":true' in mock_websocket.send_text.call_args[0][0]
|
||||
@@ -126,13 +130,15 @@ async def test_handle_subscribe_missing_data(
|
||||
async def test_handle_unsubscribe_success(
|
||||
mock_websocket: AsyncMock, mock_manager: AsyncMock
|
||||
) -> None:
|
||||
message = WsMessage(method=Methods.UNSUBSCRIBE, data={"graph_id": "test_graph"})
|
||||
message = WsMessage(
|
||||
method=Methods.UNSUBSCRIBE, data={"graph_id": "test_graph", "graph_version": 1}
|
||||
)
|
||||
|
||||
await handle_unsubscribe(
|
||||
cast(WebSocket, mock_websocket), cast(ConnectionManager, mock_manager), message
|
||||
)
|
||||
|
||||
mock_manager.unsubscribe.assert_called_once_with("test_graph", mock_websocket)
|
||||
mock_manager.unsubscribe.assert_called_once_with("test_graph", 1, mock_websocket)
|
||||
mock_websocket.send_text.assert_called_once()
|
||||
assert '"method":"unsubscribe"' in mock_websocket.send_text.call_args[0][0]
|
||||
assert '"success":true' in mock_websocket.send_text.call_args[0][0]
|
||||
|
||||
@@ -4,7 +4,7 @@ from datetime import datetime
|
||||
|
||||
import prisma.enums
|
||||
from faker import Faker
|
||||
from prisma import Prisma
|
||||
from prisma import Json, Prisma
|
||||
|
||||
faker = Faker()
|
||||
|
||||
@@ -110,8 +110,8 @@ async def main():
|
||||
"agentBlockId": block.id,
|
||||
"agentGraphId": graph.id,
|
||||
"agentGraphVersion": graph.version,
|
||||
"constantInput": "{}",
|
||||
"metadata": "{}",
|
||||
"constantInput": Json({}),
|
||||
"metadata": Json({}),
|
||||
}
|
||||
)
|
||||
agent_nodes.append(node)
|
||||
@@ -140,10 +140,10 @@ async def main():
|
||||
print(f"Inserting {NUM_USERS * MAX_AGENTS_PER_USER} user agents")
|
||||
for user in users:
|
||||
num_agents = random.randint(MIN_AGENTS_PER_USER, MAX_AGENTS_PER_USER)
|
||||
for _ in range(num_agents): # Create 1 UserAgent per user
|
||||
for _ in range(num_agents): # Create 1 LibraryAgent per user
|
||||
graph = random.choice(agent_graphs)
|
||||
preset = random.choice(agent_presets)
|
||||
user_agent = await db.useragent.create(
|
||||
user_agent = await db.libraryagent.create(
|
||||
data={
|
||||
"userId": user.id,
|
||||
"agentId": graph.id,
|
||||
|
||||
@@ -36,6 +36,21 @@ services:
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
rabbitmq:
|
||||
image: rabbitmq:management
|
||||
container_name: rabbitmq
|
||||
healthcheck:
|
||||
test: rabbitmq-diagnostics -q ping
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 10s
|
||||
environment:
|
||||
- RABBITMQ_DEFAULT_USER=rabbitmq_user_default
|
||||
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7 # CHANGE THIS TO A RANDOM PASSWORD IN PRODUCTION -- everywhere lol
|
||||
ports:
|
||||
- "5672:5672"
|
||||
- "15672:15672"
|
||||
|
||||
rest_server:
|
||||
build:
|
||||
@@ -55,6 +70,8 @@ services:
|
||||
condition: service_healthy
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- SUPABASE_URL=http://kong:8000
|
||||
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
|
||||
@@ -62,6 +79,10 @@ services:
|
||||
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
|
||||
- REDIS_HOST=redis
|
||||
- REDIS_PORT=6379
|
||||
- RABBITMQ_HOST=rabbitmq
|
||||
- RABBITMQ_PORT=5672
|
||||
- RABBITMQ_USER=rabbitmq_user_default
|
||||
- RABBITMQ_PASSWORD=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
|
||||
- REDIS_PASSWORD=password
|
||||
- ENABLE_AUTH=true
|
||||
- PYRO_HOST=0.0.0.0
|
||||
@@ -69,7 +90,7 @@ services:
|
||||
- EXECUTIONMANAGER_HOST=executor
|
||||
- FRONTEND_BASE_URL=http://localhost:3000
|
||||
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
|
||||
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
|
||||
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
|
||||
ports:
|
||||
- "8006:8006"
|
||||
- "8003:8003" # execution scheduler
|
||||
@@ -90,6 +111,8 @@ services:
|
||||
depends_on:
|
||||
redis:
|
||||
condition: service_healthy
|
||||
rabbitmq:
|
||||
condition: service_healthy
|
||||
db:
|
||||
condition: service_healthy
|
||||
migrate:
|
||||
@@ -102,10 +125,14 @@ services:
|
||||
- REDIS_HOST=redis
|
||||
- REDIS_PORT=6379
|
||||
- REDIS_PASSWORD=password
|
||||
- RABBITMQ_HOST=rabbitmq
|
||||
- RABBITMQ_PORT=5672
|
||||
- RABBITMQ_USER=rabbitmq_user_default
|
||||
- RABBITMQ_PASSWORD=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
|
||||
- ENABLE_AUTH=true
|
||||
- PYRO_HOST=0.0.0.0
|
||||
- AGENTSERVER_HOST=rest_server
|
||||
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
|
||||
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
|
||||
ports:
|
||||
- "8002:8000"
|
||||
networks:
|
||||
@@ -127,14 +154,20 @@ services:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
# rabbitmq:
|
||||
# condition: service_healthy
|
||||
migrate:
|
||||
condition: service_completed_successfully
|
||||
condition: service_completed_successfully
|
||||
environment:
|
||||
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
|
||||
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
|
||||
- REDIS_HOST=redis
|
||||
- REDIS_PORT=6379
|
||||
- REDIS_PASSWORD=password
|
||||
# - RABBITMQ_HOST=rabbitmq # TODO: Uncomment this when we have a need for it in websocket (like nofifying when stuff went down)
|
||||
# - RABBITMQ_PORT=5672
|
||||
# - RABBITMQ_USER=rabbitmq_user_default
|
||||
# - RABBITMQ_PASSWORD=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
|
||||
- ENABLE_AUTH=true
|
||||
- PYRO_HOST=0.0.0.0
|
||||
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
|
||||
|
||||
@@ -33,6 +33,12 @@ services:
|
||||
file: ./docker-compose.platform.yml
|
||||
service: redis
|
||||
|
||||
rabbitmq:
|
||||
<<: *agpt-services
|
||||
extends:
|
||||
file: ./docker-compose.platform.yml
|
||||
service: rabbitmq
|
||||
|
||||
rest_server:
|
||||
<<: *agpt-services
|
||||
extends:
|
||||
@@ -146,3 +152,4 @@ services:
|
||||
- db
|
||||
- vector
|
||||
- redis
|
||||
- rabbitmq
|
||||
|
||||
@@ -45,7 +45,7 @@
|
||||
"@radix-ui/react-toast": "^1.2.5",
|
||||
"@radix-ui/react-tooltip": "^1.1.7",
|
||||
"@sentry/nextjs": "^8",
|
||||
"@stripe/stripe-js": "^5.5.0",
|
||||
"@stripe/stripe-js": "^5.6.0",
|
||||
"@supabase/ssr": "^0.5.2",
|
||||
"@supabase/supabase-js": "^2.48.1",
|
||||
"@tanstack/react-table": "^8.20.6",
|
||||
@@ -60,9 +60,9 @@
|
||||
"dotenv": "^16.4.7",
|
||||
"elliptic": "6.6.1",
|
||||
"embla-carousel-react": "^8.5.2",
|
||||
"framer-motion": "^11.16.0",
|
||||
"framer-motion": "^12.0.11",
|
||||
"geist": "^1.3.1",
|
||||
"launchdarkly-react-client-sdk": "^3.6.0",
|
||||
"launchdarkly-react-client-sdk": "^3.6.1",
|
||||
"lucide-react": "^0.474.0",
|
||||
"moment": "^2.30.1",
|
||||
"next": "^14.2.21",
|
||||
@@ -75,7 +75,7 @@
|
||||
"react-markdown": "^9.0.3",
|
||||
"react-modal": "^3.16.3",
|
||||
"react-shepherd": "^6.1.7",
|
||||
"recharts": "^2.14.1",
|
||||
"recharts": "^2.15.1",
|
||||
"tailwind-merge": "^2.6.0",
|
||||
"tailwindcss-animate": "^1.0.7",
|
||||
"uuid": "^11.0.5",
|
||||
@@ -83,24 +83,24 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@chromatic-com/storybook": "^3.2.4",
|
||||
"@playwright/test": "^1.50.0",
|
||||
"@storybook/addon-a11y": "^8.5.2",
|
||||
"@storybook/addon-essentials": "^8.5.2",
|
||||
"@storybook/addon-interactions": "^8.5.2",
|
||||
"@storybook/addon-links": "^8.5.2",
|
||||
"@storybook/addon-onboarding": "^8.5.2",
|
||||
"@storybook/blocks": "^8.5.2",
|
||||
"@storybook/nextjs": "^8.5.2",
|
||||
"@playwright/test": "^1.50.1",
|
||||
"@storybook/addon-a11y": "^8.5.3",
|
||||
"@storybook/addon-essentials": "^8.5.3",
|
||||
"@storybook/addon-interactions": "^8.5.3",
|
||||
"@storybook/addon-links": "^8.5.3",
|
||||
"@storybook/addon-onboarding": "^8.5.3",
|
||||
"@storybook/blocks": "^8.5.3",
|
||||
"@storybook/nextjs": "^8.5.3",
|
||||
"@storybook/react": "^8.3.5",
|
||||
"@storybook/test": "^8.3.5",
|
||||
"@storybook/test-runner": "^0.21.0",
|
||||
"@types/negotiator": "^0.6.3",
|
||||
"@types/node": "^22.10.10",
|
||||
"@types/node": "^22.13.0",
|
||||
"@types/react": "^18",
|
||||
"@types/react-dom": "^18",
|
||||
"@types/react-modal": "^3.16.3",
|
||||
"axe-playwright": "^2.0.3",
|
||||
"chromatic": "^11.25.1",
|
||||
"chromatic": "^11.25.2",
|
||||
"concurrently": "^9.1.2",
|
||||
"eslint": "^8",
|
||||
"eslint-config-next": "15.1.6",
|
||||
@@ -110,7 +110,7 @@
|
||||
"postcss": "^8",
|
||||
"prettier": "^3.3.3",
|
||||
"prettier-plugin-tailwindcss": "^0.6.11",
|
||||
"storybook": "^8.5.2",
|
||||
"storybook": "^8.5.3",
|
||||
"tailwindcss": "^3.4.17",
|
||||
"typescript": "^5"
|
||||
},
|
||||
|
||||
@@ -10,6 +10,7 @@ export default function Home() {
|
||||
<FlowEditor
|
||||
className="flow-container"
|
||||
flowID={query.get("flowID") ?? undefined}
|
||||
flowVersion={query.get("flowVersion") ?? undefined}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -66,7 +66,7 @@ export default async function RootLayout({
|
||||
{
|
||||
icon: IconType.Edit,
|
||||
text: "Edit profile",
|
||||
href: "/marketplace/profile",
|
||||
href: "/profile",
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -75,7 +75,7 @@ export default async function RootLayout({
|
||||
{
|
||||
icon: IconType.LayoutDashboard,
|
||||
text: "Creator Dashboard",
|
||||
href: "/marketplace/dashboard",
|
||||
href: "/profile/dashboard",
|
||||
},
|
||||
{
|
||||
icon: IconType.UploadCloud,
|
||||
@@ -88,7 +88,7 @@ export default async function RootLayout({
|
||||
{
|
||||
icon: IconType.Settings,
|
||||
text: "Settings",
|
||||
href: "/marketplace/settings",
|
||||
href: "/profile/settings",
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import * as React from "react";
|
||||
import { HeroSection } from "@/components/agptui/composite/HeroSection";
|
||||
import {
|
||||
FeaturedSection,
|
||||
FeaturedAgent,
|
||||
} from "@/components/agptui/composite/FeaturedSection";
|
||||
import { FeaturedSection } from "@/components/agptui/composite/FeaturedSection";
|
||||
import {
|
||||
AgentsSection,
|
||||
Agent,
|
||||
@@ -155,9 +152,7 @@ export default async function Page({}: {}) {
|
||||
<div className="mx-auto w-screen max-w-[1360px]">
|
||||
<main className="px-4">
|
||||
<HeroSection />
|
||||
<FeaturedSection
|
||||
featuredAgents={featuredAgents.agents as FeaturedAgent[]}
|
||||
/>
|
||||
<FeaturedSection featuredAgents={featuredAgents.agents} />
|
||||
<Separator />
|
||||
<AgentsSection
|
||||
sectionTitle="Top Agents"
|
||||
|
||||
@@ -37,8 +37,8 @@ const Monitor = () => {
|
||||
);
|
||||
|
||||
const fetchAgents = useCallback(() => {
|
||||
api.listLibraryAgents().then((agent) => {
|
||||
setFlows(agent);
|
||||
api.listLibraryAgents().then((agents) => {
|
||||
setFlows(agents);
|
||||
});
|
||||
api.getExecutions().then((executions) => {
|
||||
setExecutions(executions);
|
||||
|
||||
@@ -3,5 +3,6 @@
|
||||
import { redirect } from "next/navigation";
|
||||
|
||||
export default function Page() {
|
||||
// Redirects to marketplace
|
||||
redirect("/marketplace");
|
||||
}
|
||||
|
||||
@@ -6,9 +6,25 @@ import { useBackendAPI } from "@/lib/autogpt-server-api/context";
|
||||
import { useSearchParams, useRouter } from "next/navigation";
|
||||
import { useToast } from "@/components/ui/use-toast";
|
||||
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
|
||||
export default function CreditsPage() {
|
||||
const api = useBackendAPI();
|
||||
const { requestTopUp, autoTopUpConfig, updateAutoTopUpConfig } = useCredits();
|
||||
const {
|
||||
requestTopUp,
|
||||
autoTopUpConfig,
|
||||
updateAutoTopUpConfig,
|
||||
transactionHistory,
|
||||
fetchTransactionHistory,
|
||||
formatCredits,
|
||||
} = useCredits();
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const topupStatus = searchParams.get("topup") as "success" | "cancel" | null;
|
||||
@@ -44,7 +60,8 @@ export default function CreditsPage() {
|
||||
const submitTopUp = (e: React.FormEvent<HTMLFormElement>) => {
|
||||
e.preventDefault();
|
||||
const form = e.currentTarget;
|
||||
const amount = parseInt(new FormData(form).get("topUpAmount") as string);
|
||||
const amount =
|
||||
parseInt(new FormData(form).get("topUpAmount") as string) * 100;
|
||||
toastOnFail("request top-up", () => requestTopUp(amount));
|
||||
};
|
||||
|
||||
@@ -52,8 +69,8 @@ export default function CreditsPage() {
|
||||
e.preventDefault();
|
||||
const form = e.currentTarget;
|
||||
const formData = new FormData(form);
|
||||
const amount = parseInt(formData.get("topUpAmount") as string);
|
||||
const threshold = parseInt(formData.get("threshold") as string);
|
||||
const amount = parseInt(formData.get("topUpAmount") as string) * 100;
|
||||
const threshold = parseInt(formData.get("threshold") as string) * 100;
|
||||
toastOnFail("update auto top-up config", () =>
|
||||
updateAutoTopUpConfig(amount, threshold).then(() => {
|
||||
toast({ title: "Auto top-up config updated! 🎉" });
|
||||
@@ -64,7 +81,7 @@ export default function CreditsPage() {
|
||||
return (
|
||||
<div className="w-full min-w-[800px] px-4 sm:px-8">
|
||||
<h1 className="mb-6 text-[28px] font-normal text-neutral-900 dark:text-neutral-100 sm:mb-8 sm:text-[35px]">
|
||||
Credits
|
||||
Billing
|
||||
</h1>
|
||||
|
||||
<div className="grid grid-cols-1 gap-8 lg:grid-cols-2">
|
||||
@@ -93,16 +110,16 @@ export default function CreditsPage() {
|
||||
htmlFor="topUpAmount"
|
||||
className="mb-1 block text-neutral-700"
|
||||
>
|
||||
Top-up Amount (Credits)
|
||||
Top-up amount (USD), minimum $5:
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
id="topUpAmount"
|
||||
name="topUpAmount"
|
||||
placeholder="Enter top-up amount"
|
||||
min="500"
|
||||
step="100"
|
||||
defaultValue={500}
|
||||
min="5"
|
||||
step="1"
|
||||
defaultValue={5}
|
||||
className="w-full rounded-md border border-slate-200 px-4 py-2 dark:border-slate-700 dark:bg-slate-800"
|
||||
required
|
||||
/>
|
||||
@@ -115,49 +132,83 @@ export default function CreditsPage() {
|
||||
|
||||
{/* Auto Top-up Form */}
|
||||
<form onSubmit={submitAutoTopUpConfig} className="mt-6 space-y-4">
|
||||
<h3 className="text-lg font-medium">Auto Top-up Configuration</h3>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="autoTopUpAmount"
|
||||
className="mb-1 block text-neutral-700"
|
||||
>
|
||||
Auto Top-up Amount (Credits)
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
id="autoTopUpAmount"
|
||||
name="topUpAmount"
|
||||
defaultValue={autoTopUpConfig?.amount || ""}
|
||||
placeholder="Enter auto top-up amount"
|
||||
step="100"
|
||||
className="w-full rounded-md border border-slate-200 px-4 py-2 dark:border-slate-700 dark:bg-slate-800"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
<h3 className="text-lg font-medium">Automatic Refill Settings</h3>
|
||||
|
||||
<div>
|
||||
<label
|
||||
htmlFor="threshold"
|
||||
className="mb-1 block text-neutral-700"
|
||||
>
|
||||
Threshold (Credits)
|
||||
When my balance goes below this amount:
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
id="threshold"
|
||||
name="threshold"
|
||||
defaultValue={autoTopUpConfig?.threshold || ""}
|
||||
placeholder="Enter threshold value"
|
||||
step="100"
|
||||
defaultValue={
|
||||
autoTopUpConfig?.threshold
|
||||
? autoTopUpConfig.threshold / 100
|
||||
: ""
|
||||
}
|
||||
placeholder="Refill threshold, minimum $5"
|
||||
min="5"
|
||||
step="1"
|
||||
className="w-full rounded-md border border-slate-200 px-4 py-2 dark:border-slate-700 dark:bg-slate-800"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
|
||||
<Button type="submit" className="w-full">
|
||||
Save
|
||||
</Button>
|
||||
<div>
|
||||
<label
|
||||
htmlFor="autoTopUpAmount"
|
||||
className="mb-1 block text-neutral-700"
|
||||
>
|
||||
Automatically refill my balance with this amount:
|
||||
</label>
|
||||
<input
|
||||
type="number"
|
||||
id="autoTopUpAmount"
|
||||
name="topUpAmount"
|
||||
defaultValue={
|
||||
autoTopUpConfig?.amount ? autoTopUpConfig.amount / 100 : ""
|
||||
}
|
||||
placeholder="Refill amount, minimum $5"
|
||||
min="5"
|
||||
step="1"
|
||||
className="w-full rounded-md border border-slate-200 px-4 py-2 dark:border-slate-700 dark:bg-slate-800"
|
||||
required
|
||||
/>
|
||||
</div>
|
||||
|
||||
<p className="text-sm">
|
||||
<b>Note:</b> For your safety, we will top up your balance{" "}
|
||||
<b>at most once</b> per agent execution to prevent unintended
|
||||
excessive charges. Therefore, ensure that the automatic top-up
|
||||
amount is sufficient for your agent's operation.
|
||||
</p>
|
||||
|
||||
{autoTopUpConfig?.amount ? (
|
||||
<>
|
||||
<Button type="submit" className="w-full">
|
||||
Save Changes
|
||||
</Button>
|
||||
<Button
|
||||
className="w-full"
|
||||
variant="destructive"
|
||||
onClick={() =>
|
||||
updateAutoTopUpConfig(0, 0).then(() => {
|
||||
toast({ title: "Auto top-up config disabled! 🎉" });
|
||||
})
|
||||
}
|
||||
>
|
||||
Disable Auto-Refill
|
||||
</Button>
|
||||
</>
|
||||
) : (
|
||||
<Button type="submit" className="w-full">
|
||||
Enable Auto-Refill
|
||||
</Button>
|
||||
)}
|
||||
</form>
|
||||
</div>
|
||||
|
||||
@@ -179,6 +230,60 @@ export default function CreditsPage() {
|
||||
>
|
||||
Open Portal
|
||||
</Button>
|
||||
|
||||
{/* Transaction History */}
|
||||
<h2 className="mt-6 text-lg">Transaction History</h2>
|
||||
<br />
|
||||
<p className="text-neutral-600">
|
||||
Running balance might not be ordered accurately when concurrent
|
||||
executions are happening.
|
||||
</p>
|
||||
<br />
|
||||
{transactionHistory.transactions.length === 0 && (
|
||||
<p className="text-neutral-600">No transactions found.</p>
|
||||
)}
|
||||
<Table
|
||||
className={
|
||||
transactionHistory.transactions.length === 0 ? "hidden" : ""
|
||||
}
|
||||
>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Date</TableHead>
|
||||
<TableHead>Description</TableHead>
|
||||
<TableHead>Amount</TableHead>
|
||||
<TableHead>Balance</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{transactionHistory.transactions.map((transaction, i) => (
|
||||
<TableRow key={i}>
|
||||
<TableCell>
|
||||
{new Date(transaction.transaction_time).toLocaleString()}
|
||||
</TableCell>
|
||||
<TableCell>{transaction.description}</TableCell>
|
||||
{/* Make it green if it's positive, red if it's negative */}
|
||||
<TableCell
|
||||
className={
|
||||
transaction.amount > 0 ? "text-green-500" : "text-red-500"
|
||||
}
|
||||
>
|
||||
<b>{formatCredits(transaction.amount)}</b>
|
||||
</TableCell>
|
||||
<TableCell>{formatCredits(transaction.balance)}</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
{transactionHistory.next_transaction_time && (
|
||||
<Button
|
||||
type="submit"
|
||||
className="w-full"
|
||||
onClick={() => fetchTransactionHistory()}
|
||||
>
|
||||
Load More
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -2,7 +2,6 @@
|
||||
|
||||
import * as React from "react";
|
||||
import { AgentTable } from "@/components/agptui/AgentTable";
|
||||
import { AgentTableRowProps } from "@/components/agptui/AgentTableRow";
|
||||
import { Button } from "@/components/agptui/Button";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { StatusType } from "@/components/agptui/Status";
|
||||
@@ -2,10 +2,9 @@
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useCallback, useContext, useMemo, useState } from "react";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { useToast } from "@/components/ui/use-toast";
|
||||
import { IconKey, IconUser } from "@/components/ui/icons";
|
||||
import { LogOutIcon, Trash2Icon } from "lucide-react";
|
||||
import { Trash2Icon } from "lucide-react";
|
||||
import { providerIcons } from "@/components/integrations/credentials-input";
|
||||
import { CredentialsProvidersContext } from "@/components/integrations/credentials-provider";
|
||||
import {
|
||||
@@ -5,13 +5,13 @@ export default function Layout({ children }: { children: React.ReactNode }) {
|
||||
const sidebarLinkGroups = [
|
||||
{
|
||||
links: [
|
||||
{ text: "Creator Dashboard", href: "/marketplace/dashboard" },
|
||||
{ text: "Agent dashboard", href: "/marketplace/agent-dashboard" },
|
||||
{ text: "Credits", href: "/marketplace/credits" },
|
||||
{ text: "Integrations", href: "/marketplace/integrations" },
|
||||
{ text: "API Keys", href: "/marketplace/api_keys" },
|
||||
{ text: "Profile", href: "/marketplace/profile" },
|
||||
{ text: "Settings", href: "/marketplace/settings" },
|
||||
{ text: "Creator Dashboard", href: "/profile/dashboard" },
|
||||
{ text: "Agent dashboard", href: "/profile/agent-dashboard" },
|
||||
{ text: "Billing", href: "/profile/credits" },
|
||||
{ text: "Integrations", href: "/profile/integrations" },
|
||||
{ text: "API Keys", href: "/profile/api_keys" },
|
||||
{ text: "Profile", href: "/profile" },
|
||||
{ text: "Settings", href: "/profile/settings" },
|
||||
],
|
||||
},
|
||||
];
|
||||
@@ -1,242 +0,0 @@
|
||||
"use client";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useCallback, useContext, useMemo, useState } from "react";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
import { useToast } from "@/components/ui/use-toast";
|
||||
import { IconKey, IconUser } from "@/components/ui/icons";
|
||||
import { LogOutIcon, Trash2Icon } from "lucide-react";
|
||||
import { providerIcons } from "@/components/integrations/credentials-input";
|
||||
import { CredentialsProvidersContext } from "@/components/integrations/credentials-provider";
|
||||
import {
|
||||
Table,
|
||||
TableBody,
|
||||
TableCell,
|
||||
TableHead,
|
||||
TableHeader,
|
||||
TableRow,
|
||||
} from "@/components/ui/table";
|
||||
import { CredentialsProviderName } from "@/lib/autogpt-server-api";
|
||||
import {
|
||||
AlertDialog,
|
||||
AlertDialogAction,
|
||||
AlertDialogCancel,
|
||||
AlertDialogContent,
|
||||
AlertDialogDescription,
|
||||
AlertDialogFooter,
|
||||
AlertDialogHeader,
|
||||
AlertDialogTitle,
|
||||
} from "@/components/ui/alert-dialog";
|
||||
import useSupabase from "@/hooks/useSupabase";
|
||||
import Spinner from "@/components/Spinner";
|
||||
|
||||
export default function PrivatePage() {
|
||||
const { supabase, user, isUserLoading } = useSupabase();
|
||||
const router = useRouter();
|
||||
const providers = useContext(CredentialsProvidersContext);
|
||||
const { toast } = useToast();
|
||||
|
||||
const [confirmationDialogState, setConfirmationDialogState] = useState<
|
||||
| {
|
||||
open: true;
|
||||
message: string;
|
||||
onConfirm: () => void;
|
||||
onReject: () => void;
|
||||
}
|
||||
| { open: false }
|
||||
>({ open: false });
|
||||
|
||||
const removeCredentials = useCallback(
|
||||
async (
|
||||
provider: CredentialsProviderName,
|
||||
id: string,
|
||||
force: boolean = false,
|
||||
) => {
|
||||
if (!providers || !providers[provider]) {
|
||||
return;
|
||||
}
|
||||
|
||||
let result;
|
||||
try {
|
||||
result = await providers[provider].deleteCredentials(id, force);
|
||||
} catch (error: any) {
|
||||
toast({
|
||||
title: "Something went wrong when deleting credentials: " + error,
|
||||
variant: "destructive",
|
||||
duration: 2000,
|
||||
});
|
||||
setConfirmationDialogState({ open: false });
|
||||
return;
|
||||
}
|
||||
if (result.deleted) {
|
||||
if (result.revoked) {
|
||||
toast({
|
||||
title: "Credentials deleted",
|
||||
duration: 2000,
|
||||
});
|
||||
} else {
|
||||
toast({
|
||||
title: "Credentials deleted from AutoGPT",
|
||||
description: `You may also manually remove the connection to AutoGPT at ${provider}!`,
|
||||
duration: 3000,
|
||||
});
|
||||
}
|
||||
setConfirmationDialogState({ open: false });
|
||||
} else if (result.need_confirmation) {
|
||||
setConfirmationDialogState({
|
||||
open: true,
|
||||
message: result.message,
|
||||
onConfirm: () => removeCredentials(provider, id, true),
|
||||
onReject: () => setConfirmationDialogState({ open: false }),
|
||||
});
|
||||
}
|
||||
},
|
||||
[providers, toast],
|
||||
);
|
||||
|
||||
//TODO: remove when the way system credentials are handled is updated
|
||||
// This contains ids for built-in "Use Credits for X" credentials
|
||||
const hiddenCredentials = useMemo(
|
||||
() => [
|
||||
"744fdc56-071a-4761-b5a5-0af0ce10a2b5", // Ollama
|
||||
"fdb7f412-f519-48d1-9b5f-d2f73d0e01fe", // Revid
|
||||
"760f84fc-b270-42de-91f6-08efe1b512d0", // Ideogram
|
||||
"6b9fc200-4726-4973-86c9-cd526f5ce5db", // Replicate
|
||||
"53c25cb8-e3ee-465c-a4d1-e75a4c899c2a", // OpenAI
|
||||
"24e5d942-d9e3-4798-8151-90143ee55629", // Anthropic
|
||||
"4ec22295-8f97-4dd1-b42b-2c6957a02545", // Groq
|
||||
"7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID
|
||||
"7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina
|
||||
"66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech
|
||||
"b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router
|
||||
"6c0f5bd0-9008-4638-9d79-4b40b631803e", // FAL
|
||||
"96153e04-9c6c-4486-895f-5bb683b1ecec", // Exa
|
||||
"78d19fd7-4d59-4a16-8277-3ce310acf2b7", // E2B
|
||||
"96b83908-2789-4dec-9968-18f0ece4ceb3", // Nvidia
|
||||
"ed55ac19-356e-4243-a6cb-bc599e9b716f", // Mem0
|
||||
],
|
||||
[],
|
||||
);
|
||||
|
||||
if (isUserLoading) {
|
||||
return <Spinner />;
|
||||
}
|
||||
|
||||
if (!user || !supabase) {
|
||||
router.push("/login");
|
||||
return null;
|
||||
}
|
||||
|
||||
const allCredentials = providers
|
||||
? Object.values(providers).flatMap((provider) =>
|
||||
[
|
||||
...provider.savedOAuthCredentials,
|
||||
...provider.savedApiKeys,
|
||||
...provider.savedUserPasswordCredentials,
|
||||
]
|
||||
.filter((cred) => !hiddenCredentials.includes(cred.id))
|
||||
.map((credentials) => ({
|
||||
...credentials,
|
||||
provider: provider.provider,
|
||||
providerName: provider.providerName,
|
||||
ProviderIcon: providerIcons[provider.provider],
|
||||
TypeIcon: {
|
||||
oauth2: IconUser,
|
||||
api_key: IconKey,
|
||||
user_password: IconKey,
|
||||
}[credentials.type],
|
||||
})),
|
||||
)
|
||||
: [];
|
||||
|
||||
return (
|
||||
<div className="mx-auto max-w-3xl md:py-8">
|
||||
<div className="flex items-center justify-between">
|
||||
<p>
|
||||
Hello <span data-testid="profile-email">{user.email}</span>
|
||||
</p>
|
||||
<Button onClick={() => supabase.auth.signOut()}>
|
||||
<LogOutIcon className="mr-1.5 size-4" />
|
||||
Log out
|
||||
</Button>
|
||||
</div>
|
||||
<Separator className="my-6" />
|
||||
<h2 className="mb-4 text-lg">Connections & Credentials</h2>
|
||||
<Table>
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Provider</TableHead>
|
||||
<TableHead>Name</TableHead>
|
||||
<TableHead>Actions</TableHead>
|
||||
</TableRow>
|
||||
</TableHeader>
|
||||
<TableBody>
|
||||
{allCredentials.map((cred) => (
|
||||
<TableRow key={cred.id}>
|
||||
<TableCell>
|
||||
<div className="flex items-center space-x-1.5">
|
||||
<cred.ProviderIcon className="h-4 w-4" />
|
||||
<strong>{cred.providerName}</strong>
|
||||
</div>
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className="flex h-full items-center space-x-1.5">
|
||||
<cred.TypeIcon />
|
||||
<span>{cred.title || cred.username}</span>
|
||||
</div>
|
||||
<small className="text-muted-foreground">
|
||||
{
|
||||
{
|
||||
oauth2: "OAuth2 credentials",
|
||||
api_key: "API key",
|
||||
user_password: "User password",
|
||||
}[cred.type]
|
||||
}{" "}
|
||||
- <code>{cred.id}</code>
|
||||
</small>
|
||||
</TableCell>
|
||||
<TableCell className="w-0 whitespace-nowrap">
|
||||
<Button
|
||||
variant="destructive"
|
||||
onClick={() => removeCredentials(cred.provider, cred.id)}
|
||||
>
|
||||
<Trash2Icon className="mr-1.5 size-4" /> Delete
|
||||
</Button>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
))}
|
||||
</TableBody>
|
||||
</Table>
|
||||
|
||||
<AlertDialog open={confirmationDialogState.open}>
|
||||
<AlertDialogContent>
|
||||
<AlertDialogHeader>
|
||||
<AlertDialogTitle>Are you sure?</AlertDialogTitle>
|
||||
<AlertDialogDescription>
|
||||
{confirmationDialogState.open && confirmationDialogState.message}
|
||||
</AlertDialogDescription>
|
||||
</AlertDialogHeader>
|
||||
<AlertDialogFooter>
|
||||
<AlertDialogCancel
|
||||
onClick={() =>
|
||||
confirmationDialogState.open &&
|
||||
confirmationDialogState.onReject()
|
||||
}
|
||||
>
|
||||
Cancel
|
||||
</AlertDialogCancel>
|
||||
<AlertDialogAction
|
||||
variant="destructive"
|
||||
onClick={() =>
|
||||
confirmationDialogState.open &&
|
||||
confirmationDialogState.onConfirm()
|
||||
}
|
||||
>
|
||||
Continue
|
||||
</AlertDialogAction>
|
||||
</AlertDialogFooter>
|
||||
</AlertDialogContent>
|
||||
</AlertDialog>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -109,78 +109,80 @@ export default function ResetPasswordPage() {
|
||||
}
|
||||
|
||||
return (
|
||||
<AuthCard>
|
||||
<AuthHeader>Reset Password</AuthHeader>
|
||||
{user ? (
|
||||
<form onSubmit={changePasswordForm.handleSubmit(onChangePassword)}>
|
||||
<Form {...changePasswordForm}>
|
||||
<FormField
|
||||
control={changePasswordForm.control}
|
||||
name="password"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Password</FormLabel>
|
||||
<FormControl>
|
||||
<PasswordInput {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={changePasswordForm.control}
|
||||
name="confirmPassword"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Confirm Password</FormLabel>
|
||||
<FormControl>
|
||||
<PasswordInput {...field} />
|
||||
</FormControl>
|
||||
<FormDescription className="text-sm font-normal leading-tight text-slate-500">
|
||||
Password needs to be at least 6 characters long
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<AuthButton
|
||||
onClick={() => onChangePassword(changePasswordForm.getValues())}
|
||||
isLoading={isLoading}
|
||||
type="submit"
|
||||
>
|
||||
Update password
|
||||
</AuthButton>
|
||||
<AuthFeedback message={feedback} isError={isError} />
|
||||
</Form>
|
||||
</form>
|
||||
) : (
|
||||
<form onSubmit={sendEmailForm.handleSubmit(onSendEmail)}>
|
||||
<Form {...sendEmailForm}>
|
||||
<FormField
|
||||
control={sendEmailForm.control}
|
||||
name="email"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Email</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder="m@example.com" {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<AuthButton
|
||||
onClick={() => onSendEmail(sendEmailForm.getValues())}
|
||||
isLoading={isLoading}
|
||||
disabled={disabled}
|
||||
type="submit"
|
||||
>
|
||||
Send reset email
|
||||
</AuthButton>
|
||||
<AuthFeedback message={feedback} isError={isError} />
|
||||
</Form>
|
||||
</form>
|
||||
)}
|
||||
</AuthCard>
|
||||
<div className="flex min-h-screen items-center justify-center">
|
||||
<AuthCard>
|
||||
<AuthHeader>Reset Password</AuthHeader>
|
||||
{user ? (
|
||||
<form onSubmit={changePasswordForm.handleSubmit(onChangePassword)}>
|
||||
<Form {...changePasswordForm}>
|
||||
<FormField
|
||||
control={changePasswordForm.control}
|
||||
name="password"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Password</FormLabel>
|
||||
<FormControl>
|
||||
<PasswordInput {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={changePasswordForm.control}
|
||||
name="confirmPassword"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Confirm Password</FormLabel>
|
||||
<FormControl>
|
||||
<PasswordInput {...field} />
|
||||
</FormControl>
|
||||
<FormDescription className="text-sm font-normal leading-tight text-slate-500">
|
||||
Password needs to be at least 6 characters long
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<AuthButton
|
||||
onClick={() => onChangePassword(changePasswordForm.getValues())}
|
||||
isLoading={isLoading}
|
||||
type="submit"
|
||||
>
|
||||
Update password
|
||||
</AuthButton>
|
||||
<AuthFeedback message={feedback} isError={isError} />
|
||||
</Form>
|
||||
</form>
|
||||
) : (
|
||||
<form onSubmit={sendEmailForm.handleSubmit(onSendEmail)}>
|
||||
<Form {...sendEmailForm}>
|
||||
<FormField
|
||||
control={sendEmailForm.control}
|
||||
name="email"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-6">
|
||||
<FormLabel>Email</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder="m@example.com" {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<AuthButton
|
||||
onClick={() => onSendEmail(sendEmailForm.getValues())}
|
||||
isLoading={isLoading}
|
||||
disabled={disabled}
|
||||
type="submit"
|
||||
>
|
||||
Send reset email
|
||||
</AuthButton>
|
||||
<AuthFeedback message={feedback} isError={isError} />
|
||||
</Form>
|
||||
</form>
|
||||
)}
|
||||
</AuthCard>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -25,10 +25,10 @@ export async function signup(values: z.infer<typeof signupFormSchema>) {
|
||||
console.error("Error signing up", error);
|
||||
// FIXME: supabase doesn't return the correct error message for this case
|
||||
if (error.message.includes("P0001")) {
|
||||
return "Please join our waitlist for your turn: https://agpt.co/waitlist";
|
||||
return "not_allowed";
|
||||
}
|
||||
if (error.code?.includes("user_already_exists")) {
|
||||
redirect("/login");
|
||||
if (error.code === "user_already_exists") {
|
||||
return "user_already_exists";
|
||||
}
|
||||
return error.message;
|
||||
}
|
||||
@@ -36,9 +36,8 @@ export async function signup(values: z.infer<typeof signupFormSchema>) {
|
||||
if (data.session) {
|
||||
await supabase.auth.setSession(data.session);
|
||||
}
|
||||
console.log("Signed up");
|
||||
revalidatePath("/", "layout");
|
||||
redirect("/marketplace/profile");
|
||||
redirect("/");
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ export default function SignupPage() {
|
||||
const [feedback, setFeedback] = useState<string | null>(null);
|
||||
const router = useRouter();
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
//TODO: Remove after closed beta
|
||||
const [showWaitlistPrompt, setShowWaitlistPrompt] = useState(false);
|
||||
|
||||
const form = useForm<z.infer<typeof signupFormSchema>>({
|
||||
@@ -58,10 +59,16 @@ export default function SignupPage() {
|
||||
const error = await signup(data);
|
||||
setIsLoading(false);
|
||||
if (error) {
|
||||
setShowWaitlistPrompt(true);
|
||||
if (error === "user_already_exists") {
|
||||
setFeedback("User with this email already exists");
|
||||
return;
|
||||
} else {
|
||||
setShowWaitlistPrompt(true);
|
||||
}
|
||||
return;
|
||||
}
|
||||
setFeedback(null);
|
||||
setShowWaitlistPrompt(false);
|
||||
},
|
||||
[form],
|
||||
);
|
||||
|
||||
@@ -40,7 +40,6 @@ export function CustomEdge({
|
||||
targetY,
|
||||
markerEnd,
|
||||
}: EdgeProps<CustomEdge>) {
|
||||
const [isHovered, setIsHovered] = useState(false);
|
||||
const [beads, setBeads] = useState<{
|
||||
beads: Bead[];
|
||||
created: number;
|
||||
@@ -182,13 +181,7 @@ export function CustomEdge({
|
||||
<BaseEdge
|
||||
path={svgPath}
|
||||
markerEnd={markerEnd}
|
||||
style={{
|
||||
strokeWidth: (isHovered ? 3 : 2) + (data?.isStatic ? 0.5 : 0),
|
||||
stroke:
|
||||
(data?.edgeColor ?? "#555555") +
|
||||
(selected || isHovered ? "" : "80"),
|
||||
strokeDasharray: data?.isStatic ? "5 3" : "0",
|
||||
}}
|
||||
className={`transition-all duration-200 ${data?.isStatic ? "[stroke-dasharray:5_3]" : "[stroke-dasharray:0]"} [stroke-width:${data?.isStatic ? 2.5 : 2}px] hover:[stroke-width:${data?.isStatic ? 3.5 : 3}px] ${selected ? `[stroke:${data?.edgeColor ?? "#555555"}]` : `[stroke:${data?.edgeColor ?? "#555555"}80] hover:[stroke:${data?.edgeColor ?? "#555555"}]`}`}
|
||||
/>
|
||||
<path
|
||||
d={svgPath}
|
||||
@@ -196,8 +189,6 @@ export function CustomEdge({
|
||||
strokeOpacity={0}
|
||||
strokeWidth={20}
|
||||
className="react-flow__edge-interaction"
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
/>
|
||||
<EdgeLabelRenderer>
|
||||
<div
|
||||
@@ -209,9 +200,7 @@ export function CustomEdge({
|
||||
className="edge-label-renderer"
|
||||
>
|
||||
<button
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
className={`edge-label-button ${isHovered ? "visible" : ""}`}
|
||||
className="edge-label-button opacity-0 transition-opacity duration-200 hover:opacity-100"
|
||||
onClick={onEdgeRemoveClick}
|
||||
>
|
||||
<X className="size-4" />
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -26,7 +26,7 @@ import {
|
||||
import "@xyflow/react/dist/style.css";
|
||||
import { CustomNode } from "./CustomNode";
|
||||
import "./flow.css";
|
||||
import { BlockUIType, Link } from "@/lib/autogpt-server-api";
|
||||
import { BlockUIType, formatEdgeID } from "@/lib/autogpt-server-api";
|
||||
import { getTypeColor, findNewlyAddedBlockCoordinates } from "@/lib/utils";
|
||||
import { history } from "./history";
|
||||
import { CustomEdge } from "./CustomEdge";
|
||||
@@ -70,8 +70,9 @@ export const FlowContext = createContext<FlowContextType | null>(null);
|
||||
|
||||
const FlowEditor: React.FC<{
|
||||
flowID?: string;
|
||||
flowVersion?: string;
|
||||
className?: string;
|
||||
}> = ({ flowID, className }) => {
|
||||
}> = ({ flowID, flowVersion, className }) => {
|
||||
const {
|
||||
addNodes,
|
||||
addEdges,
|
||||
@@ -85,6 +86,7 @@ const FlowEditor: React.FC<{
|
||||
const [visualizeBeads, setVisualizeBeads] = useState<
|
||||
"no" | "static" | "animate"
|
||||
>("animate");
|
||||
const [flowExecutionID, setFlowExecutionID] = useState<string | undefined>();
|
||||
const {
|
||||
agentName,
|
||||
setAgentName,
|
||||
@@ -107,7 +109,12 @@ const FlowEditor: React.FC<{
|
||||
setNodes,
|
||||
edges,
|
||||
setEdges,
|
||||
} = useAgentGraph(flowID, visualizeBeads !== "no");
|
||||
} = useAgentGraph(
|
||||
flowID,
|
||||
flowVersion ? parseInt(flowVersion) : undefined,
|
||||
flowExecutionID,
|
||||
visualizeBeads !== "no",
|
||||
);
|
||||
|
||||
const router = useRouter();
|
||||
const pathname = usePathname();
|
||||
@@ -157,6 +164,7 @@ const FlowEditor: React.FC<{
|
||||
if (params.get("open_scheduling") === "true") {
|
||||
setOpenCron(true);
|
||||
}
|
||||
setFlowExecutionID(params.get("flowExecutionID") || undefined);
|
||||
}, [params]);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -267,14 +275,6 @@ const FlowEditor: React.FC<{
|
||||
[deleteElements, setNodes, nodes, edges, addNodes],
|
||||
);
|
||||
|
||||
const formatEdgeID = useCallback((conn: Link | Connection): string => {
|
||||
if ("sink_id" in conn) {
|
||||
return `${conn.source_id}_${conn.source_name}_${conn.sink_id}_${conn.sink_name}`;
|
||||
} else {
|
||||
return `${conn.source}_${conn.sourceHandle}_${conn.target}_${conn.targetHandle}`;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const onConnect: OnConnect = useCallback(
|
||||
(connection: Connection) => {
|
||||
// Check if this exact connection already exists
|
||||
@@ -660,7 +660,7 @@ const FlowEditor: React.FC<{
|
||||
onNodeDragStop={onNodeDragEnd}
|
||||
onNodeDragStart={onNodeDragStart}
|
||||
deleteKeyCode={["Backspace", "Delete"]}
|
||||
minZoom={0.2}
|
||||
minZoom={0.1}
|
||||
maxZoom={2}
|
||||
className="dark:bg-slate-900"
|
||||
>
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
import Link from "next/link";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import React from "react";
|
||||
import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet";
|
||||
import Image from "next/image";
|
||||
import getServerUser from "@/lib/supabase/getServerUser";
|
||||
import ProfileDropdown from "./ProfileDropdown";
|
||||
import { IconCircleUser, IconMenu } from "@/components/ui/icons";
|
||||
import CreditButton from "@/components/nav/CreditButton";
|
||||
import { NavBarButtons } from "./nav/NavBarButtons";
|
||||
|
||||
export async function NavBar() {
|
||||
const isAvailable = Boolean(
|
||||
process.env.NEXT_PUBLIC_SUPABASE_URL &&
|
||||
process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY,
|
||||
);
|
||||
const { user } = await getServerUser();
|
||||
|
||||
return user ? (
|
||||
<header className="sticky top-0 z-50 mx-4 flex h-16 select-none items-center gap-4 border border-gray-300 bg-background p-3 md:rounded-b-2xl md:px-6 md:shadow">
|
||||
<div className="flex flex-1 items-center gap-4">
|
||||
<Sheet>
|
||||
<SheetTrigger asChild>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
className="shrink-0 md:hidden"
|
||||
>
|
||||
<IconMenu />
|
||||
<span className="sr-only">Toggle navigation menu</span>
|
||||
</Button>
|
||||
</SheetTrigger>
|
||||
<SheetContent side="left">
|
||||
<nav className="grid gap-6 text-lg font-medium">
|
||||
<NavBarButtons className="flex flex-row items-center gap-2" />
|
||||
</nav>
|
||||
</SheetContent>
|
||||
</Sheet>
|
||||
<nav className="hidden md:flex md:flex-row md:items-center md:gap-5 lg:gap-8">
|
||||
<div className="flex h-10 w-20 flex-1 flex-row items-center justify-center gap-2">
|
||||
<a href="https://agpt.co/">
|
||||
<Image
|
||||
src="/AUTOgpt_Logo_dark.png"
|
||||
alt="AutoGPT Logo"
|
||||
width={100}
|
||||
height={40}
|
||||
priority
|
||||
/>
|
||||
</a>
|
||||
</div>
|
||||
<NavBarButtons className="flex flex-row items-center gap-1 border border-white font-semibold hover:border-gray-900" />
|
||||
</nav>
|
||||
</div>
|
||||
<div className="flex flex-1 items-center justify-end gap-4">
|
||||
{isAvailable && user && <CreditButton />}
|
||||
|
||||
{isAvailable && !user && (
|
||||
<Link
|
||||
href="/login"
|
||||
className="flex flex-row items-center gap-2 text-muted-foreground hover:text-foreground"
|
||||
>
|
||||
Log In
|
||||
<IconCircleUser />
|
||||
</Link>
|
||||
)}
|
||||
{isAvailable && user && <ProfileDropdown />}
|
||||
</div>
|
||||
</header>
|
||||
) : (
|
||||
<nav className="flex w-full items-center p-2 pt-8">
|
||||
<div className="flex h-10 w-20 flex-1 flex-row items-center justify-center gap-2">
|
||||
<a href="https://agpt.co/">
|
||||
<Image
|
||||
src="/AUTOgpt_Logo_dark.png"
|
||||
alt="AutoGPT Logo"
|
||||
width={100}
|
||||
height={40}
|
||||
priority
|
||||
/>
|
||||
</a>
|
||||
</div>
|
||||
</nav>
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types";
|
||||
import { beautifyString, getTypeBgColor, getTypeTextColor } from "@/lib/utils";
|
||||
import { FC } from "react";
|
||||
import { FC, memo, useCallback } from "react";
|
||||
import { Handle, Position } from "@xyflow/react";
|
||||
import SchemaTooltip from "./SchemaTooltip";
|
||||
|
||||
@@ -13,6 +13,32 @@ type HandleProps = {
|
||||
title?: string;
|
||||
};
|
||||
|
||||
// Move the constant out of the component to avoid re-creation on every render.
|
||||
const TYPE_NAME: Record<string, string> = {
|
||||
string: "text",
|
||||
number: "number",
|
||||
integer: "integer",
|
||||
boolean: "true/false",
|
||||
object: "object",
|
||||
array: "list",
|
||||
null: "null",
|
||||
};
|
||||
|
||||
// Extract and memoize the Dot component so that it doesn't re-render unnecessarily.
|
||||
const Dot: FC<{ isConnected: boolean; type?: string }> = memo(
|
||||
({ isConnected, type }) => {
|
||||
const color = isConnected
|
||||
? getTypeBgColor(type || "any")
|
||||
: "border-gray-300 dark:border-gray-600";
|
||||
return (
|
||||
<div
|
||||
className={`${color} m-1 h-4 w-4 rounded-full border-2 bg-white transition-colors duration-100 group-hover:bg-gray-300 dark:bg-slate-800 dark:group-hover:bg-gray-700`}
|
||||
/>
|
||||
);
|
||||
},
|
||||
);
|
||||
Dot.displayName = "Dot";
|
||||
|
||||
const NodeHandle: FC<HandleProps> = ({
|
||||
keyName,
|
||||
schema,
|
||||
@@ -21,17 +47,9 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
side,
|
||||
title,
|
||||
}) => {
|
||||
const typeName: Record<string, string> = {
|
||||
string: "text",
|
||||
number: "number",
|
||||
integer: "integer",
|
||||
boolean: "true/false",
|
||||
object: "object",
|
||||
array: "list",
|
||||
null: "null",
|
||||
};
|
||||
|
||||
const typeClass = `text-sm ${getTypeTextColor(schema.type || "any")} ${side === "left" ? "text-left" : "text-right"}`;
|
||||
const typeClass = `text-sm ${getTypeTextColor(schema.type || "any")} ${
|
||||
side === "left" ? "text-left" : "text-right"
|
||||
}`;
|
||||
|
||||
const label = (
|
||||
<div className="flex flex-grow flex-row">
|
||||
@@ -40,25 +58,27 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
{isRequired ? "*" : ""}
|
||||
</span>
|
||||
<span className={`${typeClass} flex items-end`}>
|
||||
({typeName[schema.type as keyof typeof typeName] || "any"})
|
||||
({TYPE_NAME[schema.type as keyof typeof TYPE_NAME] || "any"})
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
|
||||
const Dot = () => {
|
||||
const color = isConnected
|
||||
? getTypeBgColor(schema.type || "any")
|
||||
: "border-gray-300 dark:border-gray-600";
|
||||
return (
|
||||
<div
|
||||
className={`${color} m-1 h-4 w-4 rounded-full border-2 bg-white transition-colors duration-100 group-hover:bg-gray-300 dark:bg-slate-800 dark:group-hover:bg-gray-700`}
|
||||
/>
|
||||
);
|
||||
};
|
||||
// Use a native HTML onContextMenu handler instead of wrapping a large node with a Radix ContextMenu trigger.
|
||||
const handleContextMenu = useCallback(
|
||||
(e: React.MouseEvent<HTMLDivElement>) => {
|
||||
e.preventDefault();
|
||||
// Optionally, you can trigger a custom, lightweight context menu here.
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
if (side === "left") {
|
||||
return (
|
||||
<div key={keyName} className="handle-container">
|
||||
<div
|
||||
key={keyName}
|
||||
className="handle-container"
|
||||
onContextMenu={handleContextMenu}
|
||||
>
|
||||
<Handle
|
||||
type="target"
|
||||
data-testid={`input-handle-${keyName}`}
|
||||
@@ -67,7 +87,7 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
className="group -ml-[38px]"
|
||||
>
|
||||
<div className="pointer-events-none flex items-center">
|
||||
<Dot />
|
||||
<Dot isConnected={isConnected} type={schema.type} />
|
||||
{label}
|
||||
</div>
|
||||
</Handle>
|
||||
@@ -76,7 +96,11 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<div key={keyName} className="handle-container justify-end">
|
||||
<div
|
||||
key={keyName}
|
||||
className="handle-container justify-end"
|
||||
onContextMenu={handleContextMenu}
|
||||
>
|
||||
<Handle
|
||||
type="source"
|
||||
data-testid={`output-handle-${keyName}`}
|
||||
@@ -86,7 +110,7 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
>
|
||||
<div className="pointer-events-none flex items-center">
|
||||
{label}
|
||||
<Dot />
|
||||
<Dot isConnected={isConnected} type={schema.type} />
|
||||
</div>
|
||||
</Handle>
|
||||
</div>
|
||||
@@ -94,4 +118,4 @@ const NodeHandle: FC<HandleProps> = ({
|
||||
}
|
||||
};
|
||||
|
||||
export default NodeHandle;
|
||||
export default memo(NodeHandle);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user