mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00
Merge branch 'master' into zamilmajdy/code-validation
This commit is contained in:
200
autogpt/poetry.lock
generated
200
autogpt/poetry.lock
generated
@@ -306,7 +306,7 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""}
|
||||
|
||||
[[package]]
|
||||
name = "autogpt-forge"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
description = ""
|
||||
optional = false
|
||||
python-versions = "^3.10"
|
||||
@@ -325,7 +325,7 @@ click = "*"
|
||||
colorama = "^0.4.6"
|
||||
demjson3 = "^3.0.0"
|
||||
docker = "*"
|
||||
duckduckgo-search = "^5.0.0"
|
||||
duckduckgo-search = "^6.1.7"
|
||||
fastapi = "^0.109.1"
|
||||
gitpython = "^3.1.32"
|
||||
google-api-python-client = "*"
|
||||
@@ -1130,34 +1130,6 @@ files = [
|
||||
{file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "curl-cffi"
|
||||
version = "0.7.0b4"
|
||||
description = "libcurl ffi bindings for Python, with impersonation support."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-win32.whl", hash = "sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-win_amd64.whl", hash = "sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e"},
|
||||
{file = "curl_cffi-0.7.0b4.tar.gz", hash = "sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2024.2.2"
|
||||
cffi = ">=1.12.0"
|
||||
|
||||
[package.extras]
|
||||
build = ["cibuildwheel", "wheel"]
|
||||
dev = ["charset-normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
|
||||
test = ["charset-normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "cx-freeze"
|
||||
version = "7.0.0"
|
||||
@@ -1391,23 +1363,23 @@ websockets = ["websocket-client (>=1.3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "duckduckgo-search"
|
||||
version = "5.3.1"
|
||||
version = "6.1.7"
|
||||
description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "duckduckgo_search-5.3.1-py3-none-any.whl", hash = "sha256:a07d94babe45c9a9bd0ce2dc185346b47fe95dab516d414f21f06a0a1200aca9"},
|
||||
{file = "duckduckgo_search-5.3.1.tar.gz", hash = "sha256:2ee309e76b7e34ee84bddd5e046df723faecf7f999acdb499f3dad7e8a614c21"},
|
||||
{file = "duckduckgo_search-6.1.7-py3-none-any.whl", hash = "sha256:ec7d5becb8c392c0293ff9464938c1014896e1e14725c05adc306290a636fab2"},
|
||||
{file = "duckduckgo_search-6.1.7.tar.gz", hash = "sha256:c6fd8ba17fe9cd0a4f32e5b96984e959c3da865f9c2864bfcf82bf7ff9b7e8f0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=8.1.7"
|
||||
curl-cffi = ">=0.7.0b4"
|
||||
orjson = ">=3.10.3"
|
||||
orjson = ">=3.10.5"
|
||||
pyreqwest-impersonate = ">=0.4.8"
|
||||
|
||||
[package.extras]
|
||||
dev = ["mypy (>=1.10.0)", "pytest (>=8.2.0)", "ruff (>=0.4.3)"]
|
||||
lxml = ["lxml (>=5.2.1)"]
|
||||
dev = ["mypy (>=1.10.0)", "pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "ruff (>=0.4.8)"]
|
||||
lxml = ["lxml (>=5.2.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "en-core-web-sm"
|
||||
@@ -3793,57 +3765,57 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "orjson"
|
||||
version = "3.10.3"
|
||||
version = "3.10.5"
|
||||
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
|
||||
{file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
|
||||
{file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
|
||||
{file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
|
||||
{file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
|
||||
{file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
|
||||
{file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
|
||||
{file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
|
||||
{file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
|
||||
{file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
|
||||
{file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
|
||||
{file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
|
||||
{file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
|
||||
{file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
|
||||
{file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
|
||||
{file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
|
||||
{file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
|
||||
{file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
|
||||
{file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
|
||||
{file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
|
||||
{file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
|
||||
{file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
|
||||
{file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4677,6 +4649,66 @@ files = [
|
||||
{file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyreqwest-impersonate"
|
||||
version = "0.4.8"
|
||||
description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:45cad57afe4e6f56078ed9a7a90d0dc839d19d3e7a70175c80af21017f383bfb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1986600253baf38f25fd07b8bdc1903359c26e5d34beb7d7d084845554b5664d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cca4e6e59b9ad0cd20bad6caed3ac96992cd9c1d3126ecdfcab2c0ac2b75376"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab6b32544491ee655264dab86fc8a58e47c4f87d196b28022d4007faf971a50"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:64bd6299e7fc888bb7f7292cf3e29504c406e5d5d04afd37ca994ab8142d8ee4"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e914b650dd953b8d9b24ef56aa4ecbfc16e399227b68accd818f8bf159e0c558"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-none-win_amd64.whl", hash = "sha256:cb56a2149b0c4548a8e0158b071a943f33dae9b717f92b5c9ac34ccd1f5a958c"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f62620e023490902feca0109f306e122e427feff7d59e03ecd22c69a89452367"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08d4c01d76da88cfe3d7d03b311b375ce3fb5a59130f93f0637bb755d6e56ff1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524e276bc460176c79d7ba4b9131d9db73c534586660371ebdf067749252a33"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22863bc0aaf02ca2f5d76c8130929ae680b7d82dfc1c28c1ed5f306ff626928"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8cc82d57f6a91037e64a7aa9122f909576ef2a141a42ce599958ef9f8c4bc033"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da8a053308210e44fd8349f07f45442a0691ac932f2881e98b05cf9ac404b091"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-none-win_amd64.whl", hash = "sha256:4baf3916c14364a815a64ead7f728afb61b37541933b2771f18dbb245029bb55"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:78db05deed0b32c9c75f2b3168a3a9b7d5e36487b218cb839bfe7e2a143450cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9af9446d605903c2b4e94621a9093f8d8a403729bc9cbfbcb62929f8238c838f"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c55890181d8d81e66cac25a95e215dc9680645d01e9091b64449d5407ad9bc6"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69344e7ae9964502a8693da7ad77ebc3e1418ee197e2e394bc23c5d4970772a"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5db5c957a10d8cc2815085ba0b8fe09245b2f94c2225d9653a854a03b4217e1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03c19c21f63f9c91c590c4bbcc32cc2d8066b508c683a1d163b8c7d9816a01d5"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-none-win_amd64.whl", hash = "sha256:0230610779129f74ff802c744643ce7589b1d07cba21d046fe3b574281c29581"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b8cb9471ab4b2fa7e80d3ac4e580249ff988d782f2938ad1f0428433652b170d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8081a5ace2658be91519902bde9ddc5f94e1f850a39be196007a25e3da5bbfdc"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69eababfa3200459276acd780a0f3eaf41d1fe7c02bd169e714cba422055b5b9"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632957fa671ebb841166e40913015de457225cb73600ef250c436c280e68bf45"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2ce7ddef334b4e5c68f5ea1da1d65f686b8d84f4443059d128e0f069d3fa499a"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6ce333d450b158d582e36317089a006440b4e66739a8e8849d170e4cb15e8c8d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-none-win_amd64.whl", hash = "sha256:9d9c85ce19db92362854f534807e470f03e905f283a7de6826dc79b790a8788e"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2503277f2a95a30e28e498570e2ed03ef4302f873054e8e21d6c0e607cbbc1d1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8260395ef4ddae325e8b30cef0391adde7bd35e1a1decf8c729e26391f09b52d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d8066b46d82bbaff5402d767e2f13d3449b8191c37bf8283e91d301a7159869"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c42f6343cfbd6663fb53edc9eb9feb4ebf6186b284e22368adc1eeb6a33854"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff534f491a059e74fb7f994876df86078b4b125dbecc53c098a298ecd55fa9c6"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b8fbf73b3ac513ddadafd338d61f79cd2370f0691d9175b2b92a45920920d6b"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-none-win_amd64.whl", hash = "sha256:a26447c82665d0e361207c1a15e56b0ca54974aa6c1fdfa18c68f908dec78cbe"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24a16b8d55309f0af0db9d04ff442b0c91afccf078a94809e7c3a71747a5c214"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8fada56465fc19179404cc9d5d5e1064f5dfe27405cb052f57a5b4fe06aed1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a3d48d5abc146fd804395713427d944757a99254350e6a651e7d776818074aee"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:475829fe9994c66258157a8d4adb1c038f44f79f901208ba656d547842337227"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef1ec0e97623bc0e18469418cc4dd2c59a2d5fddcae944de61e13c0b46f910e"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91857b196de89e9b36d3f8629aa8772c0bbe7efef8334fe266956b1c192ec31c"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:63831e407487b8a21bb51f97cd86a616c291d5138f8caec16ab6019cf6423935"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c30e61de93bcd0a9d3ca226b1ae5475002afde61e9d85018a6a4a040eeb86567"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c72c37b03bce9900f5dbb4f476af17253ec60c13bf7a7259f71a8dc1b036cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f1096165741b5c2178ab15b0eb09b5de16dd39b1cc135767d72471f0a69ce"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70c940c0e4ef335e22a6c705b01f286ee44780b5909065d212d94d82ea2580cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81c06f21757602d85f16dbc1cbaee1121cd65455f65aed4c048b7dcda7be85c4"},
|
||||
{file = "pyreqwest_impersonate-0.4.8.tar.gz", hash = "sha256:1eba11d47bd17244c64fec1502cc26ee66cc5c8a3be131e408101ae2b455e5bc"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["pytest (>=8.1.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pyright"
|
||||
version = "1.1.366"
|
||||
|
||||
198
forge/poetry.lock
generated
198
forge/poetry.lock
generated
@@ -1444,34 +1444,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
|
||||
[package.extras]
|
||||
toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "curl-cffi"
|
||||
version = "0.7.0b4"
|
||||
description = "libcurl ffi bindings for Python, with impersonation support."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-win32.whl", hash = "sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0"},
|
||||
{file = "curl_cffi-0.7.0b4-cp38-abi3-win_amd64.whl", hash = "sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e"},
|
||||
{file = "curl_cffi-0.7.0b4.tar.gz", hash = "sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
certifi = ">=2024.2.2"
|
||||
cffi = ">=1.12.0"
|
||||
|
||||
[package.extras]
|
||||
build = ["cibuildwheel", "wheel"]
|
||||
dev = ["charset-normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
|
||||
test = ["charset-normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "cycler"
|
||||
version = "0.12.1"
|
||||
@@ -1612,23 +1584,23 @@ websockets = ["websocket-client (>=1.3.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "duckduckgo-search"
|
||||
version = "5.3.1"
|
||||
version = "6.1.7"
|
||||
description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "duckduckgo_search-5.3.1-py3-none-any.whl", hash = "sha256:a07d94babe45c9a9bd0ce2dc185346b47fe95dab516d414f21f06a0a1200aca9"},
|
||||
{file = "duckduckgo_search-5.3.1.tar.gz", hash = "sha256:2ee309e76b7e34ee84bddd5e046df723faecf7f999acdb499f3dad7e8a614c21"},
|
||||
{file = "duckduckgo_search-6.1.7-py3-none-any.whl", hash = "sha256:ec7d5becb8c392c0293ff9464938c1014896e1e14725c05adc306290a636fab2"},
|
||||
{file = "duckduckgo_search-6.1.7.tar.gz", hash = "sha256:c6fd8ba17fe9cd0a4f32e5b96984e959c3da865f9c2864bfcf82bf7ff9b7e8f0"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
click = ">=8.1.7"
|
||||
curl-cffi = ">=0.7.0b4"
|
||||
orjson = ">=3.10.3"
|
||||
orjson = ">=3.10.5"
|
||||
pyreqwest-impersonate = ">=0.4.8"
|
||||
|
||||
[package.extras]
|
||||
dev = ["mypy (>=1.10.0)", "pytest (>=8.2.0)", "ruff (>=0.4.3)"]
|
||||
lxml = ["lxml (>=5.2.1)"]
|
||||
dev = ["mypy (>=1.10.0)", "pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "ruff (>=0.4.8)"]
|
||||
lxml = ["lxml (>=5.2.2)"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
@@ -3974,57 +3946,57 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "orjson"
|
||||
version = "3.10.3"
|
||||
version = "3.10.5"
|
||||
description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"},
|
||||
{file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"},
|
||||
{file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"},
|
||||
{file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"},
|
||||
{file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"},
|
||||
{file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"},
|
||||
{file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"},
|
||||
{file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"},
|
||||
{file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"},
|
||||
{file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"},
|
||||
{file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"},
|
||||
{file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"},
|
||||
{file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"},
|
||||
{file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"},
|
||||
{file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"},
|
||||
{file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"},
|
||||
{file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
|
||||
{file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
|
||||
{file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
|
||||
{file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
|
||||
{file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
|
||||
{file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
|
||||
{file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"},
|
||||
{file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"},
|
||||
{file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"},
|
||||
{file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"},
|
||||
{file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"},
|
||||
{file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"},
|
||||
{file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = "sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
|
||||
{file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
|
||||
{file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
|
||||
{file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
|
||||
{file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4746,6 +4718,66 @@ files = [
|
||||
{file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyreqwest-impersonate"
|
||||
version = "0.4.8"
|
||||
description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:45cad57afe4e6f56078ed9a7a90d0dc839d19d3e7a70175c80af21017f383bfb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1986600253baf38f25fd07b8bdc1903359c26e5d34beb7d7d084845554b5664d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cca4e6e59b9ad0cd20bad6caed3ac96992cd9c1d3126ecdfcab2c0ac2b75376"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab6b32544491ee655264dab86fc8a58e47c4f87d196b28022d4007faf971a50"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:64bd6299e7fc888bb7f7292cf3e29504c406e5d5d04afd37ca994ab8142d8ee4"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e914b650dd953b8d9b24ef56aa4ecbfc16e399227b68accd818f8bf159e0c558"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp310-none-win_amd64.whl", hash = "sha256:cb56a2149b0c4548a8e0158b071a943f33dae9b717f92b5c9ac34ccd1f5a958c"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f62620e023490902feca0109f306e122e427feff7d59e03ecd22c69a89452367"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08d4c01d76da88cfe3d7d03b311b375ce3fb5a59130f93f0637bb755d6e56ff1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524e276bc460176c79d7ba4b9131d9db73c534586660371ebdf067749252a33"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22863bc0aaf02ca2f5d76c8130929ae680b7d82dfc1c28c1ed5f306ff626928"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8cc82d57f6a91037e64a7aa9122f909576ef2a141a42ce599958ef9f8c4bc033"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da8a053308210e44fd8349f07f45442a0691ac932f2881e98b05cf9ac404b091"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp311-none-win_amd64.whl", hash = "sha256:4baf3916c14364a815a64ead7f728afb61b37541933b2771f18dbb245029bb55"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:78db05deed0b32c9c75f2b3168a3a9b7d5e36487b218cb839bfe7e2a143450cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9af9446d605903c2b4e94621a9093f8d8a403729bc9cbfbcb62929f8238c838f"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c55890181d8d81e66cac25a95e215dc9680645d01e9091b64449d5407ad9bc6"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69344e7ae9964502a8693da7ad77ebc3e1418ee197e2e394bc23c5d4970772a"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5db5c957a10d8cc2815085ba0b8fe09245b2f94c2225d9653a854a03b4217e1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03c19c21f63f9c91c590c4bbcc32cc2d8066b508c683a1d163b8c7d9816a01d5"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp312-none-win_amd64.whl", hash = "sha256:0230610779129f74ff802c744643ce7589b1d07cba21d046fe3b574281c29581"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b8cb9471ab4b2fa7e80d3ac4e580249ff988d782f2938ad1f0428433652b170d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8081a5ace2658be91519902bde9ddc5f94e1f850a39be196007a25e3da5bbfdc"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69eababfa3200459276acd780a0f3eaf41d1fe7c02bd169e714cba422055b5b9"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632957fa671ebb841166e40913015de457225cb73600ef250c436c280e68bf45"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2ce7ddef334b4e5c68f5ea1da1d65f686b8d84f4443059d128e0f069d3fa499a"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6ce333d450b158d582e36317089a006440b4e66739a8e8849d170e4cb15e8c8d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp38-none-win_amd64.whl", hash = "sha256:9d9c85ce19db92362854f534807e470f03e905f283a7de6826dc79b790a8788e"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2503277f2a95a30e28e498570e2ed03ef4302f873054e8e21d6c0e607cbbc1d1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8260395ef4ddae325e8b30cef0391adde7bd35e1a1decf8c729e26391f09b52d"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d8066b46d82bbaff5402d767e2f13d3449b8191c37bf8283e91d301a7159869"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c42f6343cfbd6663fb53edc9eb9feb4ebf6186b284e22368adc1eeb6a33854"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff534f491a059e74fb7f994876df86078b4b125dbecc53c098a298ecd55fa9c6"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b8fbf73b3ac513ddadafd338d61f79cd2370f0691d9175b2b92a45920920d6b"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-cp39-none-win_amd64.whl", hash = "sha256:a26447c82665d0e361207c1a15e56b0ca54974aa6c1fdfa18c68f908dec78cbe"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24a16b8d55309f0af0db9d04ff442b0c91afccf078a94809e7c3a71747a5c214"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8fada56465fc19179404cc9d5d5e1064f5dfe27405cb052f57a5b4fe06aed1"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a3d48d5abc146fd804395713427d944757a99254350e6a651e7d776818074aee"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:475829fe9994c66258157a8d4adb1c038f44f79f901208ba656d547842337227"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef1ec0e97623bc0e18469418cc4dd2c59a2d5fddcae944de61e13c0b46f910e"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91857b196de89e9b36d3f8629aa8772c0bbe7efef8334fe266956b1c192ec31c"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:63831e407487b8a21bb51f97cd86a616c291d5138f8caec16ab6019cf6423935"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c30e61de93bcd0a9d3ca226b1ae5475002afde61e9d85018a6a4a040eeb86567"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c72c37b03bce9900f5dbb4f476af17253ec60c13bf7a7259f71a8dc1b036cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f1096165741b5c2178ab15b0eb09b5de16dd39b1cc135767d72471f0a69ce"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70c940c0e4ef335e22a6c705b01f286ee44780b5909065d212d94d82ea2580cb"},
|
||||
{file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81c06f21757602d85f16dbc1cbaee1121cd65455f65aed4c048b7dcda7be85c4"},
|
||||
{file = "pyreqwest_impersonate-0.4.8.tar.gz", hash = "sha256:1eba11d47bd17244c64fec1502cc26ee66cc5c8a3be131e408101ae2b455e5bc"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
dev = ["pytest (>=8.1.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pyright"
|
||||
version = "1.1.366"
|
||||
@@ -6830,4 +6862,4 @@ benchmark = ["agbenchmark"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "e04a03e3f2663d3e54d5f4e6649cd4442cb00fd17ae5c68d06dbaadfc02ca309"
|
||||
content-hash = "b02486b166870b64f778f117708c9dab9ce9305340ad5ef28994273925f09c4c"
|
||||
|
||||
@@ -22,7 +22,7 @@ click = "*"
|
||||
colorama = "^0.4.6"
|
||||
demjson3 = "^3.0.0"
|
||||
docker = "*"
|
||||
duckduckgo-search = "^5.0.0"
|
||||
duckduckgo-search = "^6.1.7"
|
||||
fastapi = "^0.109.1"
|
||||
gitpython = "^3.1.32"
|
||||
google-api-python-client = "*"
|
||||
|
||||
@@ -1,27 +1,34 @@
|
||||
from multiprocessing import freeze_support
|
||||
from multiprocessing.spawn import freeze_support as freeze_support_spawn
|
||||
|
||||
from autogpt_server.data.execution import ExecutionQueue
|
||||
from autogpt_server.executor import start_executor_manager
|
||||
from autogpt_server.server import start_server
|
||||
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
|
||||
from autogpt_server.server import AgentServer
|
||||
from autogpt_server.util.process import AppProcess
|
||||
from autogpt_server.util.service import PyroNameServer
|
||||
|
||||
|
||||
def background_process() -> None:
|
||||
def run_processes(processes: list[AppProcess], **kwargs):
|
||||
"""
|
||||
Used by the cli to run the server and executor in the background.
|
||||
This function runs the server and starts the executor in the background.
|
||||
Execute all processes in the app. The last process is run in the foreground.
|
||||
"""
|
||||
# These directives are required to make multiprocessing work with cx_Freeze
|
||||
# and are both required and safe across platforms (Windows, macOS, Linux)
|
||||
# They must be placed at the beginning of the executions before any other
|
||||
# multiprocessing code is run
|
||||
freeze_support()
|
||||
freeze_support_spawn()
|
||||
# Start the application
|
||||
queue = ExecutionQueue()
|
||||
start_executor_manager(5, queue)
|
||||
start_server(queue)
|
||||
try:
|
||||
for process in processes[:-1]:
|
||||
process.start(background=True, **kwargs)
|
||||
processes[-1].start(background=False, **kwargs)
|
||||
except Exception as e:
|
||||
for process in processes:
|
||||
process.stop()
|
||||
raise e
|
||||
|
||||
|
||||
def main(**kwargs):
|
||||
run_processes(
|
||||
[
|
||||
PyroNameServer(),
|
||||
ExecutionManager(pool_size=5),
|
||||
ExecutionScheduler(),
|
||||
AgentServer(),
|
||||
],
|
||||
**kwargs
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
background_process()
|
||||
main()
|
||||
|
||||
@@ -2,25 +2,53 @@
|
||||
The command line interface for the agent server
|
||||
"""
|
||||
|
||||
from multiprocessing import freeze_support
|
||||
from multiprocessing.spawn import freeze_support as freeze_support_spawn
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
import click
|
||||
import psutil
|
||||
|
||||
from autogpt_server import app
|
||||
from autogpt_server.util.process import AppProcess
|
||||
|
||||
|
||||
def get_pid_path() -> pathlib.Path:
|
||||
home_dir = pathlib.Path.home()
|
||||
new_dir = home_dir / ".config" / "agpt"
|
||||
file_path = new_dir / "running.tmp"
|
||||
return file_path
|
||||
|
||||
|
||||
def get_pid() -> int | None:
|
||||
file_path = get_pid_path()
|
||||
if not file_path.exists():
|
||||
return None
|
||||
|
||||
os.makedirs(file_path.parent, exist_ok=True)
|
||||
with open(file_path, "r", encoding="utf-8") as file:
|
||||
pid = file.read()
|
||||
try:
|
||||
return int(pid)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def write_pid(pid: int):
|
||||
file_path = get_pid_path()
|
||||
os.makedirs(file_path.parent, exist_ok=True)
|
||||
with open(file_path, "w", encoding="utf-8") as file:
|
||||
file.write(str(pid))
|
||||
|
||||
|
||||
class MainApp(AppProcess):
|
||||
def run(self):
|
||||
app.main(silent=True)
|
||||
|
||||
|
||||
@click.group()
|
||||
def main():
|
||||
"""AutoGPT Server CLI Tool"""
|
||||
|
||||
|
||||
@main.command()
|
||||
def background() -> None:
|
||||
"""
|
||||
Command to run the server in the background. Used by the run command
|
||||
"""
|
||||
from autogpt_server.app import background_process
|
||||
|
||||
background_process()
|
||||
pass
|
||||
|
||||
|
||||
@main.command()
|
||||
@@ -28,37 +56,22 @@ def start():
|
||||
"""
|
||||
Starts the server in the background and saves the PID
|
||||
"""
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
import psutil
|
||||
|
||||
# Define the path for the new directory and file
|
||||
home_dir = pathlib.Path.home()
|
||||
new_dir = home_dir / ".config" / "agpt"
|
||||
file_path = new_dir / "running.tmp"
|
||||
pid = get_pid()
|
||||
if pid and psutil.pid_exists(pid):
|
||||
print("Server is already running")
|
||||
exit(1)
|
||||
elif pid:
|
||||
print("PID does not exist deleting file")
|
||||
os.remove(get_pid_path())
|
||||
|
||||
# Create the directory if it does not exist
|
||||
os.makedirs(new_dir, exist_ok=True)
|
||||
if file_path.exists():
|
||||
with open(file_path, "r", encoding="utf-8") as file:
|
||||
pid = int(file.read())
|
||||
if psutil.pid_exists(pid):
|
||||
print("Server is already running")
|
||||
exit(1)
|
||||
else:
|
||||
print("PID does not exist deleting file")
|
||||
os.remove(file_path)
|
||||
print("Starting server")
|
||||
pid = MainApp().start(background=True, silent=True)
|
||||
print(f"Server running in process: {pid}")
|
||||
|
||||
sp = subprocess.Popen(
|
||||
["poetry", "run", "python", "autogpt_server/cli.py", "background"],
|
||||
stdout=subprocess.DEVNULL, # Redirect standard output to devnull
|
||||
stderr=subprocess.DEVNULL, # Redirect standard error to devnull
|
||||
)
|
||||
print(f"Server running in process: {sp.pid}")
|
||||
|
||||
with open(file_path, "w", encoding="utf-8") as file:
|
||||
file.write(str(sp.pid))
|
||||
write_pid(pid)
|
||||
print("done")
|
||||
os._exit(status=0)
|
||||
|
||||
|
||||
@main.command()
|
||||
@@ -66,22 +79,17 @@ def stop():
|
||||
"""
|
||||
Stops the server
|
||||
"""
|
||||
import os
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
home_dir = pathlib.Path.home()
|
||||
new_dir = home_dir / ".config" / "agpt"
|
||||
file_path = new_dir / "running.tmp"
|
||||
if not file_path.exists():
|
||||
pid = get_pid()
|
||||
if not pid:
|
||||
print("Server is not running")
|
||||
return
|
||||
|
||||
with open(file_path, "r", encoding="utf-8") as file:
|
||||
pid = file.read()
|
||||
os.remove(file_path)
|
||||
os.remove(get_pid_path())
|
||||
process = psutil.Process(int(pid))
|
||||
for child in process.children(recursive=True):
|
||||
child.terminate()
|
||||
process.terminate()
|
||||
|
||||
subprocess.Popen(["kill", pid])
|
||||
print("Server Stopped")
|
||||
|
||||
|
||||
@@ -90,6 +98,7 @@ def test():
|
||||
"""
|
||||
Group for test commands
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
@test.command()
|
||||
@@ -100,17 +109,5 @@ def event():
|
||||
print("Event sent")
|
||||
|
||||
|
||||
main.add_command(test)
|
||||
|
||||
|
||||
def start_cli() -> None:
|
||||
"""
|
||||
Entry point into the cli
|
||||
"""
|
||||
freeze_support()
|
||||
freeze_support_spawn()
|
||||
main()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
start_cli()
|
||||
main()
|
||||
|
||||
@@ -2,9 +2,10 @@ import json
|
||||
import jsonschema
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, ClassVar
|
||||
|
||||
from prisma.models import AgentBlock
|
||||
from pydantic import BaseModel
|
||||
from typing import Any, ClassVar
|
||||
|
||||
BlockData = dict[str, Any]
|
||||
|
||||
@@ -49,7 +50,7 @@ class BlockSchema(BaseModel):
|
||||
self,
|
||||
properties: dict[str, str | dict],
|
||||
required: list[str] | None = None,
|
||||
**kwargs: Any
|
||||
**kwargs: Any,
|
||||
):
|
||||
schema = {
|
||||
"type": "object",
|
||||
@@ -125,7 +126,7 @@ class Block(ABC, BaseModel):
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
"""
|
||||
Run the block with the given input data.
|
||||
Args:
|
||||
@@ -140,13 +141,21 @@ class Block(ABC, BaseModel):
|
||||
def name(cls):
|
||||
return cls.__name__
|
||||
|
||||
async def execute(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
def to_dict(self):
|
||||
return {
|
||||
"id": self.id,
|
||||
"name": self.name,
|
||||
"inputSchema": self.input_schema.jsonschema,
|
||||
"outputSchema": self.output_schema.jsonschema,
|
||||
}
|
||||
|
||||
def execute(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
if error := self.input_schema.validate_data(input_data):
|
||||
raise ValueError(
|
||||
f"Unable to execute block with invalid input data: {error}"
|
||||
)
|
||||
|
||||
output_name, output_data = await self.run(input_data)
|
||||
output_name, output_data = self.run(input_data)
|
||||
|
||||
if error := self.output_schema.validate_field(output_name, output_data):
|
||||
raise ValueError(
|
||||
@@ -161,29 +170,37 @@ class Block(ABC, BaseModel):
|
||||
|
||||
class ParrotBlock(Block):
|
||||
id: ClassVar[str] = "1ff065e9-88e8-4358-9d82-8dc91f622ba9" # type: ignore
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"input": "string",
|
||||
})
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"output": "string",
|
||||
})
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"input": "string",
|
||||
}
|
||||
)
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"output": "string",
|
||||
}
|
||||
)
|
||||
|
||||
async def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
return "output", input_data["input"]
|
||||
|
||||
|
||||
class TextCombinerBlock(Block):
|
||||
id: ClassVar[str] = "db7d8f02-2f44-4c55-ab7a-eae0941f0c30" # type: ignore
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"text1": "string",
|
||||
"text2": "string",
|
||||
"format": "string",
|
||||
})
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"combined_text": "string",
|
||||
})
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"text1": "string",
|
||||
"text2": "string",
|
||||
"format": "string",
|
||||
}
|
||||
)
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"combined_text": "string",
|
||||
}
|
||||
)
|
||||
|
||||
async def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
return "combined_text", input_data["format"].format(
|
||||
text1=input_data["text1"],
|
||||
text2=input_data["text2"],
|
||||
@@ -192,15 +209,18 @@ class TextCombinerBlock(Block):
|
||||
|
||||
class PrintingBlock(Block):
|
||||
id: ClassVar[str] = "f3b1c1b2-4c4f-4f0d-8d2f-4c4f0d8d2f4c" # type: ignore
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"text": "string",
|
||||
})
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema({ # type: ignore
|
||||
"status": "string",
|
||||
})
|
||||
input_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"text": "string",
|
||||
}
|
||||
)
|
||||
output_schema: ClassVar[BlockSchema] = BlockSchema( # type: ignore
|
||||
{
|
||||
"status": "string",
|
||||
}
|
||||
)
|
||||
|
||||
async def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
print(input_data["text"])
|
||||
def run(self, input_data: BlockData) -> tuple[str, Any]:
|
||||
return "status", "printed"
|
||||
|
||||
|
||||
@@ -215,10 +235,7 @@ async def initialize_blocks() -> None:
|
||||
AVAILABLE_BLOCKS = {block.id: block() for block in Block.__subclasses__()}
|
||||
|
||||
for block in AVAILABLE_BLOCKS.values():
|
||||
existing_block = await AgentBlock.prisma().find_unique(
|
||||
where={"id": block.id}
|
||||
)
|
||||
if existing_block:
|
||||
if await AgentBlock.prisma().find_unique(where={"id": block.id}):
|
||||
continue
|
||||
|
||||
await AgentBlock.prisma().create(
|
||||
@@ -231,7 +248,13 @@ async def initialize_blocks() -> None:
|
||||
)
|
||||
|
||||
|
||||
async def get_block(block_id: str) -> Block:
|
||||
async def get_blocks() -> list[Block]:
|
||||
if not AVAILABLE_BLOCKS:
|
||||
await initialize_blocks()
|
||||
return AVAILABLE_BLOCKS[block_id]
|
||||
return list(AVAILABLE_BLOCKS.values())
|
||||
|
||||
|
||||
async def get_block(block_id: str) -> Block | None:
|
||||
if not AVAILABLE_BLOCKS:
|
||||
await initialize_blocks()
|
||||
return AVAILABLE_BLOCKS.get(block_id)
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import asyncio
|
||||
from uuid import uuid4
|
||||
from prisma import Prisma
|
||||
from pydantic import BaseModel
|
||||
@@ -6,16 +5,14 @@ from pydantic import BaseModel
|
||||
prisma = Prisma(auto_register=True)
|
||||
|
||||
|
||||
def connect_sync():
|
||||
asyncio.get_event_loop().run_until_complete(connect())
|
||||
|
||||
|
||||
async def connect():
|
||||
await prisma.connect()
|
||||
if not prisma.is_connected():
|
||||
await prisma.connect()
|
||||
|
||||
|
||||
async def disconnect():
|
||||
await prisma.disconnect()
|
||||
if prisma.is_connected():
|
||||
await prisma.disconnect()
|
||||
|
||||
|
||||
class BaseDbModel(BaseModel):
|
||||
|
||||
@@ -2,15 +2,16 @@ import json
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from multiprocessing import Queue
|
||||
from typing import Any
|
||||
|
||||
from prisma.models import AgentNodeExecution
|
||||
from typing import Any
|
||||
|
||||
from autogpt_server.data.db import BaseDbModel
|
||||
|
||||
|
||||
class Execution(BaseDbModel):
|
||||
"""Data model for an execution of an Agent"""
|
||||
|
||||
run_id: str
|
||||
node_id: str
|
||||
data: dict[str, Any]
|
||||
@@ -23,11 +24,6 @@ class ExecutionStatus(str, Enum):
|
||||
FAILED = "FAILED"
|
||||
|
||||
|
||||
# TODO: This shared class make api & executor coupled in one machine.
|
||||
# Replace this with a persistent & remote-hosted queue.
|
||||
# One very likely candidate would be persisted Redis (Redis Queue).
|
||||
# It will also open the possibility of using it for other purposes like
|
||||
# caching, execution engine broker (like Celery), user session management etc.
|
||||
class ExecutionQueue:
|
||||
"""
|
||||
Queue for managing the execution of agents.
|
||||
@@ -48,7 +44,38 @@ class ExecutionQueue:
|
||||
return self.queue.empty()
|
||||
|
||||
|
||||
async def add_execution(execution: Execution, queue: ExecutionQueue) -> Execution:
|
||||
class ExecutionResult(BaseDbModel):
|
||||
run_id: str
|
||||
execution_id: str
|
||||
node_id: str
|
||||
status: ExecutionStatus
|
||||
input_data: dict[str, Any]
|
||||
output_name: str
|
||||
output_data: Any
|
||||
creation_time: datetime
|
||||
start_time: datetime | None
|
||||
end_time: datetime | None
|
||||
|
||||
@staticmethod
|
||||
def from_db(execution: AgentNodeExecution):
|
||||
return ExecutionResult(
|
||||
run_id=execution.executionId,
|
||||
node_id=execution.agentNodeId,
|
||||
execution_id=execution.id,
|
||||
status=ExecutionStatus(execution.executionStatus),
|
||||
input_data=json.loads(execution.inputData or "{}"),
|
||||
output_name=execution.outputName or "",
|
||||
output_data=json.loads(execution.outputData or "{}"),
|
||||
creation_time=execution.creationTime,
|
||||
start_time=execution.startTime,
|
||||
end_time=execution.endTime,
|
||||
)
|
||||
|
||||
|
||||
# --------------------- Model functions --------------------- #
|
||||
|
||||
|
||||
async def enqueue_execution(execution: Execution) -> None:
|
||||
await AgentNodeExecution.prisma().create(
|
||||
data={
|
||||
"id": execution.id,
|
||||
@@ -59,7 +86,6 @@ async def add_execution(execution: Execution, queue: ExecutionQueue) -> Executio
|
||||
"creationTime": datetime.now(),
|
||||
}
|
||||
)
|
||||
return queue.add(execution)
|
||||
|
||||
|
||||
async def start_execution(exec_id: str) -> None:
|
||||
@@ -96,3 +122,12 @@ async def fail_execution(exec_id: str, error: Exception) -> None:
|
||||
"endTime": datetime.now(),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def get_executions(run_id: str) -> list[ExecutionResult]:
|
||||
executions = await AgentNodeExecution.prisma().find_many(
|
||||
where={"executionId": run_id},
|
||||
order={"startTime": "asc"},
|
||||
)
|
||||
res = [ExecutionResult.from_db(execution) for execution in executions]
|
||||
return res
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import asyncio
|
||||
import json
|
||||
import uuid
|
||||
from typing import Any
|
||||
|
||||
from prisma.models import AgentGraph, AgentNode, AgentNodeLink, AgentNodeExecution
|
||||
from typing import Any
|
||||
from prisma.models import AgentGraph, AgentNode, AgentNodeExecution, AgentNodeLink
|
||||
|
||||
from autogpt_server.data.db import BaseDbModel
|
||||
from autogpt_server.data.block import get_block
|
||||
|
||||
|
||||
class Node(BaseDbModel):
|
||||
@@ -15,6 +15,7 @@ class Node(BaseDbModel):
|
||||
# TODO: Make it `dict[str, list[str]]`, output can be connected to multiple blocks.
|
||||
# Other option is to use an edge-list, but it will complicate the rest code.
|
||||
output_nodes: dict[str, str] = {} # dict[output_name, node_id]
|
||||
metadata: dict[str, Any] = {}
|
||||
|
||||
@staticmethod
|
||||
def from_db(node: AgentNode):
|
||||
@@ -27,16 +28,13 @@ class Node(BaseDbModel):
|
||||
input_default=json.loads(node.constantInput),
|
||||
input_nodes={v.sinkName: v.agentNodeSourceId for v in node.Input or []},
|
||||
output_nodes={v.sourceName: v.agentNodeSinkId for v in node.Output or []},
|
||||
metadata=json.loads(node.metadata),
|
||||
)
|
||||
|
||||
def connect(self, node: "Node", source_name: str, sink_name: str):
|
||||
self.output_nodes[source_name] = node.id
|
||||
node.input_nodes[sink_name] = self.id
|
||||
|
||||
@property
|
||||
async def block(self):
|
||||
return await get_block(self.block_id)
|
||||
|
||||
|
||||
class Graph(BaseDbModel):
|
||||
name: str
|
||||
@@ -64,6 +62,9 @@ EXECUTION_NODE_INCLUDE = {
|
||||
}
|
||||
|
||||
|
||||
# --------------------- Model functions --------------------- #
|
||||
|
||||
|
||||
async def get_node(node_id: str) -> Node | None:
|
||||
node = await AgentNode.prisma().find_unique_or_raise(
|
||||
where={"id": node_id},
|
||||
@@ -72,6 +73,10 @@ async def get_node(node_id: str) -> Node | None:
|
||||
return Node.from_db(node) if node else None
|
||||
|
||||
|
||||
async def get_graph_ids() -> list[str]:
|
||||
return [graph.id for graph in await AgentGraph.prisma().find_many()] # type: ignore
|
||||
|
||||
|
||||
async def get_graph(graph_id: str) -> Graph | None:
|
||||
graph = await AgentGraph.prisma().find_unique(
|
||||
where={"id": graph_id},
|
||||
@@ -89,7 +94,7 @@ async def get_node_input(node: Node, exec_id: str) -> dict[str, Any]:
|
||||
Returns:
|
||||
dictionary of input data, key is the input name, value is the input data.
|
||||
"""
|
||||
query = AgentNodeExecution.prisma().find_many(
|
||||
query = await AgentNodeExecution.prisma().find_many(
|
||||
where={ # type: ignore
|
||||
"executionId": exec_id,
|
||||
"agentNodeId": {"in": list(node.input_nodes.values())},
|
||||
@@ -100,7 +105,7 @@ async def get_node_input(node: Node, exec_id: str) -> dict[str, Any]:
|
||||
)
|
||||
|
||||
latest_executions: dict[str, AgentNodeExecution] = {
|
||||
execution.agentNodeId: execution for execution in await query
|
||||
execution.agentNodeId: execution for execution in query
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -114,6 +119,7 @@ async def get_node_input(node: Node, exec_id: str) -> dict[str, Any]:
|
||||
|
||||
|
||||
async def create_graph(graph: Graph) -> Graph:
|
||||
|
||||
await AgentGraph.prisma().create(
|
||||
data={
|
||||
"id": graph.id,
|
||||
@@ -123,19 +129,15 @@ async def create_graph(graph: Graph) -> Graph:
|
||||
)
|
||||
|
||||
# TODO: replace bulk creation using create_many
|
||||
await asyncio.gather(
|
||||
*[
|
||||
AgentNode.prisma().create(
|
||||
{
|
||||
"id": node.id,
|
||||
"agentBlockId": node.block_id,
|
||||
"agentGraphId": graph.id,
|
||||
"constantInput": json.dumps(node.input_default),
|
||||
}
|
||||
)
|
||||
for node in graph.nodes
|
||||
]
|
||||
)
|
||||
await asyncio.gather(*[
|
||||
AgentNode.prisma().create({
|
||||
"id": node.id,
|
||||
"agentBlockId": node.block_id,
|
||||
"agentGraphId": graph.id,
|
||||
"constantInput": json.dumps(node.input_default),
|
||||
"metadata": json.dumps(node.metadata),
|
||||
}) for node in graph.nodes
|
||||
])
|
||||
|
||||
edge_source_names = {
|
||||
(source_node.id, sink_node_id): output_name
|
||||
@@ -149,22 +151,16 @@ async def create_graph(graph: Graph) -> Graph:
|
||||
}
|
||||
|
||||
# TODO: replace bulk creation using create_many
|
||||
await asyncio.gather(
|
||||
*[
|
||||
AgentNodeLink.prisma().create(
|
||||
{
|
||||
"id": str(uuid.uuid4()),
|
||||
"sourceName": edge_source_names.get((input_node, output_node), ""),
|
||||
"sinkName": edge_sink_names.get((input_node, output_node), ""),
|
||||
"agentNodeSourceId": input_node,
|
||||
"agentNodeSinkId": output_node,
|
||||
}
|
||||
)
|
||||
for input_node, output_node in (
|
||||
edge_source_names.keys() | edge_sink_names.keys()
|
||||
)
|
||||
]
|
||||
)
|
||||
await asyncio.gather(*[
|
||||
AgentNodeLink.prisma().create({
|
||||
"id": str(uuid.uuid4()),
|
||||
"sourceName": edge_source_names.get((input_node, output_node), ""),
|
||||
"sinkName": edge_sink_names.get((input_node, output_node), ""),
|
||||
"agentNodeSourceId": input_node,
|
||||
"agentNodeSinkId": output_node,
|
||||
})
|
||||
for input_node, output_node in edge_source_names.keys() | edge_sink_names.keys()
|
||||
])
|
||||
|
||||
if created_graph := await get_graph(graph.id):
|
||||
return created_graph
|
||||
|
||||
89
rnd/autogpt_server/autogpt_server/data/schedule.py
Normal file
89
rnd/autogpt_server/autogpt_server/data/schedule.py
Normal file
@@ -0,0 +1,89 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
from typing import Optional, Any
|
||||
|
||||
from prisma.models import AgentExecutionSchedule
|
||||
|
||||
from autogpt_server.data.db import BaseDbModel
|
||||
|
||||
|
||||
class ExecutionSchedule(BaseDbModel):
|
||||
id: str
|
||||
agent_id: str
|
||||
schedule: str
|
||||
is_enabled: bool
|
||||
input_data: dict[str, Any]
|
||||
last_updated: Optional[datetime] = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
is_enabled: Optional[bool] = None,
|
||||
**kwargs
|
||||
):
|
||||
if is_enabled is None:
|
||||
is_enabled = True
|
||||
super().__init__(is_enabled=is_enabled, **kwargs)
|
||||
|
||||
@staticmethod
|
||||
def from_db(schedule: AgentExecutionSchedule):
|
||||
return ExecutionSchedule(
|
||||
id=schedule.id,
|
||||
agent_id=schedule.agentGraphId,
|
||||
schedule=schedule.schedule,
|
||||
is_enabled=schedule.isEnabled,
|
||||
last_updated=schedule.lastUpdated.replace(tzinfo=None),
|
||||
input_data=json.loads(schedule.inputData),
|
||||
)
|
||||
|
||||
|
||||
async def get_active_schedules(last_fetch_time: datetime) -> list[ExecutionSchedule]:
|
||||
query = AgentExecutionSchedule.prisma().find_many(
|
||||
where={
|
||||
"isEnabled": True,
|
||||
"lastUpdated": {"gt": last_fetch_time}
|
||||
},
|
||||
order={"lastUpdated": "asc"}
|
||||
)
|
||||
return [
|
||||
ExecutionSchedule.from_db(schedule)
|
||||
for schedule in await query
|
||||
]
|
||||
|
||||
|
||||
async def disable_schedule(schedule_id: str):
|
||||
await AgentExecutionSchedule.prisma().update(
|
||||
where={"id": schedule_id},
|
||||
data={"isEnabled": False}
|
||||
)
|
||||
|
||||
|
||||
async def get_schedules(agent_id: str) -> list[ExecutionSchedule]:
|
||||
query = AgentExecutionSchedule.prisma().find_many(
|
||||
where={
|
||||
"isEnabled": True,
|
||||
"agentGraphId": agent_id,
|
||||
},
|
||||
)
|
||||
return [
|
||||
ExecutionSchedule.from_db(schedule)
|
||||
for schedule in await query
|
||||
]
|
||||
|
||||
|
||||
async def add_schedule(schedule: ExecutionSchedule):
|
||||
await AgentExecutionSchedule.prisma().create(
|
||||
data={
|
||||
"id": schedule.id,
|
||||
"agentGraphId": schedule.agent_id,
|
||||
"schedule": schedule.schedule,
|
||||
"isEnabled": schedule.is_enabled,
|
||||
"inputData": json.dumps(schedule.input_data),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def update_schedule(schedule_id: str, is_enabled: bool):
|
||||
await AgentExecutionSchedule.prisma().update(
|
||||
where={"id": schedule_id},
|
||||
data={"isEnabled": is_enabled}
|
||||
)
|
||||
@@ -1 +1,8 @@
|
||||
from .executor import start_executor_manager # type: ignore # noqa
|
||||
from .manager import ExecutionManager
|
||||
from .scheduler import ExecutionScheduler
|
||||
|
||||
__all__ = [
|
||||
"ExecutionManager",
|
||||
"ExecutionScheduler",
|
||||
]
|
||||
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
import asyncio
|
||||
import logging
|
||||
from concurrent.futures import ProcessPoolExecutor
|
||||
from multiprocessing import Process
|
||||
from typing import Optional
|
||||
|
||||
from autogpt_server.data import block, db, graph
|
||||
from autogpt_server.data.execution import (
|
||||
Execution,
|
||||
ExecutionQueue,
|
||||
add_execution,
|
||||
complete_execution,
|
||||
fail_execution,
|
||||
start_execution,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_log_prefix(run_id: str, exec_id: str, block_name: str = "-"):
|
||||
return f"[Execution graph-{run_id}|node-{exec_id}|{block_name}]"
|
||||
|
||||
|
||||
async def execute_node(data: Execution) -> Execution | None:
|
||||
"""
|
||||
Execute a node in the graph. This will trigger a block execution on a node,
|
||||
persist the execution result, and return the subsequent node to be executed.
|
||||
|
||||
Args:
|
||||
data: The execution data for executing the current node.
|
||||
|
||||
Returns:
|
||||
The subsequent node to be enqueued, or None if there is no subsequent node.
|
||||
"""
|
||||
run_id = data.run_id
|
||||
exec_id = data.id
|
||||
exec_data = data.data
|
||||
node_id = data.node_id
|
||||
|
||||
node = await graph.get_node(node_id)
|
||||
if not node:
|
||||
logger.error(f"Node {node_id} not found.")
|
||||
return None
|
||||
|
||||
node_block = await block.get_block(node.block_id)
|
||||
if not node_block:
|
||||
logger.error(f"Block {node.block_id} not found.")
|
||||
return None
|
||||
|
||||
# Execute the node
|
||||
prefix = get_log_prefix(run_id, exec_id, node_block.name)
|
||||
logger.warning(f"{prefix} execute with input:\n{exec_data}")
|
||||
await start_execution(exec_id)
|
||||
|
||||
try:
|
||||
output_name, output_data = await node_block.execute(exec_data)
|
||||
logger.warning(f"{prefix} executed with output: `{output_name}`:{output_data}")
|
||||
await complete_execution(exec_id, (output_name, output_data))
|
||||
except Exception as e:
|
||||
logger.exception(f"{prefix} failed with error: %s", e)
|
||||
await fail_execution(exec_id, e)
|
||||
raise e
|
||||
|
||||
# Try to enqueue next eligible nodes
|
||||
if output_name not in node.output_nodes:
|
||||
logger.error(f"{prefix} output name `{output_name}` has no subsequent node.")
|
||||
return None
|
||||
|
||||
next_node_id = node.output_nodes[output_name]
|
||||
next_node = await graph.get_node(next_node_id)
|
||||
if not next_node:
|
||||
logger.error(f"{prefix} Error, next node {next_node_id} not found.")
|
||||
return None
|
||||
|
||||
next_node_input = await graph.get_node_input(next_node, run_id)
|
||||
next_node_block = await next_node.block
|
||||
|
||||
if not set(next_node.input_nodes).issubset(next_node_input):
|
||||
logger.warning(f"{prefix} Skipped {next_node_id}-{next_node_block.name}, "
|
||||
f"missing: {set(next_node.input_nodes) - set(next_node_input)}")
|
||||
return None
|
||||
|
||||
if error := next_node_block.input_schema.validate_data(next_node_input):
|
||||
logger.warning(
|
||||
f"{prefix} Skipped {next_node_id}-{next_node_block.name}, {error}")
|
||||
return None
|
||||
|
||||
logger.warning(f"{prefix} Enqueue next node {next_node_id}-{next_node_block.name}")
|
||||
return Execution(
|
||||
run_id=run_id, node_id=next_node_id, data=next_node_input
|
||||
)
|
||||
|
||||
|
||||
def execute_node_sync(data: Execution) -> Optional[Execution | None]:
|
||||
"""
|
||||
A synchronous version of `execute_node`, to be used in the ProcessPoolExecutor.
|
||||
"""
|
||||
prefix = get_log_prefix(data.run_id, data.id)
|
||||
try:
|
||||
logger.warning(f"{prefix} Start execution")
|
||||
loop = asyncio.get_event_loop()
|
||||
return loop.run_until_complete(execute_node(data))
|
||||
except Exception as e:
|
||||
logger.error(f"{prefix} Error: {e}")
|
||||
|
||||
|
||||
def start_executor(pool_size: int, queue: ExecutionQueue) -> None:
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.run_until_complete(db.connect())
|
||||
loop.run_until_complete(block.initialize_blocks())
|
||||
|
||||
def on_complete_execution(f: asyncio.Future[Execution | None]):
|
||||
exception = f.exception()
|
||||
if exception:
|
||||
logger.exception("Error during execution!! %s", exception)
|
||||
return exception
|
||||
|
||||
execution = f.result()
|
||||
if execution:
|
||||
loop.run_until_complete(add_execution(execution, queue))
|
||||
return exception
|
||||
|
||||
return None
|
||||
|
||||
logger.warning("Executor started!")
|
||||
|
||||
with ProcessPoolExecutor(
|
||||
max_workers=pool_size,
|
||||
initializer=db.connect_sync,
|
||||
) as executor:
|
||||
while True:
|
||||
future = executor.submit(execute_node_sync, queue.get())
|
||||
future.add_done_callback(on_complete_execution) # type: ignore
|
||||
|
||||
|
||||
def start_executor_manager(pool_size: int, queue: ExecutionQueue) -> None:
|
||||
executor_process = Process(target=start_executor, args=(pool_size, queue))
|
||||
executor_process.start()
|
||||
200
rnd/autogpt_server/autogpt_server/executor/manager.py
Normal file
200
rnd/autogpt_server/autogpt_server/executor/manager.py
Normal file
@@ -0,0 +1,200 @@
|
||||
import asyncio
|
||||
import logging
|
||||
import uuid
|
||||
from concurrent.futures import ProcessPoolExecutor
|
||||
from typing import Optional, Any
|
||||
|
||||
from autogpt_server.data import db
|
||||
from autogpt_server.data.block import Block, get_block
|
||||
from autogpt_server.data.graph import Node, get_node, get_node_input, get_graph
|
||||
from autogpt_server.data.execution import (
|
||||
Execution,
|
||||
ExecutionQueue,
|
||||
enqueue_execution,
|
||||
complete_execution,
|
||||
fail_execution,
|
||||
start_execution,
|
||||
)
|
||||
from autogpt_server.util.service import AppService, expose
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_log_prefix(run_id: str, exec_id: str, block_name: str = "-"):
|
||||
return f"[ExecutionManager] [graph-{run_id}|node-{exec_id}|{block_name}]"
|
||||
|
||||
|
||||
def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> Execution | None:
|
||||
"""
|
||||
Execute a node in the graph. This will trigger a block execution on a node,
|
||||
persist the execution result, and return the subsequent node to be executed.
|
||||
|
||||
Args:
|
||||
loop: The event loop to run the async functions.
|
||||
data: The execution data for executing the current node.
|
||||
|
||||
Returns:
|
||||
The subsequent node to be enqueued, or None if there is no subsequent node.
|
||||
"""
|
||||
run_id = data.run_id
|
||||
exec_id = data.id
|
||||
exec_data = data.data
|
||||
node_id = data.node_id
|
||||
|
||||
asyncio.set_event_loop(loop)
|
||||
wait = lambda f: loop.run_until_complete(f)
|
||||
|
||||
node: Optional[Node] = wait(get_node(node_id))
|
||||
if not node:
|
||||
logger.error(f"Node {node_id} not found.")
|
||||
return None
|
||||
|
||||
node_block: Optional[Block] = wait(get_block(node.block_id))
|
||||
if not node_block:
|
||||
logger.error(f"Block {node.block_id} not found.")
|
||||
return None
|
||||
|
||||
# Execute the node
|
||||
prefix = get_log_prefix(run_id, exec_id, node_block.name)
|
||||
logger.warning(f"{prefix} execute with input:\n`{exec_data}`")
|
||||
wait(start_execution(exec_id))
|
||||
|
||||
try:
|
||||
output_name, output_data = node_block.execute(exec_data)
|
||||
logger.warning(f"{prefix} executed with output [{output_name}]:`{output_data}`")
|
||||
wait(complete_execution(exec_id, (output_name, output_data)))
|
||||
except Exception as e:
|
||||
logger.exception(f"{prefix} failed with error: %s", e)
|
||||
wait(fail_execution(exec_id, e))
|
||||
raise e
|
||||
|
||||
# Try to enqueue next eligible nodes
|
||||
if output_name not in node.output_nodes:
|
||||
logger.error(f"{prefix} Output [{output_name}] has no subsequent node.")
|
||||
return None
|
||||
|
||||
next_node_id = node.output_nodes[output_name]
|
||||
next_node: Optional[Node] = wait(get_node(next_node_id))
|
||||
if not next_node:
|
||||
logger.error(f"{prefix} Error, next node {next_node_id} not found.")
|
||||
return None
|
||||
|
||||
next_node_input: dict[str, Any] = wait(get_node_input(next_node, run_id))
|
||||
is_valid, validation_resp = wait(validate_exec(next_node, next_node_input))
|
||||
if not is_valid:
|
||||
logger.warning(f"{prefix} Skipped {next_node_id}: {validation_resp}")
|
||||
return None
|
||||
|
||||
logger.warning(f"{prefix} Enqueue next node {next_node_id}-{validation_resp}")
|
||||
return Execution(run_id=run_id, node_id=next_node_id, data=next_node_input)
|
||||
|
||||
|
||||
async def validate_exec(node: Node, data: dict[str, Any]) -> tuple[bool, str]:
|
||||
"""
|
||||
Validate the input data for a node execution.
|
||||
|
||||
Args:
|
||||
node: The node to execute.
|
||||
data: The input data for the node execution.
|
||||
|
||||
Returns:
|
||||
A tuple of a boolean indicating if the data is valid, and a message if not.
|
||||
Return the executed block name if the data is valid.
|
||||
"""
|
||||
node_block: Block | None = await(get_block(node.block_id))
|
||||
if not node_block:
|
||||
return False, f"Block for {node.block_id} not found."
|
||||
|
||||
if not set(node.input_nodes).issubset(data):
|
||||
return False, f"Input data missing: {set(node.input_nodes) - set(data)}"
|
||||
|
||||
if error := node_block.input_schema.validate_data(data):
|
||||
return False, f"Input data doesn't match {node_block.name}: {error}"
|
||||
|
||||
return True, node_block.name
|
||||
|
||||
|
||||
class Executor:
|
||||
loop: asyncio.AbstractEventLoop
|
||||
|
||||
@classmethod
|
||||
def on_executor_start(cls):
|
||||
cls.loop = asyncio.new_event_loop()
|
||||
cls.loop.run_until_complete(db.connect())
|
||||
|
||||
@classmethod
|
||||
def on_start_execution(cls, data: Execution) -> Optional[Execution | None]:
|
||||
"""
|
||||
A synchronous version of `execute_node`, to be used in the ProcessPoolExecutor.
|
||||
"""
|
||||
prefix = get_log_prefix(data.run_id, data.id)
|
||||
try:
|
||||
logger.warning(f"{prefix} Start execution")
|
||||
return execute_node(cls.loop, data)
|
||||
except Exception as e:
|
||||
logger.error(f"{prefix} Error: {e}")
|
||||
|
||||
|
||||
class ExecutionManager(AppService):
|
||||
|
||||
def __init__(self, pool_size: int):
|
||||
self.pool_size = pool_size
|
||||
self.queue = ExecutionQueue()
|
||||
|
||||
def run_service(self):
|
||||
def on_complete_execution(f: asyncio.Future[Execution | None]):
|
||||
exception = f.exception()
|
||||
if exception:
|
||||
logger.exception("Error during execution!! %s", exception)
|
||||
return exception
|
||||
|
||||
execution = f.result()
|
||||
if execution:
|
||||
return self.add_node_execution(execution)
|
||||
|
||||
return None
|
||||
|
||||
with ProcessPoolExecutor(
|
||||
max_workers=self.pool_size,
|
||||
initializer=Executor.on_executor_start,
|
||||
) as executor:
|
||||
logger.warning(f"Execution manager started with {self.pool_size} workers.")
|
||||
while True:
|
||||
future = executor.submit(
|
||||
Executor.on_start_execution,
|
||||
self.queue.get()
|
||||
)
|
||||
future.add_done_callback(on_complete_execution) # type: ignore
|
||||
|
||||
@expose
|
||||
def add_execution(self, graph_id: str, data: dict[str, Any]) -> dict:
|
||||
run_id = str(uuid.uuid4())
|
||||
|
||||
agent = self.run_and_wait(get_graph(graph_id))
|
||||
if not agent:
|
||||
raise Exception(f"Agent #{graph_id} not found.")
|
||||
|
||||
# Currently, there is no constraint on the number of root nodes in the graph.
|
||||
for node in agent.starting_nodes:
|
||||
valid, error = self.run_and_wait(validate_exec(node, data))
|
||||
if not valid:
|
||||
raise Exception(error)
|
||||
|
||||
executions = []
|
||||
for node in agent.starting_nodes:
|
||||
exec_id = self.add_node_execution(
|
||||
Execution(run_id=run_id, node_id=node.id, data=data)
|
||||
)
|
||||
executions.append({
|
||||
"exec_id": exec_id,
|
||||
"node_id": node.id,
|
||||
})
|
||||
|
||||
return {
|
||||
"run_id": run_id,
|
||||
"executions": executions,
|
||||
}
|
||||
|
||||
def add_node_execution(self, execution: Execution) -> Execution:
|
||||
self.run_and_wait(enqueue_execution(execution))
|
||||
return self.queue.add(execution)
|
||||
82
rnd/autogpt_server/autogpt_server/executor/scheduler.py
Normal file
82
rnd/autogpt_server/autogpt_server/executor/scheduler.py
Normal file
@@ -0,0 +1,82 @@
|
||||
import logging
|
||||
import time
|
||||
|
||||
from apscheduler.schedulers.background import BackgroundScheduler
|
||||
from apscheduler.triggers.cron import CronTrigger
|
||||
from datetime import datetime
|
||||
|
||||
from autogpt_server.data import schedule as model
|
||||
from autogpt_server.util.service import AppService, expose, get_service_client
|
||||
from autogpt_server.executor.manager import ExecutionManager
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def log(msg, **kwargs):
|
||||
logger.warning("[ExecutionScheduler] " + msg, **kwargs)
|
||||
|
||||
|
||||
class ExecutionScheduler(AppService):
    """AppService that mirrors DB-stored execution schedules into an
    APScheduler instance and triggers agent executions on their cron times.
    """

    def __init__(self, refresh_interval=10):
        # Watermark: only schedules updated after this time are (re)fetched.
        self.last_check = datetime.min
        # Seconds between DB polls for schedule changes.
        self.refresh_interval = refresh_interval

    @property
    def execution_manager_client(self):
        # Pyro proxy to the ExecutionManager service.
        return get_service_client(ExecutionManager)

    def run_service(self):
        scheduler = BackgroundScheduler()
        scheduler.start()
        while True:
            self.__refresh_jobs_from_db(scheduler)
            time.sleep(self.refresh_interval)

    def __refresh_jobs_from_db(self, scheduler: BackgroundScheduler):
        """Sync recently-updated schedules from the DB into the scheduler."""
        schedules = self.run_and_wait(model.get_active_schedules(self.last_check))
        for schedule in schedules:
            self.last_check = max(self.last_check, schedule.last_updated)

            if not schedule.is_enabled:
                log(f"Removing recurring job {schedule.id}: {schedule.schedule}")
                # BUGFIX: remove_job() raises JobLookupError for ids that were
                # never added in this process (e.g. a schedule disabled before
                # this service first saw it) — check existence first.
                if scheduler.get_job(schedule.id):
                    scheduler.remove_job(schedule.id)
                continue

            log(f"Adding recurring job {schedule.id}: {schedule.schedule}")
            scheduler.add_job(
                self.__execute_agent,
                CronTrigger.from_crontab(schedule.schedule),
                id=schedule.id,
                args=[schedule.agent_id, schedule.input_data],
                replace_existing=True,
            )

    def __execute_agent(self, agent_id: str, input_data: dict):
        """Job callback: fire one execution of the agent via the manager."""
        try:
            log(f"Executing recurring job for agent #{agent_id}")
            execution_manager = self.execution_manager_client
            execution_manager.add_execution(agent_id, input_data)
        except Exception as e:
            # Never let one failed trigger kill the scheduler thread.
            logger.error(f"Error executing agent {agent_id}: {e}")

    @expose
    def update_schedule(self, schedule_id: str, is_enabled: bool) -> str:
        """Enable/disable a schedule; returns the schedule id."""
        self.run_and_wait(model.update_schedule(schedule_id, is_enabled))
        return schedule_id

    @expose
    def add_execution_schedule(self, agent_id: str, cron: str, input_data: dict) -> str:
        """Persist a new cron schedule for the agent; returns its new id."""
        schedule = model.ExecutionSchedule(
            agent_id=agent_id,
            schedule=cron,
            input_data=input_data,
        )
        self.run_and_wait(model.add_schedule(schedule))
        return schedule.id

    @expose
    def get_execution_schedules(self, agent_id: str) -> dict[str, str]:
        """Map schedule id -> cron expression for all schedules of the agent."""
        query = model.get_schedules(agent_id)
        schedules: list[model.ExecutionSchedule] = self.run_and_wait(query)
        return {v.id: v.schedule for v in schedules}
@@ -1 +1,3 @@
|
||||
from .server import start_server # type: ignore # noqa
|
||||
from .server import AgentServer
|
||||
|
||||
__all__ = ["AgentServer"]
|
||||
|
||||
@@ -1,25 +1,25 @@
|
||||
import asyncio
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import uuid
|
||||
|
||||
import uvicorn
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
from fastapi import APIRouter, FastAPI, HTTPException
|
||||
|
||||
from autogpt_server.data import db, execution, graph
|
||||
from autogpt_server.data import db, execution, graph, block
|
||||
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
|
||||
from autogpt_server.util.process import AppProcess
|
||||
from autogpt_server.util.service import get_service_client
|
||||
|
||||
|
||||
class AgentServer:
|
||||
class AgentServer(AppProcess):
|
||||
|
||||
def __init__(self, queue: execution.ExecutionQueue):
|
||||
@asynccontextmanager
|
||||
async def lifespan(self, _: FastAPI):
|
||||
await db.connect()
|
||||
yield
|
||||
await db.disconnect()
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
await db.connect()
|
||||
yield
|
||||
await db.disconnect()
|
||||
|
||||
self.app = FastAPI(
|
||||
def run(self):
|
||||
app = FastAPI(
|
||||
title="AutoGPT Agent Server",
|
||||
description=(
|
||||
"This server is used to execute agents that are created by the "
|
||||
@@ -27,46 +27,122 @@ class AgentServer:
|
||||
),
|
||||
summary="AutoGPT Agent Server",
|
||||
version="0.1",
|
||||
lifespan=lifespan,
|
||||
lifespan=self.lifespan,
|
||||
)
|
||||
self.execution_queue = queue
|
||||
|
||||
# Define the API routes
|
||||
self.router = APIRouter()
|
||||
self.router.add_api_route(
|
||||
router = APIRouter()
|
||||
router.add_api_route(
|
||||
path="/blocks",
|
||||
endpoint=self.get_agent_blocks,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents",
|
||||
endpoint=self.get_agents,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents/{agent_id}",
|
||||
endpoint=self.get_agent,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents",
|
||||
endpoint=self.create_agent,
|
||||
methods=["POST"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents/{agent_id}/execute",
|
||||
endpoint=self.execute_agent,
|
||||
methods=["POST"],
|
||||
)
|
||||
self.app.include_router(self.router)
|
||||
router.add_api_route(
|
||||
path="/agents/{agent_id}/executions/{run_id}",
|
||||
endpoint=self.get_executions,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents/{agent_id}/schedules",
|
||||
endpoint=self.schedule_agent,
|
||||
methods=["POST"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents/{agent_id}/schedules",
|
||||
endpoint=self.get_execution_schedules,
|
||||
methods=["GET"],
|
||||
)
|
||||
router.add_api_route(
|
||||
path="/agents/schedules/{schedule_id}",
|
||||
endpoint=self.update_schedule,
|
||||
methods=["PUT"],
|
||||
)
|
||||
|
||||
async def execute_agent(self, agent_id: str, node_input: dict):
|
||||
app.include_router(router)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
||||
|
||||
@property
def execution_manager_client(self) -> ExecutionManager:
    # Typed Pyro proxy to the ExecutionManager service.
    return get_service_client(ExecutionManager)
||||
|
||||
@property
def execution_scheduler_client(self) -> ExecutionScheduler:
    # Typed Pyro proxy to the ExecutionScheduler service.
    return get_service_client(ExecutionScheduler)
|
||||
async def get_agent_blocks(self) -> list[dict]:
    """List every available block as a serializable dict."""
    blocks = await block.get_blocks()
    return [b.to_dict() for b in blocks]
|
||||
async def get_agents(self) -> list[str]:
    """Return the ids of all stored agent graphs."""
    return await graph.get_graph_ids()
|
||||
async def get_agent(self, agent_id: str) -> graph.Graph:
    """Fetch a single agent graph, or raise HTTP 404 if it does not exist."""
    agent = await graph.get_graph(agent_id)
    if not agent:
        raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.")
    return agent
|
||||
# Currently, there is no constraint on the number of root nodes in the graph.
|
||||
for node in agent.starting_nodes:
|
||||
block = await node.block
|
||||
if error := block.input_schema.validate_data(node_input):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=f"Input data doesn't match {block.name} input: {error}",
|
||||
)
|
||||
async def create_agent(self, agent: graph.Graph) -> graph.Graph:
    """Persist a new agent graph, assigning fresh ids to the graph and all
    of its nodes so re-submitted graphs never collide with stored records."""
    agent.id = str(uuid.uuid4())

    # Remap every node id and rewrite the edges to match.
    id_map = {node.id: str(uuid.uuid4()) for node in agent.nodes}
    for node in agent.nodes:
        node.id = id_map[node.id]
        node.input_nodes = {k: id_map[v] for k, v in node.input_nodes.items()}
        node.output_nodes = {k: id_map[v] for k, v in node.output_nodes.items()}

    return await graph.create_graph(agent)
||||
async def execute_agent(self, agent_id: str, node_input: dict) -> dict:
    """Start a new run of the agent via the ExecutionManager service.

    Returns the manager's response (run id + created executions); any
    service-side failure is surfaced as HTTP 400 with a readable message.
    """
    try:
        return self.execution_manager_client.add_execution(agent_id, node_input)
    except Exception as e:
        # Error text arrives escaped through the RPC layer; unescape it so
        # the HTTP client sees a readable detail string.
        msg = str(e).encode().decode("unicode_escape")
        # Chain the original exception for server-side debugging.
        raise HTTPException(status_code=400, detail=msg) from e
|
||||
async def get_executions(
    self, agent_id: str, run_id: str
) -> list[execution.ExecutionResult]:
    """Return all node execution results recorded for the given run."""
    agent = await graph.get_graph(agent_id)
    if not agent:
        raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.")
    return await execution.get_executions(run_id)
|
||||
async def schedule_agent(self, agent_id: str, cron: str, input_data: dict) -> dict:
    """Create a recurring (cron) execution schedule for an existing agent."""
    agent = await graph.get_graph(agent_id)
    if not agent:
        raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.")
    scheduler = self.execution_scheduler_client
    schedule_id = scheduler.add_execution_schedule(agent_id, cron, input_data)
    return {"id": schedule_id}
||||
def update_schedule(self, schedule_id: str, input_data: dict) -> dict:
    """Enable or disable an execution schedule (disabled when the flag is
    absent from the payload)."""
    enabled = input_data.get("is_enabled", False)
    self.execution_scheduler_client.update_schedule(schedule_id, enabled)
    return {"id": schedule_id}
|
||||
def get_execution_schedules(self, agent_id: str) -> dict[str, str]:
    """Map schedule id -> cron expression for all schedules of the agent."""
    return self.execution_scheduler_client.get_execution_schedules(agent_id)
||||
73
rnd/autogpt_server/autogpt_server/util/process.py
Normal file
73
rnd/autogpt_server/autogpt_server/util/process.py
Normal file
@@ -0,0 +1,73 @@
|
||||
import os
|
||||
import sys
|
||||
from abc import ABC, abstractmethod
|
||||
from multiprocessing import Process, freeze_support, set_start_method
|
||||
from multiprocessing.spawn import freeze_support as freeze_support_spawn
|
||||
from typing import Optional
|
||||
|
||||
|
||||
class AppProcess(ABC):
    """
    A class to represent an object that can be executed in a background process.
    """
    # Handle to the spawned process; None while not running in background.
    process: Optional[Process] = None

    # NOTE: executed at class-definition (import) time so every process
    # spawned afterwards uses the 'spawn' start method and supports
    # frozen (cx_Freeze) binaries.
    set_start_method('spawn', force=True)
    freeze_support()
    freeze_support_spawn()

    @abstractmethod
    def run(self):
        """
        The method that will be executed in the process.
        """
        pass

    def execute_run_command(self, silent):
        """Run self.run(), optionally muting stdout/stderr, and swallow
        normal termination signals."""
        try:
            if silent:
                # Redirect output for the lifetime of this (short-lived)
                # process; the devnull handles are intentionally not closed.
                sys.stdout = open(os.devnull, "w")
                sys.stderr = open(os.devnull, "w")
            self.run()
        except (KeyboardInterrupt, SystemExit) as e:
            # BUGFIX: was `except KeyboardInterrupt or SystemExit`, which
            # evaluates to KeyboardInterrupt only — SystemExit was never
            # caught. A tuple catches both termination paths.
            print(f"Process terminated: {e}")

    def __enter__(self):
        self.start(background=True)
        return self

    def __exit__(self, *args, **kwargs):
        self.stop()

    def start(self, background: bool = False, silent: bool = False, **proc_args) -> int:
        """
        Start the background process.
        Args:
            background: Whether to run the process in the background.
            silent: Whether to disable stdout and stderr.
            proc_args: Additional arguments to pass to the process.
        Returns:
            the process id or 0 if the process is not running in the background.
        """
        if not background:
            # Foreground mode: run synchronously in this process.
            self.execute_run_command(silent)
            return 0

        self.process = Process(
            name=self.__class__.__name__,
            target=self.execute_run_command,
            args=(silent,),
            **proc_args,
        )
        self.process.start()
        return self.process.pid or 0

    def stop(self):
        """
        Stop the background process.
        """
        if not self.process:
            return

        self.process.terminate()
        self.process.join()
        self.process = None
110
rnd/autogpt_server/autogpt_server/util/service.py
Normal file
110
rnd/autogpt_server/autogpt_server/util/service.py
Normal file
@@ -0,0 +1,110 @@
|
||||
import time
|
||||
import asyncio
|
||||
import logging
|
||||
import threading
|
||||
|
||||
from abc import abstractmethod
|
||||
from typing import Any, Callable, Type, TypeVar, cast, Coroutine
|
||||
|
||||
from Pyro5 import api as pyro
|
||||
from Pyro5 import nameserver
|
||||
from tenacity import retry, stop_after_delay, wait_exponential
|
||||
|
||||
from autogpt_server.data import db
|
||||
from autogpt_server.util.process import AppProcess
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
conn_retry = retry(stop=stop_after_delay(5), wait=wait_exponential(multiplier=0.1))
|
||||
|
||||
|
||||
def expose(func: Callable) -> Callable:
    """Decorator for service methods reachable over Pyro: logs any error,
    annotates it with the function name, re-raises, and registers the
    wrapper with Pyro."""
    from functools import wraps  # local import: keeps module-level deps unchanged

    @wraps(func)  # preserve __name__/__doc__ so Pyro and logs see the real endpoint
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            msg = f"Error in {func.__name__}: {e}"
            logger.error(msg)
            # Re-raise with context; callers receive (message, original error).
            raise Exception(msg, e)

    return pyro.expose(wrapper)
||||
|
||||
class PyroNameServer(AppProcess):
    # Runs the Pyro5 name server loop in its own process so services can
    # register and discover each other.
    def run(self):
        try:
            print("Starting NameServer loop")
            # Blocks until interrupted; serves name lookups for all services.
            nameserver.start_ns_loop()
        except KeyboardInterrupt:
            print("Shutting down NameServer")
|
||||
class AppService(AppProcess):
    """An AppProcess that serves its @expose-d methods over Pyro5 while
    running coroutines on a single shared asyncio event loop."""
    # Event loop shared across threads; coroutines are submitted to it via
    # run_coroutine_threadsafe (see run_async).
    shared_event_loop: asyncio.AbstractEventLoop

    @classmethod
    @property
    def service_name(cls) -> str:
        # Name used to register/lookup this service in the Pyro name server.
        # NOTE(review): @classmethod+@property chaining is deprecated in
        # Python 3.11 and removed in 3.13 — confirm target Python versions.
        return cls.__name__

    @abstractmethod
    def run_service(self):
        # Default main loop: idle forever; subclasses override with real work.
        while True:
            time.sleep(10)

    def run_async(self, coro: Coroutine):
        # Submit a coroutine to the shared loop from any thread; returns a
        # concurrent.futures.Future.
        return asyncio.run_coroutine_threadsafe(coro, self.shared_event_loop)

    def run_and_wait(self, coro: Coroutine):
        # Blocking helper: schedule the coroutine and wait for its result.
        future = self.run_async(coro)
        return future.result()

    def run(self):
        # Capture the loop and connect to the DB BEFORE any thread can
        # schedule work onto it.
        self.shared_event_loop = asyncio.get_event_loop()
        self.shared_event_loop.run_until_complete(db.connect())

        # Initialize the async loop.
        async_thread = threading.Thread(target=self.__start_async_loop)
        async_thread.daemon = True
        async_thread.start()

        # Initialize pyro service
        daemon_thread = threading.Thread(target=self.__start_pyro)
        daemon_thread.daemon = True
        daemon_thread.start()

        # Run the main service (if it's not implemented, just sleep).
        self.run_service()

    @conn_retry
    def __start_pyro(self):
        # Register this object with the name server and serve requests;
        # @conn_retry keeps retrying until the name server is reachable.
        daemon = pyro.Daemon()
        ns = pyro.locate_ns()
        uri = daemon.register(self)
        ns.register(self.service_name, uri)
        logger.warning(f"Service [{self.service_name}] Ready. Object URI = {uri}")
        daemon.requestLoop()

    def __start_async_loop(self):
        # asyncio.set_event_loop(self.shared_event_loop)
        self.shared_event_loop.run_forever()
||||
|
||||
AS = TypeVar("AS", bound=AppService)
|
||||
|
||||
|
||||
def get_service_client(service_type: Type[AS]) -> AS:
    """Build a Pyro proxy client that is typed (via cast) as *service_type*
    and forwards every attribute access to the remote service."""
    service_name = service_type.service_name

    class DynamicClient:

        @conn_retry
        def __init__(self):
            # Resolve the URI through the name server and bind eagerly so
            # connection errors surface here (and are retried).
            resolved_uri = pyro.locate_ns().lookup(service_name)
            self.proxy = pyro.Proxy(resolved_uri)
            self.proxy._pyroBind()

        def __getattr__(self, name: str) -> Callable[..., Any]:
            # Delegate everything (method calls included) to the proxy.
            return getattr(self.proxy, name)

    return cast(AS, DynamicClient())
166
rnd/autogpt_server/poetry.lock
generated
166
rnd/autogpt_server/poetry.lock
generated
@@ -1,4 +1,4 @@
|
||||
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "annotated-types"
|
||||
@@ -33,6 +33,34 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin
|
||||
test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
|
||||
trio = ["trio (>=0.23)"]
|
||||
|
||||
[[package]]
|
||||
name = "apscheduler"
|
||||
version = "3.10.4"
|
||||
description = "In-process task scheduler with Cron-like capabilities"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
{file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"},
|
||||
{file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
pytz = "*"
|
||||
six = ">=1.4.0"
|
||||
tzlocal = ">=2.0,<3.dev0 || >=4.dev0"
|
||||
|
||||
[package.extras]
|
||||
doc = ["sphinx", "sphinx-rtd-theme"]
|
||||
gevent = ["gevent"]
|
||||
mongodb = ["pymongo (>=3.0)"]
|
||||
redis = ["redis (>=3.0)"]
|
||||
rethinkdb = ["rethinkdb (>=2.4.0)"]
|
||||
sqlalchemy = ["sqlalchemy (>=1.4)"]
|
||||
testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"]
|
||||
tornado = ["tornado (>=4.3)"]
|
||||
twisted = ["twisted"]
|
||||
zookeeper = ["kazoo"]
|
||||
|
||||
[[package]]
|
||||
name = "attrs"
|
||||
version = "23.2.0"
|
||||
@@ -88,6 +116,21 @@ files = [
|
||||
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "croniter"
|
||||
version = "2.0.5"
|
||||
description = "croniter provides iteration for datetime object with cron like format"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6"
|
||||
files = [
|
||||
{file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"},
|
||||
{file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
python-dateutil = "*"
|
||||
pytz = ">2021.1"
|
||||
|
||||
[[package]]
|
||||
name = "cx-freeze"
|
||||
version = "7.0.0"
|
||||
@@ -216,18 +259,18 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)"
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.14.0"
|
||||
version = "3.15.1"
|
||||
description = "A platform independent file lock."
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"},
|
||||
{file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"},
|
||||
{file = "filelock-3.15.1-py3-none-any.whl", hash = "sha256:71b3102950e91dfc1bb4209b64be4dc8854f40e5f534428d8684f953ac847fac"},
|
||||
{file = "filelock-3.15.1.tar.gz", hash = "sha256:58a2549afdf9e02e10720eaa4d4470f56386d7a6f72edd7d0596337af8ed7ad8"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"]
|
||||
typing = ["typing-extensions (>=4.8)"]
|
||||
|
||||
[[package]]
|
||||
@@ -695,13 +738,13 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pydantic"
|
||||
version = "2.7.3"
|
||||
version = "2.7.4"
|
||||
description = "Data validation using Python type hints"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "pydantic-2.7.3-py3-none-any.whl", hash = "sha256:ea91b002777bf643bb20dd717c028ec43216b24a6001a280f83877fd2655d0b4"},
|
||||
{file = "pydantic-2.7.3.tar.gz", hash = "sha256:c46c76a40bb1296728d7a8b99aa73dd70a48c3510111ff290034f860c99c419e"},
|
||||
{file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
|
||||
{file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -814,6 +857,20 @@ files = [
|
||||
{file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyro5"
|
||||
version = "5.15"
|
||||
description = "Remote object communication library, fifth major version"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "Pyro5-5.15-py3-none-any.whl", hash = "sha256:4d85428ed75985e63f159d2486ad5680743ea76f766340fd30b65dd20f83d471"},
|
||||
{file = "Pyro5-5.15.tar.gz", hash = "sha256:82c3dfc9860b49f897b28ff24fe6716c841672c600af8fe40d0e3a7fac9a3f5e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
serpent = ">=1.41"
|
||||
|
||||
[[package]]
|
||||
name = "pytest"
|
||||
version = "8.2.2"
|
||||
@@ -869,6 +926,20 @@ files = [
|
||||
tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""}
|
||||
watchdog = ">=2.0.0"
|
||||
|
||||
[[package]]
|
||||
name = "python-dateutil"
|
||||
version = "2.9.0.post0"
|
||||
description = "Extensions to the standard Python datetime module"
|
||||
optional = false
|
||||
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
|
||||
files = [
|
||||
{file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
|
||||
{file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
six = ">=1.5"
|
||||
|
||||
[[package]]
|
||||
name = "python-dotenv"
|
||||
version = "1.0.1"
|
||||
@@ -883,6 +954,17 @@ files = [
|
||||
[package.extras]
|
||||
cli = ["click (>=5.0)"]
|
||||
|
||||
[[package]]
|
||||
name = "pytz"
|
||||
version = "2024.1"
|
||||
description = "World timezone definitions, modern and historical"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
|
||||
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pyyaml"
|
||||
version = "6.0.1"
|
||||
@@ -908,6 +990,7 @@ files = [
|
||||
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
|
||||
{file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
|
||||
@@ -1091,6 +1174,17 @@ files = [
|
||||
{file = "ruff-0.4.8.tar.gz", hash = "sha256:16d717b1d57b2e2fd68bd0bf80fb43931b79d05a7131aa477d66fc40fbd86268"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serpent"
|
||||
version = "1.41"
|
||||
description = "Serialization based on ast.literal_eval"
|
||||
optional = false
|
||||
python-versions = ">=3.2"
|
||||
files = [
|
||||
{file = "serpent-1.41-py3-none-any.whl", hash = "sha256:5fd776b3420441985bc10679564c2c9b4a19f77bea59f018e473441d98ae5dd7"},
|
||||
{file = "serpent-1.41.tar.gz", hash = "sha256:0407035fe3c6644387d48cff1467d5aa9feff814d07372b78677ed0ee3ed7095"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "69.5.1"
|
||||
@@ -1107,6 +1201,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments
|
||||
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
|
||||
testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "six"
|
||||
version = "1.16.0"
|
||||
description = "Python 2 and 3 compatibility utilities"
|
||||
optional = false
|
||||
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
|
||||
files = [
|
||||
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
|
||||
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sniffio"
|
||||
version = "1.3.1"
|
||||
@@ -1151,6 +1256,21 @@ docs = ["myst-parser[linkify]", "sphinx", "sphinx-rtd-theme"]
|
||||
release = ["twine"]
|
||||
test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"]
|
||||
|
||||
[[package]]
|
||||
name = "tenacity"
|
||||
version = "8.3.0"
|
||||
description = "Retry code until it succeeds"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "tenacity-8.3.0-py3-none-any.whl", hash = "sha256:3649f6443dbc0d9b01b9d8020a9c4ec7a1ff5f6f3c6c8a036ef371f573fe9185"},
|
||||
{file = "tenacity-8.3.0.tar.gz", hash = "sha256:953d4e6ad24357bceffbc9707bc74349aca9d245f68eb65419cf0c249a1949a2"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
doc = ["reno", "sphinx"]
|
||||
test = ["pytest", "tornado (>=4.5)", "typeguard"]
|
||||
|
||||
[[package]]
|
||||
name = "tomli"
|
||||
version = "2.0.1"
|
||||
@@ -1184,6 +1304,34 @@ files = [
|
||||
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzdata"
|
||||
version = "2024.1"
|
||||
description = "Provider of IANA time zone data"
|
||||
optional = false
|
||||
python-versions = ">=2"
|
||||
files = [
|
||||
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
|
||||
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tzlocal"
|
||||
version = "5.2"
|
||||
description = "tzinfo object for the local timezone"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"},
|
||||
{file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
tzdata = {version = "*", markers = "platform_system == \"Windows\""}
|
||||
|
||||
[package.extras]
|
||||
devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"]
|
||||
|
||||
[[package]]
|
||||
name = "uvicorn"
|
||||
version = "0.30.1"
|
||||
@@ -1483,4 +1631,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "391567de870dbbf86ea217ff6b15f7c6d2c9406707c196661d29f45deb886812"
|
||||
content-hash = "17f25b61da5f54bb4bb13cecfedda56d23c097aacb95bb213f13ce63ee08c761"
|
||||
|
||||
@@ -16,11 +16,15 @@ prisma = "^0.13.1"
|
||||
pytest = "^8.2.1"
|
||||
uvicorn = { extras = ["standard"], version = "^0.30.1" }
|
||||
fastapi = "^0.109.0"
|
||||
pytest-asyncio = "^0.23.7"
|
||||
ruff = "^0.4.8"
|
||||
flake8 = "^7.0.0"
|
||||
jsonschema = "^4.22.0"
|
||||
psutil = "^5.9.8"
|
||||
pyro5 = "^5.15"
|
||||
tenacity = "^8.3.0"
|
||||
apscheduler = "^3.10.4"
|
||||
croniter = "^2.0.5"
|
||||
pytest-asyncio = "^0.23.7"
|
||||
|
||||
|
||||
[tool.poetry.group.dev.dependencies]
|
||||
@@ -35,7 +39,7 @@ build-backend = "poetry.core.masonry.api"
|
||||
|
||||
[tool.poetry.scripts]
|
||||
app = "autogpt_server.app:main"
|
||||
cli = "autogpt_server.cli:start_cli"
|
||||
cli = "autogpt_server.cli:main"
|
||||
|
||||
# https://poethepoet.natn.io/index.html
|
||||
[tool.poe]
|
||||
@@ -79,3 +83,6 @@ runner = "pytest"
|
||||
runner_args = []
|
||||
patterns = ["*.py"]
|
||||
ignore_patterns = []
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
asyncio_mode = "auto"
|
||||
@@ -15,7 +15,8 @@ model AgentGraph {
|
||||
name String?
|
||||
description String?
|
||||
|
||||
AgentNodes AgentNode[] @relation("AgentGraphNodes")
|
||||
AgentNodes AgentNode[] @relation("AgentGraphNodes")
|
||||
AgentExecutionSchedule AgentExecutionSchedule[]
|
||||
}
|
||||
|
||||
// This model describes a single node in the Agent Graph/Flow (Multi Agent System).
|
||||
@@ -37,6 +38,9 @@ model AgentNode {
|
||||
// JSON serialized dict[str, str] containing predefined input values.
|
||||
constantInput String @default("{}")
|
||||
|
||||
// JSON serialized dict[str, str] containing the node metadata.
|
||||
metadata String @default("{}")
|
||||
|
||||
ExecutionHistory AgentNodeExecution[]
|
||||
}
|
||||
|
||||
@@ -105,3 +109,20 @@ model FileDefinition {
|
||||
ReferencedByInputFiles AgentNodeExecution[] @relation("InputFiles")
|
||||
ReferencedByOutputFiles AgentNodeExecution[] @relation("OutputFiles")
|
||||
}
|
||||
|
||||
// This model describes the recurring execution schedule of an Agent.
|
||||
model AgentExecutionSchedule {
|
||||
id String @id
|
||||
|
||||
agentGraphId String
|
||||
AgentGraph AgentGraph @relation(fields: [agentGraphId], references: [id])
|
||||
|
||||
schedule String // cron expression
|
||||
isEnabled Boolean @default(true)
|
||||
inputData String // JSON serialized object
|
||||
|
||||
// default and set the value on each update, lastUpdated field has no time zone.
|
||||
lastUpdated DateTime @updatedAt
|
||||
|
||||
@@index([isEnabled])
|
||||
}
|
||||
|
||||
@@ -29,11 +29,14 @@ setup(
|
||||
icon=icon,
|
||||
),
|
||||
Executable(
|
||||
"autogpt_server/cli.py", target_name="agpt_server_cli", base="console", icon=icon
|
||||
"autogpt_server/cli.py",
|
||||
target_name="agpt_server_cli",
|
||||
base="console",
|
||||
icon=icon,
|
||||
),
|
||||
],
|
||||
options={
|
||||
# Options for building all the executables
|
||||
# Options for building all the executables
|
||||
"build_exe": {
|
||||
"packages": packages,
|
||||
"includes": [
|
||||
|
||||
119
rnd/autogpt_server/test/executor/test_manager.py
Normal file
119
rnd/autogpt_server/test/executor/test_manager.py
Normal file
@@ -0,0 +1,119 @@
|
||||
import time
|
||||
|
||||
import pytest
|
||||
|
||||
from autogpt_server.data import block, db, execution, graph
|
||||
from autogpt_server.executor import ExecutionManager
|
||||
from autogpt_server.server import AgentServer
|
||||
from autogpt_server.util.service import PyroNameServer
|
||||
|
||||
|
||||
async def create_test_graph() -> graph.Graph:
    """
    ParrotBlock
                \
                 ---- TextCombinerBlock ---- PrintingBlock
                /
    ParrotBlock
    """
    parrot_1 = graph.Node(block_id=block.ParrotBlock.id)
    parrot_2 = graph.Node(block_id=block.ParrotBlock.id)
    combiner = graph.Node(
        block_id=block.TextCombinerBlock.id,
        input_default={"format": "{text1},{text2}"},
    )
    printer = graph.Node(block_id=block.PrintingBlock.id)

    # Wire both parrots into the combiner, then the combiner into the printer.
    parrot_1.connect(combiner, "output", "text1")
    parrot_2.connect(combiner, "output", "text2")
    combiner.connect(printer, "combined_text", "text")

    test_graph = graph.Graph(
        name="TestGraph",
        description="Test graph",
        nodes=[parrot_1, parrot_2, combiner, printer],
    )
    await block.initialize_blocks()
    result = await graph.create_graph(test_graph)

    # Sanity-check that persistence kept the structure intact.
    assert result.name == test_graph.name
    assert result.description == test_graph.description
    assert len(result.nodes) == len(test_graph.nodes)

    return test_graph
||||
|
||||
async def execute_agent(test_manager: ExecutionManager, test_graph: graph.Graph):
    """
    Run *test_graph* through the AgentServer and verify that all four node
    executions complete with the expected inputs and outputs.

    Args:
        test_manager: The running execution manager whose queue is polled.
        test_graph: The graph created by create_test_graph().
    """
    import asyncio  # local import to avoid touching the module import block

    # --- Test adding new executions --- #
    text = "Hello, World!"
    input_data = {"input": text}
    agent_server = AgentServer()
    response = await agent_server.execute_agent(test_graph.id, input_data)
    executions = response["executions"]
    run_id = response["run_id"]
    # Only the two source (Parrot) nodes are immediately executable.
    assert len(executions) == 2

    async def is_execution_completed():
        execs = await agent_server.get_executions(test_graph.id, run_id)
        return test_manager.queue.empty() and len(execs) == 4

    # Wait (up to ~10s) for the executions to complete.
    # NOTE: the original used time.sleep(1), which blocks the event loop and
    # can starve tasks scheduled on it; asyncio.sleep yields control instead.
    for _ in range(10):
        if await is_execution_completed():
            break
        await asyncio.sleep(1)

    # Execution queue should be empty and all four results recorded.
    assert await is_execution_completed()
    executions = await agent_server.get_executions(test_graph.id, run_id)

    def assert_node_exec(node_exec, node_index, output_name, output_data, node_input):
        # Shared per-node assertions. (The original bound each record to a
        # variable named `exec`, shadowing the `exec` builtin.)
        assert node_exec.status == execution.ExecutionStatus.COMPLETED
        assert node_exec.run_id == run_id
        assert node_exec.output_name == output_name
        assert node_exec.output_data == output_data
        assert node_exec.input_data == node_input
        assert node_exec.node_id == test_graph.nodes[node_index].id

    # Executing ParrotBlock1
    assert_node_exec(executions[0], 0, "output", "Hello, World!", input_data)

    # Executing ParrotBlock2
    assert_node_exec(executions[1], 1, "output", "Hello, World!", input_data)

    # Executing TextCombinerBlock
    assert_node_exec(
        executions[2],
        2,
        "combined_text",
        "Hello, World!,Hello, World!",
        {
            "format": "{text1},{text2}",
            "text1": "Hello, World!",
            "text2": "Hello, World!",
        },
    )

    # Executing PrintingBlock
    assert_node_exec(
        executions[3], 3, "status", "printed", {"text": "Hello, World!,Hello, World!"}
    )
||||
@pytest.mark.asyncio(scope="session")
async def test_agent_execution():
    """End-to-end: build the test graph and run it through the executor."""
    with PyroNameServer(), ExecutionManager(1) as test_manager:
        await db.connect()
        test_graph = await create_test_graph()
        await execute_agent(test_manager, test_graph)
33
rnd/autogpt_server/test/executor/test_scheduler.py
Normal file
33
rnd/autogpt_server/test/executor/test_scheduler.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import pytest
|
||||
|
||||
import test_manager
|
||||
from autogpt_server.executor.scheduler import ExecutionScheduler
|
||||
from autogpt_server.util.service import PyroNameServer, get_service_client
|
||||
|
||||
|
||||
@pytest.mark.asyncio(scope="session")
async def test_agent_schedule():
    """Add, list, and disable an execution schedule via the scheduler service."""
    await test_manager.db.connect()
    test_graph = await test_manager.create_test_graph()

    with PyroNameServer(), ExecutionScheduler():
        scheduler = get_service_client(ExecutionScheduler)

        # A freshly created graph has no schedules attached.
        assert len(scheduler.get_execution_schedules(test_graph.id)) == 0

        # Register a daily (midnight) cron schedule.
        schedule_id = scheduler.add_execution_schedule(
            test_graph.id, "0 0 * * *", {"input": "data"}
        )
        assert schedule_id

        schedules = scheduler.get_execution_schedules(test_graph.id)
        assert len(schedules) == 1
        assert schedules[schedule_id] == "0 0 * * *"

        # Disabling the schedule removes it from the listing.
        scheduler.update_schedule(schedule_id, is_enabled=False)
        assert len(scheduler.get_execution_schedules(test_graph.id)) == 0
||||
@@ -1,97 +0,0 @@
|
||||
import pytest
|
||||
|
||||
from autogpt_server.data import block, db, graph
|
||||
from autogpt_server.data.execution import ExecutionQueue, add_execution
|
||||
from autogpt_server.executor import executor
|
||||
from autogpt_server.server import server
|
||||
|
||||
|
||||
async def create_test_graph() -> graph.Graph:
    r"""
    Create and persist a two-layer test graph:

        ParrotBlock
                   \
                    ---- TextCombinerBlock ---- PrintingBlock
                   /
        ParrotBlock

    Note: the docstring is a raw string because the ASCII-art backslash would
    otherwise be an invalid escape sequence (SyntaxWarning on Python 3.12+).

    Returns:
        The graph as persisted by the database layer.
    """
    nodes = [
        graph.Node(block_id=block.ParrotBlock.id),
        graph.Node(block_id=block.ParrotBlock.id),
        graph.Node(
            block_id=block.TextCombinerBlock.id,
            input_default={"format": "{text1},{text2}"}
        ),
        graph.Node(block_id=block.PrintingBlock.id),
    ]
    # Wire both parrots into the combiner, then the combiner into the printer.
    nodes[0].connect(nodes[2], "output", "text1")
    nodes[1].connect(nodes[2], "output", "text2")
    nodes[2].connect(nodes[3], "combined_text", "text")

    test_graph = graph.Graph(
        name="TestGraph",
        description="Test graph",
        nodes=nodes,
    )
    await block.initialize_blocks()
    result = await graph.create_graph(test_graph)

    # Sanity-check that the persisted graph matches what was submitted.
    assert result.name == test_graph.name
    assert result.description == test_graph.description
    assert len(result.nodes) == len(test_graph.nodes)

    return result
||||
|
||||
async def execute_node(queue: ExecutionQueue) -> dict | None:
    """
    Pop one execution off *queue*, run it, and enqueue any follow-up execution.

    Returns:
        The input data of the follow-up execution, or None when the executed
        node produced no further work.
    """
    follow_up = await executor.execute_node(queue.get())
    if follow_up:
        await add_execution(follow_up, queue)
        return follow_up.data
    return None
||||
|
||||
@pytest.mark.asyncio
async def test_agent_execution():
    """Drive the execution queue by hand, checking each node's propagated data."""
    await db.connect()
    test_graph = await create_test_graph()
    test_queue = ExecutionQueue()
    test_server = server.AgentServer(test_queue)

    # --- Test adding new executions --- #
    text = "Hello, World!"
    input_data = {"input": text}
    executions = await test_server.execute_agent(test_graph.id, input_data)

    # 2 executions should be created, one for each ParrotBlock, with same run_id.
    assert len(executions) == 2
    first, second = executions
    assert first.run_id == second.run_id
    assert first.node_id != second.node_id
    assert first.data == second.data == input_data

    # --- Test Executing added tasks --- #

    # Executing ParrotBlock1, TextCombinerBlock won't be enqueued yet.
    assert not test_queue.empty()
    assert await execute_node(test_queue) is None

    # Executing ParrotBlock2, TextCombinerBlock will be enqueued.
    assert not test_queue.empty()
    combiner_input = await execute_node(test_queue)
    assert test_queue.empty()
    assert combiner_input
    assert combiner_input.keys() == {"text1", "text2", "format"}
    assert combiner_input["text1"] == text
    assert combiner_input["text2"] == text
    assert combiner_input["format"] == "{text1},{text2}"

    # Executing TextCombinerBlock, PrintingBlock will be enqueued.
    printer_input = await execute_node(test_queue)
    assert printer_input
    assert printer_input.keys() == {"text"}
    assert printer_input["text"] == f"{text},{text}"

    # Executing PrintingBlock, no more tasks will be enqueued.
    assert await execute_node(test_queue) is None
35
rnd/autogpt_server/test/util/test_service.py
Normal file
35
rnd/autogpt_server/test/util/test_service.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from autogpt_server.util.service import (
|
||||
AppService,
|
||||
PyroNameServer,
|
||||
expose,
|
||||
get_service_client,
|
||||
)
|
||||
|
||||
|
||||
class TestService(AppService):
    """Minimal AppService used to exercise the Pyro service plumbing."""

    def run_service(self):
        # Delegate to the base implementation unchanged.
        # NOTE(review): looks redundant, but AppService.run_service may be
        # abstract — confirm before removing the override.
        super().run_service()

    @expose
    def add(self, a: int, b: int) -> int:
        """Return the sum of *a* and *b*."""
        return a + b

    @expose
    def subtract(self, a: int, b: int) -> int:
        """Return *a* minus *b*."""
        return a - b

    @expose
    def fun_with_async(self, a: int, b: int) -> int:
        """Add *a* and *b* inside a coroutine, driven through run_and_wait."""

        async def async_sum(x: int, y: int) -> int:
            return x + y

        return self.run_and_wait(async_sum(a, b))
|
||||
def test_service_creation():
    """Start a name server plus TestService, then call its exposed methods."""
    with PyroNameServer(), TestService():
        client = get_service_client(TestService)
        assert client.add(5, 3) == 8
        assert client.subtract(10, 4) == 6
        assert client.fun_with_async(5, 3) == 8
Reference in New Issue
Block a user