Compare commits


5 Commits

Author SHA1 Message Date
Otto
435738f2f9 refactor: address Pwuts review feedback
- Remove unused error_response() and format_inputs_as_markdown() from helpers.py
- Remove _get_inputs_list() wrapper from run_agent.py, use get_inputs_from_schema directly
- Fix type annotations: get_user_credentials, find_matching_credential, create_credential_meta_from_match
- Remove check_scopes parameter - always check scopes (original missing check was broken behavior)
- Reorder _credential_has_required_scopes to be defined before find_matching_credential
2026-02-08 16:43:21 +00:00
Otto
10ebaf5104 fix: enable scope checking for block credentials (consistency with graphs)
Previously run_block didn't check OAuth2 scopes while run_agent did.
Now both use the same scope-checking logic for credential matching.
2026-02-08 16:42:58 +00:00
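Below is a minimal sketch, not the repository's actual implementation, of the shared scope-checking behaviour this commit describes; the credential attributes (provider, scopes) and the helper signatures are assumptions.

```python
# Hedged sketch of the shared scope check; credential fields and signatures are assumed.
from typing import Any, Iterable, Optional


def _credential_has_required_scopes(credential: Any, required_scopes: set[str]) -> bool:
    """True if the credential grants every required OAuth2 scope."""
    granted = set(getattr(credential, "scopes", None) or [])
    return required_scopes.issubset(granted)


def find_matching_credential(
    credentials: Iterable[Any],
    provider: str,
    required_scopes: set[str],
) -> Optional[Any]:
    """Return the first credential for `provider` that also satisfies the scopes.

    Per the commit, run_block and run_agent are both routed through the same
    scope check, which is the consistency being restored here.
    """
    for credential in credentials:
        if getattr(credential, "provider", None) != provider:
            continue
        if _credential_has_required_scopes(credential, required_scopes):
            return credential
    return None
```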
Otto
59e58d5e9b fix: preserve original credential matching behavior
- Add check_scopes parameter to find_matching_credential and
  match_credentials_to_requirements (default True)
- run_block uses check_scopes=False to preserve original behavior
  (original run_block did not verify OAuth2 scopes)
- Add isinstance check to get_inputs_from_schema for safety
  (original returned [] if input_schema wasn't a dict)
2026-02-08 16:42:58 +00:00
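A rough illustration of the isinstance guard this commit mentions for get_inputs_from_schema; the JSON-schema-like shape with a "properties" key is an assumption, not taken from the codebase.

```python
from typing import Any


def get_inputs_from_schema(input_schema: Any) -> list[dict[str, Any]]:
    """Collect input descriptors from a block/graph input schema.

    Mirrors the guard described above: if the schema is not a dict
    (None, a string, a malformed value), return [] instead of raising.
    """
    if not isinstance(input_schema, dict):
        return []
    properties = input_schema.get("properties", {})
    if not isinstance(properties, dict):
        return []
    return [
        {"name": name, **(spec if isinstance(spec, dict) else {})}
        for name, spec in properties.items()
    ]
```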
Otto
f458adf2a5 chore: remove docstrings and use sorted() for deterministic UUID ordering
2026-02-08 16:42:53 +00:00
Otto
1613e41cab refactor(copilot): code cleanup - extract shared helpers and reduce duplication
- Create tools/helpers.py with shared utilities (get_inputs_from_schema)
- Add shared credential matching utilities to utils.py
- Refactor run_block to use shared matching with discriminator support
- Update run_agent.py to use shared helpers
- Add validation.py with UUID helpers

Note: routes.py streaming refactor dropped - dev has new stream registry implementation
2026-02-08 16:42:50 +00:00
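An illustrative sketch, under assumed names and signatures, of the kind of UUID validation helper and sorted()-based deterministic ordering referenced in the two commits above.

```python
import uuid


def is_valid_uuid(value: str) -> bool:
    """Assumed helper: True if `value` parses as a canonical UUID string."""
    try:
        uuid.UUID(str(value))
    except ValueError:
        return False
    return True


def order_ids_deterministically(ids: set[str]) -> list[str]:
    """sorted() returns the same list for the same set, regardless of iteration order."""
    return sorted(ids)
```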
37 changed files with 436 additions and 4244 deletions

View File

@@ -49,7 +49,7 @@ jobs:
- name: Create PR ${{ env.BUILD_BRANCH }} -> ${{ github.ref_name }}
if: github.event_name == 'push'
uses: peter-evans/create-pull-request@v8
uses: peter-evans/create-pull-request@v7
with:
add-paths: classic/frontend/build/web
base: ${{ github.ref_name }}

View File

@@ -309,7 +309,6 @@ jobs:
uses: anthropics/claude-code-action@v1
with:
claude_code_oauth_token: ${{ secrets.CLAUDE_CODE_OAUTH_TOKEN }}
allowed_bots: "dependabot[bot]"
claude_args: |
--allowedTools "Bash(npm:*),Bash(pnpm:*),Bash(poetry:*),Bash(git:*),Edit,Replace,NotebookEditCell,mcp__github_inline_comment__create_inline_comment,Bash(gh pr comment:*), Bash(gh pr diff:*), Bash(gh pr view:*)"
prompt: |

View File

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "annotated-doc"
@@ -67,7 +67,7 @@ description = "Backport of asyncio.Runner, a context manager that controls event
optional = false
python-versions = "<3.11,>=3.8"
groups = ["dev"]
markers = "python_version == \"3.10\""
markers = "python_version < \"3.11\""
files = [
{file = "backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5"},
{file = "backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162"},
@@ -99,101 +99,84 @@ files = [
[[package]]
name = "cffi"
version = "2.0.0"
version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.9"
python-versions = ">=3.8"
groups = ["main"]
markers = "platform_python_implementation != \"PyPy\""
files = [
{file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
{file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
{file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"},
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"},
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"},
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"},
{file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"},
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"},
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"},
{file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"},
{file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"},
{file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"},
{file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"},
{file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"},
{file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"},
{file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"},
{file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"},
{file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"},
{file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"},
{file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"},
{file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"},
{file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"},
{file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"},
{file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"},
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"},
{file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"},
{file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"},
{file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"},
{file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"},
{file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"},
{file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"},
{file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"},
{file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"},
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"},
{file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"},
{file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"},
{file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"},
{file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"},
{file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"},
{file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"},
{file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"},
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"},
{file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"},
{file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"},
{file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"},
{file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"},
{file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"},
{file = "cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"},
{file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"},
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"},
{file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"},
{file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"},
{file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"},
{file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"},
{file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"},
{file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"},
{file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"},
{file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"},
{file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"},
{file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"},
{file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
{file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
{file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
{file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
{file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
{file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
{file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
{file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
{file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
{file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
[package.dependencies]
pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
pycparser = "*"
[[package]]
name = "charset-normalizer"
@@ -430,75 +413,62 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
[[package]]
name = "cryptography"
version = "46.0.4"
version = "45.0.6"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
groups = ["main"]
files = [
{file = "cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616"},
{file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0"},
{file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0"},
{file = "cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5"},
{file = "cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b"},
{file = "cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f"},
{file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82"},
{file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c"},
{file = "cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061"},
{file = "cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7"},
{file = "cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019"},
{file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4"},
{file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b"},
{file = "cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc"},
{file = "cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3"},
{file = "cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59"},
{file = "cryptography-45.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:048e7ad9e08cf4c0ab07ff7f36cc3115924e22e2266e034450a890d9e312dd74"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:44647c5d796f5fc042bbc6d61307d04bf29bccb74d188f18051b635f20a9c75f"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e40b80ecf35ec265c452eea0ba94c9587ca763e739b8e559c128d23bff7ebbbf"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:00e8724bdad672d75e6f069b27970883179bd472cd24a63f6e620ca7e41cc0c5"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a3085d1b319d35296176af31c90338eeb2ddac8104661df79f80e1d9787b8b2"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1b7fa6a1c1188c7ee32e47590d16a5a0646270921f8020efc9a511648e1b2e08"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:275ba5cc0d9e320cd70f8e7b96d9e59903c815ca579ab96c1e37278d231fc402"},
{file = "cryptography-45.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f4028f29a9f38a2025abedb2e409973709c660d44319c61762202206ed577c42"},
{file = "cryptography-45.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ee411a1b977f40bd075392c80c10b58025ee5c6b47a822a33c1198598a7a5f05"},
{file = "cryptography-45.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:e2a21a8eda2d86bb604934b6b37691585bd095c1f788530c1fcefc53a82b3453"},
{file = "cryptography-45.0.6-cp311-abi3-win32.whl", hash = "sha256:d063341378d7ee9c91f9d23b431a3502fc8bfacd54ef0a27baa72a0843b29159"},
{file = "cryptography-45.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:833dc32dfc1e39b7376a87b9a6a4288a10aae234631268486558920029b086ec"},
{file = "cryptography-45.0.6-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:3436128a60a5e5490603ab2adbabc8763613f638513ffa7d311c900a8349a2a0"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0d9ef57b6768d9fa58e92f4947cea96ade1233c0e236db22ba44748ffedca394"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ea3c42f2016a5bbf71825537c2ad753f2870191134933196bee408aac397b3d9"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:20ae4906a13716139d6d762ceb3e0e7e110f7955f3bc3876e3a07f5daadec5f3"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dac5ec199038b8e131365e2324c03d20e97fe214af051d20c49db129844e8b3"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:18f878a34b90d688982e43f4b700408b478102dd58b3e39de21b5ebf6509c301"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:5bd6020c80c5b2b2242d6c48487d7b85700f5e0038e67b29d706f98440d66eb5"},
{file = "cryptography-45.0.6-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:eccddbd986e43014263eda489abbddfbc287af5cddfd690477993dbb31e31016"},
{file = "cryptography-45.0.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:550ae02148206beb722cfe4ef0933f9352bab26b087af00e48fdfb9ade35c5b3"},
{file = "cryptography-45.0.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5b64e668fc3528e77efa51ca70fadcd6610e8ab231e3e06ae2bab3b31c2b8ed9"},
{file = "cryptography-45.0.6-cp37-abi3-win32.whl", hash = "sha256:780c40fb751c7d2b0c6786ceee6b6f871e86e8718a8ff4bc35073ac353c7cd02"},
{file = "cryptography-45.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:20d15aed3ee522faac1a39fbfdfee25d17b1284bafd808e1640a74846d7c4d1b"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:705bb7c7ecc3d79a50f236adda12ca331c8e7ecfbea51edd931ce5a7a7c4f012"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:826b46dae41a1155a0c0e66fafba43d0ede1dc16570b95e40c4d83bfcf0a451d"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:cc4d66f5dc4dc37b89cfef1bd5044387f7a1f6f0abb490815628501909332d5d"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f68f833a9d445cc49f01097d95c83a850795921b3f7cc6488731e69bde3288da"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3b5bf5267e98661b9b888a9250d05b063220dfa917a8203744454573c7eb79db"},
{file = "cryptography-45.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2384f2ab18d9be88a6e4f8972923405e2dbb8d3e16c6b43f15ca491d7831bd18"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fc022c1fa5acff6def2fc6d7819bbbd31ccddfe67d075331a65d9cfb28a20983"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3de77e4df42ac8d4e4d6cdb342d989803ad37707cf8f3fbf7b088c9cbdd46427"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:599c8d7df950aa68baa7e98f7b73f4f414c9f02d0e8104a30c0182a07732638b"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:31a2b9a10530a1cb04ffd6aa1cd4d3be9ed49f7d77a4dafe198f3b382f41545c"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:e5b3dda1b00fb41da3af4c5ef3f922a200e33ee5ba0f0bc9ecf0b0c173958385"},
{file = "cryptography-45.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:629127cfdcdc6806dfe234734d7cb8ac54edaf572148274fa377a7d3405b0043"},
{file = "cryptography-45.0.6.tar.gz", hash = "sha256:5c966c732cf6e4a276ce83b6e4c729edda2df6929083a952cc7da973c539c719"},
]
[package.dependencies]
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""}
cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==45.0.6)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -523,7 +493,7 @@ description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
markers = "python_version == \"3.10\""
markers = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"},
{file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"},
@@ -1680,7 +1650,7 @@ description = "C parser in Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "platform_python_implementation != \"PyPy\" and implementation_name != \"PyPy\""
markers = "platform_python_implementation != \"PyPy\""
files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
@@ -2545,7 +2515,7 @@ description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
markers = "python_version == \"3.10\""
markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
{file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
@@ -2893,4 +2863,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<4.0"
content-hash = "cc80d3a129b84435a0f40132d073caa37858ca2427ed372fecfd810a61712d0c"
content-hash = "5f15a9c9381c9a374f3d18e087c23b1f1ba8cce192d6f67463a3e3a7a18fee44"

View File

@@ -9,7 +9,7 @@ packages = [{ include = "autogpt_libs" }]
[tool.poetry.dependencies]
python = ">=3.10,<4.0"
colorama = "^0.4.6"
cryptography = "^46.0"
cryptography = "^45.0"
expiringdict = "^1.2.2"
fastapi = "^0.128.0"
google-cloud-logging = "^3.13.0"

View File

@@ -1,251 +0,0 @@
import logging
import autogpt_libs.auth
import fastapi
import fastapi.responses
import backend.api.features.store.db as store_db
import backend.api.features.store.model as store_model
logger = logging.getLogger(__name__)
router = fastapi.APIRouter(
prefix="/admin/waitlist",
tags=["store", "admin", "waitlist"],
dependencies=[fastapi.Security(autogpt_libs.auth.requires_admin_user)],
)
@router.post(
"",
summary="Create Waitlist",
response_model=store_model.WaitlistAdminResponse,
)
async def create_waitlist(
request: store_model.WaitlistCreateRequest,
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
):
"""
Create a new waitlist (admin only).
Args:
request: Waitlist creation details
user_id: Authenticated admin user creating the waitlist
Returns:
WaitlistAdminResponse with the created waitlist details
"""
try:
waitlist = await store_db.create_waitlist_admin(
admin_user_id=user_id,
data=request,
)
return waitlist
except Exception as e:
logger.exception("Error creating waitlist: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while creating the waitlist"},
)
@router.get(
"",
summary="List All Waitlists",
response_model=store_model.WaitlistAdminListResponse,
)
async def list_waitlists():
"""
Get all waitlists with admin details (admin only).
Returns:
WaitlistAdminListResponse with all waitlists
"""
try:
return await store_db.get_waitlists_admin()
except Exception as e:
logger.exception("Error listing waitlists: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while fetching waitlists"},
)
@router.get(
"/{waitlist_id}",
summary="Get Waitlist Details",
response_model=store_model.WaitlistAdminResponse,
)
async def get_waitlist(
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist"),
):
"""
Get a single waitlist with admin details (admin only).
Args:
waitlist_id: ID of the waitlist to retrieve
Returns:
WaitlistAdminResponse with waitlist details
"""
try:
return await store_db.get_waitlist_admin(waitlist_id)
except ValueError:
logger.warning("Waitlist not found: %s", waitlist_id)
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Waitlist not found"},
)
except Exception as e:
logger.exception("Error fetching waitlist: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while fetching the waitlist"},
)
@router.put(
"/{waitlist_id}",
summary="Update Waitlist",
response_model=store_model.WaitlistAdminResponse,
)
async def update_waitlist(
request: store_model.WaitlistUpdateRequest,
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist"),
):
"""
Update a waitlist (admin only).
Args:
waitlist_id: ID of the waitlist to update
request: Fields to update
Returns:
WaitlistAdminResponse with updated waitlist details
"""
try:
return await store_db.update_waitlist_admin(waitlist_id, request)
except ValueError:
logger.warning("Waitlist not found for update: %s", waitlist_id)
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Waitlist not found"},
)
except Exception as e:
logger.exception("Error updating waitlist: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while updating the waitlist"},
)
@router.delete(
"/{waitlist_id}",
summary="Delete Waitlist",
)
async def delete_waitlist(
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist"),
):
"""
Soft delete a waitlist (admin only).
Args:
waitlist_id: ID of the waitlist to delete
Returns:
Success message
"""
try:
await store_db.delete_waitlist_admin(waitlist_id)
return {"message": "Waitlist deleted successfully"}
except ValueError:
logger.warning(f"Waitlist not found for deletion: {waitlist_id}")
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Waitlist not found"},
)
except Exception as e:
logger.exception("Error deleting waitlist: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while deleting the waitlist"},
)
@router.get(
"/{waitlist_id}/signups",
summary="Get Waitlist Signups",
response_model=store_model.WaitlistSignupListResponse,
)
async def get_waitlist_signups(
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist"),
):
"""
Get all signups for a waitlist (admin only).
Args:
waitlist_id: ID of the waitlist
Returns:
WaitlistSignupListResponse with all signups
"""
try:
return await store_db.get_waitlist_signups_admin(waitlist_id)
except ValueError:
logger.warning("Waitlist not found for signups: %s", waitlist_id)
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Waitlist not found"},
)
except Exception as e:
logger.exception("Error fetching waitlist signups: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while fetching waitlist signups"},
)
@router.post(
"/{waitlist_id}/link",
summary="Link Waitlist to Store Listing",
response_model=store_model.WaitlistAdminResponse,
)
async def link_waitlist_to_listing(
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist"),
store_listing_id: str = fastapi.Body(
..., embed=True, description="The ID of the store listing"
),
):
"""
Link a waitlist to a store listing (admin only).
When the linked store listing is approved/published, waitlist users
will be automatically notified.
Args:
waitlist_id: ID of the waitlist
store_listing_id: ID of the store listing to link
Returns:
WaitlistAdminResponse with updated waitlist details
"""
try:
return await store_db.link_waitlist_to_listing_admin(
waitlist_id, store_listing_id
)
except ValueError:
logger.warning(
"Link failed - waitlist or listing not found: %s, %s",
waitlist_id,
store_listing_id,
)
return fastapi.responses.JSONResponse(
status_code=404,
content={"detail": "Waitlist or store listing not found"},
)
except Exception as e:
logger.exception("Error linking waitlist to listing: %s", e)
return fastapi.responses.JSONResponse(
status_code=500,
content={"detail": "An error occurred while linking the waitlist"},
)

View File

@@ -1,26 +1,29 @@
"""Chat API routes for chat session management and streaming via SSE."""
import logging
import uuid as uuid_module
from collections.abc import AsyncGenerator
from typing import Annotated
from autogpt_libs import auth
from fastapi import APIRouter, Depends, Header, HTTPException, Query, Security
from fastapi import APIRouter, Depends, Query, Security
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
from backend.util.exceptions import NotFoundError
from . import service as chat_service
from . import stream_registry
from .completion_handler import process_operation_failure, process_operation_success
from .config import ChatConfig
from .model import ChatSession, create_chat_session, get_chat_session, get_user_sessions
from .response_model import StreamFinish, StreamHeartbeat, StreamStart
config = ChatConfig()
SSE_RESPONSE_HEADERS = {
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no",
"x-vercel-ai-ui-message-stream": "v1",
}
logger = logging.getLogger(__name__)
@@ -36,6 +39,48 @@ async def _validate_and_get_session(
return session
async def _create_stream_generator(
session_id: str,
message: str,
user_id: str | None,
session: ChatSession,
is_user_message: bool = True,
context: dict[str, str] | None = None,
) -> AsyncGenerator[str, None]:
"""Create SSE event generator for chat streaming."""
chunk_count = 0
first_chunk_type: str | None = None
async for chunk in chat_service.stream_chat_completion(
session_id,
message,
is_user_message=is_user_message,
user_id=user_id,
session=session,
context=context,
):
if chunk_count < 3:
logger.info(
"Chat stream chunk",
extra={
"session_id": session_id,
"chunk_type": str(chunk.type),
},
)
if not first_chunk_type:
first_chunk_type = str(chunk.type)
chunk_count += 1
yield chunk.to_sse()
logger.info(
"Chat stream completed",
extra={
"session_id": session_id,
"chunk_count": chunk_count,
"first_chunk_type": first_chunk_type,
},
)
yield "data: [DONE]\n\n"
router = APIRouter(
tags=["chat"],
)
@@ -59,15 +104,6 @@ class CreateSessionResponse(BaseModel):
user_id: str | None
class ActiveStreamInfo(BaseModel):
"""Information about an active stream for reconnection."""
task_id: str
last_message_id: str # Redis Stream message ID for resumption
operation_id: str # Operation ID for completion tracking
tool_name: str # Name of the tool being executed
class SessionDetailResponse(BaseModel):
"""Response model providing complete details for a chat session, including messages."""
@@ -76,7 +112,6 @@ class SessionDetailResponse(BaseModel):
updated_at: str
user_id: str | None
messages: list[dict]
active_stream: ActiveStreamInfo | None = None # Present if stream is still active
class SessionSummaryResponse(BaseModel):
@@ -95,14 +130,6 @@ class ListSessionsResponse(BaseModel):
total: int
class OperationCompleteRequest(BaseModel):
"""Request model for external completion webhook."""
success: bool
result: dict | str | None = None
error: str | None = None
# ========== Routes ==========
@@ -188,14 +215,13 @@ async def get_session(
Retrieve the details of a specific chat session.
Looks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.
If there's an active stream for this session, returns the task_id for reconnection.
Args:
session_id: The unique identifier for the desired chat session.
user_id: The optional authenticated user ID, or None for anonymous access.
Returns:
SessionDetailResponse: Details for the requested session, including active_stream info if applicable.
SessionDetailResponse: Details for the requested session, or None if not found.
"""
session = await get_chat_session(session_id, user_id)
@@ -203,28 +229,11 @@ async def get_session(
raise NotFoundError(f"Session {session_id} not found.")
messages = [message.model_dump() for message in session.messages]
# Check if there's an active stream for this session
active_stream_info = None
active_task, last_message_id = await stream_registry.get_active_task_for_session(
session_id, user_id
logger.info(
f"Returning session {session_id}: "
f"message_count={len(messages)}, "
f"roles={[m.get('role') for m in messages]}"
)
if active_task:
# Filter out the in-progress assistant message from the session response.
# The client will receive the complete assistant response through the SSE
# stream replay instead, preventing duplicate content.
if messages and messages[-1].get("role") == "assistant":
messages = messages[:-1]
# Use "0-0" as last_message_id to replay the stream from the beginning.
# Since we filtered out the cached assistant message, the client needs
# the full stream to reconstruct the response.
active_stream_info = ActiveStreamInfo(
task_id=active_task.task_id,
last_message_id="0-0",
operation_id=active_task.operation_id,
tool_name=active_task.tool_name,
)
return SessionDetailResponse(
id=session.session_id,
@@ -232,7 +241,6 @@ async def get_session(
updated_at=session.updated_at.isoformat(),
user_id=session.user_id or None,
messages=messages,
active_stream=active_stream_info,
)
@@ -252,122 +260,27 @@ async def stream_chat_post(
- Tool call UI elements (if invoked)
- Tool execution results
The AI generation runs in a background task that continues even if the client disconnects.
All chunks are written to Redis for reconnection support. If the client disconnects,
they can reconnect using GET /tasks/{task_id}/stream to resume from where they left off.
Args:
session_id: The chat session identifier to associate with the streamed messages.
request: Request body containing message, is_user_message, and optional context.
user_id: Optional authenticated user ID.
Returns:
StreamingResponse: SSE-formatted response chunks. First chunk is a "start" event
containing the task_id for reconnection.
StreamingResponse: SSE-formatted response chunks.
"""
import asyncio
session = await _validate_and_get_session(session_id, user_id)
# Create a task in the stream registry for reconnection support
task_id = str(uuid_module.uuid4())
operation_id = str(uuid_module.uuid4())
await stream_registry.create_task(
task_id=task_id,
session_id=session_id,
user_id=user_id,
tool_call_id="chat_stream", # Not a tool call, but needed for the model
tool_name="chat",
operation_id=operation_id,
)
# Background task that runs the AI generation independently of SSE connection
async def run_ai_generation():
try:
# Emit a start event with task_id for reconnection
start_chunk = StreamStart(messageId=task_id, taskId=task_id)
await stream_registry.publish_chunk(task_id, start_chunk)
async for chunk in chat_service.stream_chat_completion(
session_id,
request.message,
is_user_message=request.is_user_message,
user_id=user_id,
session=session, # Pass pre-fetched session to avoid double-fetch
context=request.context,
):
# Write to Redis (subscribers will receive via XREAD)
await stream_registry.publish_chunk(task_id, chunk)
# Mark task as completed
await stream_registry.mark_task_completed(task_id, "completed")
except Exception as e:
logger.error(
f"Error in background AI generation for session {session_id}: {e}"
)
await stream_registry.mark_task_completed(task_id, "failed")
# Start the AI generation in a background task
bg_task = asyncio.create_task(run_ai_generation())
await stream_registry.set_task_asyncio_task(task_id, bg_task)
# SSE endpoint that subscribes to the task's stream
async def event_generator() -> AsyncGenerator[str, None]:
subscriber_queue = None
try:
# Subscribe to the task stream (this replays existing messages + live updates)
subscriber_queue = await stream_registry.subscribe_to_task(
task_id=task_id,
user_id=user_id,
last_message_id="0-0", # Get all messages from the beginning
)
if subscriber_queue is None:
yield StreamFinish().to_sse()
yield "data: [DONE]\n\n"
return
# Read from the subscriber queue and yield to SSE
while True:
try:
chunk = await asyncio.wait_for(subscriber_queue.get(), timeout=30.0)
yield chunk.to_sse()
# Check for finish signal
if isinstance(chunk, StreamFinish):
break
except asyncio.TimeoutError:
# Send heartbeat to keep connection alive
yield StreamHeartbeat().to_sse()
except GeneratorExit:
pass # Client disconnected - background task continues
except Exception as e:
logger.error(f"Error in SSE stream for task {task_id}: {e}")
finally:
# Unsubscribe when client disconnects or stream ends to prevent resource leak
if subscriber_queue is not None:
try:
await stream_registry.unsubscribe_from_task(
task_id, subscriber_queue
)
except Exception as unsub_err:
logger.error(
f"Error unsubscribing from task {task_id}: {unsub_err}",
exc_info=True,
)
# AI SDK protocol termination - always yield even if unsubscribe fails
yield "data: [DONE]\n\n"
return StreamingResponse(
event_generator(),
_create_stream_generator(
session_id=session_id,
message=request.message,
user_id=user_id,
session=session,
is_user_message=request.is_user_message,
context=request.context,
),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no", # Disable nginx buffering
"x-vercel-ai-ui-message-stream": "v1", # AI SDK protocol header
},
headers=SSE_RESPONSE_HEADERS,
)
@@ -399,48 +312,16 @@ async def stream_chat_get(
"""
session = await _validate_and_get_session(session_id, user_id)
async def event_generator() -> AsyncGenerator[str, None]:
chunk_count = 0
first_chunk_type: str | None = None
async for chunk in chat_service.stream_chat_completion(
session_id,
message,
is_user_message=is_user_message,
user_id=user_id,
session=session, # Pass pre-fetched session to avoid double-fetch
):
if chunk_count < 3:
logger.info(
"Chat stream chunk",
extra={
"session_id": session_id,
"chunk_type": str(chunk.type),
},
)
if not first_chunk_type:
first_chunk_type = str(chunk.type)
chunk_count += 1
yield chunk.to_sse()
logger.info(
"Chat stream completed",
extra={
"session_id": session_id,
"chunk_count": chunk_count,
"first_chunk_type": first_chunk_type,
},
)
# AI SDK protocol termination
yield "data: [DONE]\n\n"
return StreamingResponse(
event_generator(),
_create_stream_generator(
session_id=session_id,
message=message,
user_id=user_id,
session=session,
is_user_message=is_user_message,
),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no", # Disable nginx buffering
"x-vercel-ai-ui-message-stream": "v1", # AI SDK protocol header
},
headers=SSE_RESPONSE_HEADERS,
)
@@ -470,251 +351,6 @@ async def session_assign_user(
return {"status": "ok"}
# ========== Task Streaming (SSE Reconnection) ==========
@router.get(
"/tasks/{task_id}/stream",
)
async def stream_task(
task_id: str,
user_id: str | None = Depends(auth.get_user_id),
last_message_id: str = Query(
default="0-0",
description="Last Redis Stream message ID received (e.g., '1706540123456-0'). Use '0-0' for full replay.",
),
):
"""
Reconnect to a long-running task's SSE stream.
When a long-running operation (like agent generation) starts, the client
receives a task_id. If the connection drops, the client can reconnect
using this endpoint to resume receiving updates.
Args:
task_id: The task ID from the operation_started response.
user_id: Authenticated user ID for ownership validation.
last_message_id: Last Redis Stream message ID received ("0-0" for full replay).
Returns:
StreamingResponse: SSE-formatted response chunks starting after last_message_id.
Raises:
HTTPException: 404 if task not found, 410 if task expired, 403 if access denied.
"""
# Check task existence and expiry before subscribing
task, error_code = await stream_registry.get_task_with_expiry_info(task_id)
if error_code == "TASK_EXPIRED":
raise HTTPException(
status_code=410,
detail={
"code": "TASK_EXPIRED",
"message": "This operation has expired. Please try again.",
},
)
if error_code == "TASK_NOT_FOUND":
raise HTTPException(
status_code=404,
detail={
"code": "TASK_NOT_FOUND",
"message": f"Task {task_id} not found.",
},
)
# Validate ownership if task has an owner
if task and task.user_id and user_id != task.user_id:
raise HTTPException(
status_code=403,
detail={
"code": "ACCESS_DENIED",
"message": "You do not have access to this task.",
},
)
# Get subscriber queue from stream registry
subscriber_queue = await stream_registry.subscribe_to_task(
task_id=task_id,
user_id=user_id,
last_message_id=last_message_id,
)
if subscriber_queue is None:
raise HTTPException(
status_code=404,
detail={
"code": "TASK_NOT_FOUND",
"message": f"Task {task_id} not found or access denied.",
},
)
async def event_generator() -> AsyncGenerator[str, None]:
import asyncio
heartbeat_interval = 15.0 # Send heartbeat every 15 seconds
try:
while True:
try:
# Wait for next chunk with timeout for heartbeats
chunk = await asyncio.wait_for(
subscriber_queue.get(), timeout=heartbeat_interval
)
yield chunk.to_sse()
# Check for finish signal
if isinstance(chunk, StreamFinish):
break
except asyncio.TimeoutError:
# Send heartbeat to keep connection alive
yield StreamHeartbeat().to_sse()
except Exception as e:
logger.error(f"Error in task stream {task_id}: {e}", exc_info=True)
finally:
# Unsubscribe when client disconnects or stream ends
try:
await stream_registry.unsubscribe_from_task(task_id, subscriber_queue)
except Exception as unsub_err:
logger.error(
f"Error unsubscribing from task {task_id}: {unsub_err}",
exc_info=True,
)
# AI SDK protocol termination - always yield even if unsubscribe fails
yield "data: [DONE]\n\n"
return StreamingResponse(
event_generator(),
media_type="text/event-stream",
headers={
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"X-Accel-Buffering": "no",
"x-vercel-ai-ui-message-stream": "v1",
},
)
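For reference, a client would exercise the reconnection flow of this (now removed) endpoint roughly as follows. This is a hedged sketch: the base URL and bearer token are placeholders, and only the path and last_message_id query parameter come from the route above.

# Hedged client-side sketch of the SSE reconnection flow; base URL and token
# are placeholders, the path and query parameter mirror the route above.
import httpx


def replay_task_stream(task_id: str, last_message_id: str = "0-0") -> None:
    url = f"https://example.invalid/tasks/{task_id}/stream"  # base URL assumed
    with httpx.stream(
        "GET",
        url,
        params={"last_message_id": last_message_id},
        headers={"Authorization": "Bearer <token>"},  # placeholder credential
        timeout=None,  # SSE stays open; the server sends periodic heartbeats
    ) as response:
        response.raise_for_status()
        for line in response.iter_lines():
            if not line.startswith("data: "):
                continue
            payload = line[len("data: "):]
            if payload == "[DONE]":
                break  # AI SDK protocol terminator emitted by the server
            print(payload)  # one replayed/live SSE chunk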
@router.get(
"/tasks/{task_id}",
)
async def get_task_status(
task_id: str,
user_id: str | None = Depends(auth.get_user_id),
) -> dict:
"""
Get the status of a long-running task.
Args:
task_id: The task ID to check.
user_id: Authenticated user ID for ownership validation.
Returns:
dict: Task status including task_id, status, tool_name, and operation_id.
Raises:
NotFoundError: If task_id is not found or user doesn't have access.
"""
task = await stream_registry.get_task(task_id)
if task is None:
raise NotFoundError(f"Task {task_id} not found.")
# Validate ownership - if task has an owner, requester must match
if task.user_id and user_id != task.user_id:
raise NotFoundError(f"Task {task_id} not found.")
return {
"task_id": task.task_id,
"session_id": task.session_id,
"status": task.status,
"tool_name": task.tool_name,
"operation_id": task.operation_id,
"created_at": task.created_at.isoformat(),
}
# ========== External Completion Webhook ==========
@router.post(
"/operations/{operation_id}/complete",
status_code=200,
)
async def complete_operation(
operation_id: str,
request: OperationCompleteRequest,
x_api_key: str | None = Header(default=None),
) -> dict:
"""
External completion webhook for long-running operations.
Called by Agent Generator (or other services) when an operation completes.
This triggers the stream registry to publish completion and continue LLM generation.
Args:
operation_id: The operation ID to complete.
request: Completion payload with success status and result/error.
x_api_key: Internal API key for authentication.
Returns:
dict: Status of the completion.
Raises:
HTTPException: If API key is invalid or operation not found.
"""
# Validate internal API key - reject if not configured or invalid
if not config.internal_api_key:
logger.error(
"Operation complete webhook rejected: CHAT_INTERNAL_API_KEY not configured"
)
raise HTTPException(
status_code=503,
detail="Webhook not available: internal API key not configured",
)
if x_api_key != config.internal_api_key:
raise HTTPException(status_code=401, detail="Invalid API key")
# Find task by operation_id
task = await stream_registry.find_task_by_operation_id(operation_id)
if task is None:
raise HTTPException(
status_code=404,
detail=f"Operation {operation_id} not found",
)
logger.info(
f"Received completion webhook for operation {operation_id} "
f"(task_id={task.task_id}, success={request.success})"
)
if request.success:
await process_operation_success(task, request.result)
else:
await process_operation_failure(task, request.error)
return {"status": "ok", "task_id": task.task_id}
# ========== Configuration ==========
@router.get("/config/ttl", status_code=200)
async def get_ttl_config() -> dict:
"""
Get the stream TTL configuration.
Returns the Time-To-Live settings for chat streams, which determine
how long clients can reconnect to an active stream.
Returns:
dict: TTL configuration with seconds and milliseconds values.
"""
return {
"stream_ttl_seconds": config.stream_ttl,
"stream_ttl_ms": config.stream_ttl * 1000,
}
# ========== Health Check ==========

View File

@@ -0,0 +1,29 @@
"""Shared helpers for chat tools."""
from typing import Any
def get_inputs_from_schema(
input_schema: dict[str, Any],
exclude_fields: set[str] | None = None,
) -> list[dict[str, Any]]:
"""Extract input field info from JSON schema."""
if not isinstance(input_schema, dict):
return []
exclude = exclude_fields or set()
properties = input_schema.get("properties", {})
required = set(input_schema.get("required", []))
return [
{
"name": name,
"title": schema.get("title", name),
"type": schema.get("type", "string"),
"description": schema.get("description", ""),
"required": name in required,
"default": schema.get("default"),
}
for name, schema in properties.items()
if name not in exclude
]
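A minimal usage sketch for the new helper (the import path is an assumption; only the function itself comes from this diff):

# Usage sketch; the import path below is assumed, not taken from this diff.
from backend.api.features.chat.tools.helpers import get_inputs_from_schema

schema = {
    "properties": {
        "query": {"title": "Query", "type": "string", "description": "Search text"},
        "limit": {"type": "integer", "default": 10},
        "credentials": {"type": "object"},
    },
    "required": ["query"],
}

inputs = get_inputs_from_schema(schema, exclude_fields={"credentials"})
# inputs ->
# [{"name": "query", "title": "Query", "type": "string",
#   "description": "Search text", "required": True, "default": None},
#  {"name": "limit", "title": "limit", "type": "integer",
#   "description": "", "required": False, "default": 10}]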

View File

@@ -24,6 +24,7 @@ from backend.util.timezone_utils import (
)
from .base import BaseTool
from .helpers import get_inputs_from_schema
from .models import (
AgentDetails,
AgentDetailsResponse,
@@ -261,7 +262,7 @@ class RunAgentTool(BaseTool):
),
requirements={
"credentials": requirements_creds_list,
"inputs": self._get_inputs_list(graph.input_schema),
"inputs": get_inputs_from_schema(graph.input_schema),
"execution_modes": self._get_execution_modes(graph),
},
),
@@ -369,22 +370,6 @@ class RunAgentTool(BaseTool):
session_id=session_id,
)
def _get_inputs_list(self, input_schema: dict[str, Any]) -> list[dict[str, Any]]:
"""Extract inputs list from schema."""
inputs_list = []
if isinstance(input_schema, dict) and "properties" in input_schema:
for field_name, field_schema in input_schema["properties"].items():
inputs_list.append(
{
"name": field_name,
"title": field_schema.get("title", field_name),
"type": field_schema.get("type", "string"),
"description": field_schema.get("description", ""),
"required": field_name in input_schema.get("required", []),
}
)
return inputs_list
def _get_execution_modes(self, graph: GraphModel) -> list[str]:
"""Get available execution modes for the graph."""
trigger_info = graph.trigger_setup_info
@@ -398,7 +383,7 @@ class RunAgentTool(BaseTool):
suffix: str,
) -> str:
"""Build a message describing available inputs for an agent."""
inputs_list = self._get_inputs_list(graph.input_schema)
inputs_list = get_inputs_from_schema(graph.input_schema)
required_names = [i["name"] for i in inputs_list if i["required"]]
optional_names = [i["name"] for i in inputs_list if not i["required"]]

View File

@@ -10,12 +10,13 @@ from pydantic_core import PydanticUndefined
from backend.api.features.chat.model import ChatSession
from backend.data.block import get_block
from backend.data.execution import ExecutionContext
from backend.data.model import CredentialsMetaInput
from backend.data.model import CredentialsFieldInfo, CredentialsMetaInput
from backend.data.workspace import get_or_create_workspace
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util.exceptions import BlockError
from .base import BaseTool
from .helpers import get_inputs_from_schema
from .models import (
BlockOutputResponse,
ErrorResponse,
@@ -24,7 +25,10 @@ from .models import (
ToolResponseBase,
UserReadiness,
)
from .utils import build_missing_credentials_from_field_info
from .utils import (
build_missing_credentials_from_field_info,
match_credentials_to_requirements,
)
logger = logging.getLogger(__name__)
@@ -73,41 +77,22 @@ class RunBlockTool(BaseTool):
def requires_auth(self) -> bool:
return True
async def _check_block_credentials(
def _resolve_discriminated_credentials(
self,
user_id: str,
block: Any,
input_data: dict[str, Any] | None = None,
) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
"""
Check if user has required credentials for a block.
Args:
user_id: User ID
block: Block to check credentials for
input_data: Input data for the block (used to determine provider via discriminator)
Returns:
tuple[matched_credentials, missing_credentials]
"""
matched_credentials: dict[str, CredentialsMetaInput] = {}
missing_credentials: list[CredentialsMetaInput] = []
input_data = input_data or {}
# Get credential field info from block's input schema
input_data: dict[str, Any],
) -> dict[str, CredentialsFieldInfo]:
"""Resolve credential requirements, applying discriminator logic where needed."""
credentials_fields_info = block.input_schema.get_credentials_fields_info()
if not credentials_fields_info:
return matched_credentials, missing_credentials
return {}
# Get user's available credentials
creds_manager = IntegrationCredentialsManager()
available_creds = await creds_manager.store.get_all_creds(user_id)
resolved: dict[str, CredentialsFieldInfo] = {}
for field_name, field_info in credentials_fields_info.items():
effective_field_info = field_info
if field_info.discriminator and field_info.discriminator_mapping:
# Get discriminator from input, falling back to schema default
discriminator_value = input_data.get(field_info.discriminator)
if discriminator_value is None:
field = block.input_schema.model_fields.get(
@@ -126,37 +111,34 @@ class RunBlockTool(BaseTool):
f"{discriminator_value} -> {effective_field_info.provider}"
)
matching_cred = next(
(
cred
for cred in available_creds
if cred.provider in effective_field_info.provider
and cred.type in effective_field_info.supported_types
),
None,
)
resolved[field_name] = effective_field_info
if matching_cred:
matched_credentials[field_name] = CredentialsMetaInput(
id=matching_cred.id,
provider=matching_cred.provider, # type: ignore
type=matching_cred.type,
title=matching_cred.title,
)
else:
# Create a placeholder for the missing credential
provider = next(iter(effective_field_info.provider), "unknown")
cred_type = next(iter(effective_field_info.supported_types), "api_key")
missing_credentials.append(
CredentialsMetaInput(
id=field_name,
provider=provider, # type: ignore
type=cred_type, # type: ignore
title=field_name.replace("_", " ").title(),
)
)
return resolved
return matched_credentials, missing_credentials
async def _check_block_credentials(
self,
user_id: str,
block: Any,
input_data: dict[str, Any] | None = None,
) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
"""
Check if user has required credentials for a block.
Args:
user_id: User ID
block: Block to check credentials for
input_data: Input data for the block (used to determine provider via discriminator)
Returns:
tuple[matched_credentials, missing_credentials]
"""
input_data = input_data or {}
requirements = self._resolve_discriminated_credentials(block, input_data)
if not requirements:
return {}, []
return await match_credentials_to_requirements(user_id, requirements)
async def _execute(
self,
@@ -347,27 +329,6 @@ class RunBlockTool(BaseTool):
def _get_inputs_list(self, block: Any) -> list[dict[str, Any]]:
"""Extract non-credential inputs from block schema."""
inputs_list = []
schema = block.input_schema.jsonschema()
properties = schema.get("properties", {})
required_fields = set(schema.get("required", []))
# Get credential field names to exclude
credentials_fields = set(block.input_schema.get_credentials_fields().keys())
for field_name, field_schema in properties.items():
# Skip credential fields
if field_name in credentials_fields:
continue
inputs_list.append(
{
"name": field_name,
"title": field_schema.get("title", field_name),
"type": field_schema.get("type", "string"),
"description": field_schema.get("description", ""),
"required": field_name in required_fields,
}
)
return inputs_list
return get_inputs_from_schema(schema, exclude_fields=credentials_fields)
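The discriminator handling above is easier to follow with plain data. The sketch below is conceptual only: it uses dicts instead of the real CredentialsFieldInfo class, and the providers and model names are illustrative.

# Conceptual sketch of discriminator-based provider resolution; plain dicts
# stand in for CredentialsFieldInfo, and all names are illustrative.
from typing import Any


def resolve_provider(field_info: dict[str, Any], input_data: dict[str, Any]) -> str:
    # If the field declares a discriminator (e.g. a "model" input) with a
    # mapping, the effective provider is chosen from the user-supplied value;
    # otherwise the field's default provider is used.
    discriminator = field_info.get("discriminator")
    mapping = field_info.get("discriminator_mapping") or {}
    if discriminator and mapping:
        value = input_data.get(discriminator)  # the real code also falls back to the schema default
        if value in mapping:
            return mapping[value]
    return field_info["provider"]


field_info = {
    "provider": "openai",
    "discriminator": "model",
    "discriminator_mapping": {"gpt-4o": "openai", "claude-sonnet": "anthropic"},
}
assert resolve_provider(field_info, {"model": "claude-sonnet"}) == "anthropic"
assert resolve_provider(field_info, {}) == "openai"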

View File

@@ -223,6 +223,103 @@ async def get_or_create_library_agent(
return library_agents[0]
async def get_user_credentials(user_id: str) -> list:
"""Get all available credentials for a user."""
creds_manager = IntegrationCredentialsManager()
return await creds_manager.store.get_all_creds(user_id)
def find_matching_credential(
available_creds: list,
field_info: CredentialsFieldInfo,
check_scopes: bool = True,
):
"""Find a credential that matches the required provider, type, and optionally scopes."""
for cred in available_creds:
if cred.provider not in field_info.provider:
continue
if cred.type not in field_info.supported_types:
continue
if check_scopes and not _credential_has_required_scopes(cred, field_info):
continue
return cred
return None
def create_credential_meta_from_match(matching_cred) -> CredentialsMetaInput:
"""Create a CredentialsMetaInput from a matched credential."""
return CredentialsMetaInput(
id=matching_cred.id,
provider=matching_cred.provider, # type: ignore
type=matching_cred.type,
title=matching_cred.title,
)
async def match_credentials_to_requirements(
user_id: str,
requirements: dict[str, CredentialsFieldInfo],
check_scopes: bool = True,
) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
"""
Match user's credentials against a dictionary of credential requirements.
This is the core matching logic shared by both graph and block credential matching.
Args:
user_id: User ID to fetch credentials for
requirements: Dict mapping field names to CredentialsFieldInfo
check_scopes: Whether to verify OAuth2 scopes match requirements (default True).
Set to False to preserve the original run_block behavior, which didn't check scopes.
"""
matched: dict[str, CredentialsMetaInput] = {}
missing: list[CredentialsMetaInput] = []
if not requirements:
return matched, missing
available_creds = await get_user_credentials(user_id)
for field_name, field_info in requirements.items():
matching_cred = find_matching_credential(
available_creds, field_info, check_scopes=check_scopes
)
if matching_cred:
try:
matched[field_name] = create_credential_meta_from_match(matching_cred)
except Exception as e:
logger.error(
f"Failed to create CredentialsMetaInput for field '{field_name}': "
f"provider={matching_cred.provider}, type={matching_cred.type}, "
f"credential_id={matching_cred.id}",
exc_info=True,
)
provider = next(iter(field_info.provider), "unknown")
cred_type = next(iter(field_info.supported_types), "api_key")
missing.append(
CredentialsMetaInput(
id=field_name,
provider=provider, # type: ignore
type=cred_type, # type: ignore
title=f"{field_name} (validation failed: {e})",
)
)
else:
provider = next(iter(field_info.provider), "unknown")
cred_type = next(iter(field_info.supported_types), "api_key")
missing.append(
CredentialsMetaInput(
id=field_name,
provider=provider, # type: ignore
type=cred_type, # type: ignore
title=field_name.replace("_", " ").title(),
)
)
return matched, missing
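The matching rule itself is small enough to illustrate with plain data; the sketch below uses a stand-in dataclass rather than the real credential models, so the classes and values are illustrative only.

# Conceptual sketch of the provider/type matching rule with stand-in types.
from dataclasses import dataclass


@dataclass
class StoredCred:
    id: str
    provider: str
    type: str
    title: str


def find_match(
    available: list[StoredCred], providers: set[str], types: set[str]
) -> StoredCred | None:
    # A stored credential satisfies a requirement when both its provider and
    # its type are in the allowed sets (scope checking omitted here).
    for cred in available:
        if cred.provider in providers and cred.type in types:
            return cred
    return None


creds = [
    StoredCred("c1", "github", "api_key", "GitHub key"),
    StoredCred("c2", "google", "oauth2", "Google account"),
]
match = find_match(creds, {"google"}, {"oauth2"})
assert match is not None and match.id == "c2"
assert find_match(creds, {"notion"}, {"oauth2"}) is None  # would be reported as missing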
async def match_user_credentials_to_graph(
user_id: str,
graph: GraphModel,

View File

@@ -22,7 +22,6 @@ from backend.data.notifications import (
AgentApprovalData,
AgentRejectionData,
NotificationEventModel,
WaitlistLaunchData,
)
from backend.notifications.notifications import queue_notification_async
from backend.util.exceptions import DatabaseError
@@ -1731,29 +1730,6 @@ async def review_store_submission(
# Don't fail the review process if email sending fails
pass
# Notify waitlist users if this is an approval and has a linked waitlist
if is_approved and submission.StoreListing:
try:
frontend_base_url = (
settings.config.frontend_base_url
or settings.config.platform_base_url
)
store_agent = (
await prisma.models.StoreAgent.prisma().find_first_or_raise(
where={"storeListingVersionId": submission.id}
)
)
creator_username = store_agent.creator_username or "unknown"
store_url = f"{frontend_base_url}/marketplace/agent/{creator_username}/{store_agent.slug}"
await notify_waitlist_users_on_launch(
store_listing_id=submission.StoreListing.id,
agent_name=submission.name,
store_url=store_url,
)
except Exception as e:
logger.error(f"Failed to notify waitlist users on agent approval: {e}")
# Don't fail the approval process
# Convert to Pydantic model for consistency
return store_model.StoreSubmission(
listing_id=(submission.StoreListing.id if submission.StoreListing else ""),
@@ -2001,554 +1977,3 @@ async def get_agent_as_admin(
)
return graph
def _waitlist_to_store_entry(
waitlist: prisma.models.WaitlistEntry,
) -> store_model.StoreWaitlistEntry:
"""Convert a WaitlistEntry to StoreWaitlistEntry for public display."""
return store_model.StoreWaitlistEntry(
waitlistId=waitlist.id,
slug=waitlist.slug,
name=waitlist.name,
subHeading=waitlist.subHeading,
videoUrl=waitlist.videoUrl,
agentOutputDemoUrl=waitlist.agentOutputDemoUrl,
imageUrls=waitlist.imageUrls or [],
description=waitlist.description,
categories=waitlist.categories or [],
)
async def get_waitlist() -> list[store_model.StoreWaitlistEntry]:
"""Get all active waitlists for public display."""
try:
waitlists = await prisma.models.WaitlistEntry.prisma().find_many(
where=prisma.types.WaitlistEntryWhereInput(isDeleted=False),
)
# Filter out closed/done waitlists and sort by votes (descending)
excluded_statuses = {
prisma.enums.WaitlistExternalStatus.CANCELED,
prisma.enums.WaitlistExternalStatus.DONE,
}
active_waitlists = [w for w in waitlists if w.status not in excluded_statuses]
sorted_list = sorted(active_waitlists, key=lambda x: x.votes, reverse=True)
return [_waitlist_to_store_entry(w) for w in sorted_list]
except Exception as e:
logger.error(f"Error fetching waitlists: {e}")
raise DatabaseError("Failed to fetch waitlists") from e
async def get_user_waitlist_memberships(user_id: str) -> list[str]:
"""Get all waitlist IDs that a user has joined."""
try:
user = await prisma.models.User.prisma().find_unique(
where={"id": user_id},
include={"joinedWaitlists": True},
)
if not user or not user.joinedWaitlists:
return []
return [w.id for w in user.joinedWaitlists]
except Exception as e:
logger.error(f"Error fetching user waitlist memberships: {e}")
raise DatabaseError("Failed to fetch waitlist memberships") from e
async def add_user_to_waitlist(
waitlist_id: str, user_id: str | None, email: str | None
) -> store_model.StoreWaitlistEntry:
"""
Add a user to a waitlist.
For logged-in users: connects via joinedUsers relation
For anonymous users: adds email to unaffiliatedEmailUsers array
"""
if not user_id and not email:
raise ValueError("Either user_id or email must be provided")
try:
# Find the waitlist
waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id},
include={"joinedUsers": True},
)
if not waitlist:
raise ValueError(f"Waitlist {waitlist_id} not found")
if waitlist.isDeleted:
raise ValueError(f"Waitlist {waitlist_id} is no longer available")
if waitlist.status in [
prisma.enums.WaitlistExternalStatus.CANCELED,
prisma.enums.WaitlistExternalStatus.DONE,
]:
raise ValueError(f"Waitlist {waitlist_id} is closed")
if user_id:
# Check if user already joined
joined_user_ids = [u.id for u in (waitlist.joinedUsers or [])]
if user_id in joined_user_ids:
# Already joined - return waitlist info
logger.debug(f"User {user_id} already joined waitlist {waitlist_id}")
else:
# Connect user to waitlist
await prisma.models.WaitlistEntry.prisma().update(
where={"id": waitlist_id},
data={"joinedUsers": {"connect": [{"id": user_id}]}},
)
logger.info(f"User {user_id} joined waitlist {waitlist_id}")
# If user was previously in email list, remove them
# Use transaction to prevent race conditions
if email:
async with transaction() as tx:
current_waitlist = await tx.waitlistentry.find_unique(
where={"id": waitlist_id}
)
if current_waitlist and email in (
current_waitlist.unaffiliatedEmailUsers or []
):
updated_emails: list[str] = [
e
for e in (current_waitlist.unaffiliatedEmailUsers or [])
if e != email
]
await tx.waitlistentry.update(
where={"id": waitlist_id},
data={"unaffiliatedEmailUsers": updated_emails},
)
elif email:
# Add email to unaffiliated list if not already present
# Use transaction to prevent race conditions with concurrent signups
async with transaction() as tx:
# Re-fetch within transaction to get latest state
current_waitlist = await tx.waitlistentry.find_unique(
where={"id": waitlist_id}
)
if current_waitlist:
current_emails: list[str] = list(
current_waitlist.unaffiliatedEmailUsers or []
)
if email not in current_emails:
current_emails.append(email)
await tx.waitlistentry.update(
where={"id": waitlist_id},
data={"unaffiliatedEmailUsers": current_emails},
)
# Mask email for logging to avoid PII exposure
masked = email.split("@")[0][0] + "***@" + email.split("@")[1] if "@" in email else "***"
logger.info(f"Email {masked} added to waitlist {waitlist_id}")
else:
logger.debug(f"Email already exists on waitlist {waitlist_id}")
# Re-fetch to return updated data
updated_waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id}
)
return _waitlist_to_store_entry(updated_waitlist or waitlist)
except ValueError:
raise
except Exception as e:
logger.error(f"Error adding user to waitlist: {e}")
raise DatabaseError("Failed to add user to waitlist") from e
# ============== Admin Waitlist Functions ==============
def _waitlist_to_admin_response(
waitlist: prisma.models.WaitlistEntry,
) -> store_model.WaitlistAdminResponse:
"""Convert a WaitlistEntry to WaitlistAdminResponse."""
joined_count = len(waitlist.joinedUsers) if waitlist.joinedUsers else 0
email_count = (
len(waitlist.unaffiliatedEmailUsers) if waitlist.unaffiliatedEmailUsers else 0
)
return store_model.WaitlistAdminResponse(
id=waitlist.id,
createdAt=waitlist.createdAt.isoformat() if waitlist.createdAt else "",
updatedAt=waitlist.updatedAt.isoformat() if waitlist.updatedAt else "",
slug=waitlist.slug,
name=waitlist.name,
subHeading=waitlist.subHeading,
description=waitlist.description,
categories=waitlist.categories or [],
imageUrls=waitlist.imageUrls or [],
videoUrl=waitlist.videoUrl,
agentOutputDemoUrl=waitlist.agentOutputDemoUrl,
status=waitlist.status or prisma.enums.WaitlistExternalStatus.NOT_STARTED,
votes=waitlist.votes,
signupCount=joined_count + email_count,
storeListingId=waitlist.storeListingId,
owningUserId=waitlist.owningUserId,
)
async def create_waitlist_admin(
admin_user_id: str,
data: store_model.WaitlistCreateRequest,
) -> store_model.WaitlistAdminResponse:
"""Create a new waitlist (admin only)."""
logger.info(f"Admin {admin_user_id} creating waitlist: {data.name}")
try:
waitlist = await prisma.models.WaitlistEntry.prisma().create(
data=prisma.types.WaitlistEntryCreateInput(
name=data.name,
slug=data.slug,
subHeading=data.subHeading,
description=data.description,
categories=data.categories,
imageUrls=data.imageUrls,
videoUrl=data.videoUrl,
agentOutputDemoUrl=data.agentOutputDemoUrl,
owningUserId=admin_user_id,
status=prisma.enums.WaitlistExternalStatus.NOT_STARTED,
),
include={"joinedUsers": True},
)
return _waitlist_to_admin_response(waitlist)
except Exception as e:
logger.error(f"Error creating waitlist: {e}")
raise DatabaseError("Failed to create waitlist") from e
async def get_waitlists_admin() -> store_model.WaitlistAdminListResponse:
"""Get all waitlists with admin details."""
try:
waitlists = await prisma.models.WaitlistEntry.prisma().find_many(
where=prisma.types.WaitlistEntryWhereInput(isDeleted=False),
include={"joinedUsers": True},
order={"createdAt": "desc"},
)
return store_model.WaitlistAdminListResponse(
waitlists=[_waitlist_to_admin_response(w) for w in waitlists],
totalCount=len(waitlists),
)
except Exception as e:
logger.error(f"Error fetching waitlists for admin: {e}")
raise DatabaseError("Failed to fetch waitlists") from e
async def get_waitlist_admin(
waitlist_id: str,
) -> store_model.WaitlistAdminResponse:
"""Get a single waitlist with admin details."""
try:
waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id},
include={"joinedUsers": True},
)
if not waitlist:
raise ValueError(f"Waitlist {waitlist_id} not found")
if waitlist.isDeleted:
raise ValueError(f"Waitlist {waitlist_id} has been deleted")
return _waitlist_to_admin_response(waitlist)
except ValueError:
raise
except Exception as e:
logger.error(f"Error fetching waitlist {waitlist_id}: {e}")
raise DatabaseError("Failed to fetch waitlist") from e
async def update_waitlist_admin(
waitlist_id: str,
data: store_model.WaitlistUpdateRequest,
) -> store_model.WaitlistAdminResponse:
"""Update a waitlist (admin only)."""
logger.info(f"Updating waitlist {waitlist_id}")
try:
# Check if waitlist exists first
existing = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id}
)
if not existing:
raise ValueError(f"Waitlist {waitlist_id} not found")
if existing.isDeleted:
raise ValueError(f"Waitlist {waitlist_id} has been deleted")
# Build update data from explicitly provided fields
# Use model_fields_set to allow clearing fields by setting them to None
field_mappings = {
"name": data.name,
"slug": data.slug,
"subHeading": data.subHeading,
"description": data.description,
"categories": data.categories,
"imageUrls": data.imageUrls,
"videoUrl": data.videoUrl,
"agentOutputDemoUrl": data.agentOutputDemoUrl,
"storeListingId": data.storeListingId,
}
update_data: dict[str, Any] = {
k: v for k, v in field_mappings.items() if k in data.model_fields_set
}
# Add status if provided (already validated as enum by Pydantic)
if "status" in data.model_fields_set and data.status is not None:
update_data["status"] = data.status
if not update_data:
# No updates, just return current data
return await get_waitlist_admin(waitlist_id)
waitlist = await prisma.models.WaitlistEntry.prisma().update(
where={"id": waitlist_id},
data=prisma.types.WaitlistEntryUpdateInput(**update_data),
include={"joinedUsers": True},
)
# We already verified existence above, so this should never be None
assert waitlist is not None
return _waitlist_to_admin_response(waitlist)
except ValueError:
raise
except Exception as e:
logger.error(f"Error updating waitlist {waitlist_id}: {e}")
raise DatabaseError("Failed to update waitlist") from e
async def delete_waitlist_admin(waitlist_id: str) -> None:
"""Soft delete a waitlist (admin only)."""
logger.info(f"Soft deleting waitlist {waitlist_id}")
try:
# Check if waitlist exists first
waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id},
)
if not waitlist:
raise ValueError(f"Waitlist {waitlist_id} not found")
if waitlist.isDeleted:
raise ValueError(f"Waitlist {waitlist_id} has already been deleted")
await prisma.models.WaitlistEntry.prisma().update(
where={"id": waitlist_id},
data={"isDeleted": True},
)
except ValueError:
raise
except Exception as e:
logger.error(f"Error deleting waitlist {waitlist_id}: {e}")
raise DatabaseError("Failed to delete waitlist") from e
async def get_waitlist_signups_admin(
waitlist_id: str,
) -> store_model.WaitlistSignupListResponse:
"""Get all signups for a waitlist (admin only)."""
try:
waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id},
include={"joinedUsers": True},
)
if not waitlist:
raise ValueError(f"Waitlist {waitlist_id} not found")
signups: list[store_model.WaitlistSignup] = []
# Add user signups
for user in waitlist.joinedUsers or []:
signups.append(
store_model.WaitlistSignup(
type="user",
userId=user.id,
email=user.email,
username=user.name,
)
)
# Add email signups
for email in waitlist.unaffiliatedEmailUsers or []:
signups.append(
store_model.WaitlistSignup(
type="email",
email=email,
)
)
return store_model.WaitlistSignupListResponse(
waitlistId=waitlist_id,
signups=signups,
totalCount=len(signups),
)
except ValueError:
raise
except Exception as e:
logger.error(f"Error fetching signups for waitlist {waitlist_id}: {e}")
raise DatabaseError("Failed to fetch waitlist signups") from e
async def link_waitlist_to_listing_admin(
waitlist_id: str,
store_listing_id: str,
) -> store_model.WaitlistAdminResponse:
"""Link a waitlist to a store listing (admin only)."""
logger.info(f"Linking waitlist {waitlist_id} to listing {store_listing_id}")
try:
# Verify the waitlist exists
waitlist = await prisma.models.WaitlistEntry.prisma().find_unique(
where={"id": waitlist_id}
)
if not waitlist:
raise ValueError(f"Waitlist {waitlist_id} not found")
if waitlist.isDeleted:
raise ValueError(f"Waitlist {waitlist_id} has been deleted")
# Verify the store listing exists
listing = await prisma.models.StoreListing.prisma().find_unique(
where={"id": store_listing_id}
)
if not listing:
raise ValueError(f"Store listing {store_listing_id} not found")
updated_waitlist = await prisma.models.WaitlistEntry.prisma().update(
where={"id": waitlist_id},
data={"StoreListing": {"connect": {"id": store_listing_id}}},
include={"joinedUsers": True},
)
# We already verified existence above, so this should never be None
assert updated_waitlist is not None
return _waitlist_to_admin_response(updated_waitlist)
except ValueError:
raise
except Exception as e:
logger.error(f"Error linking waitlist to listing: {e}")
raise DatabaseError("Failed to link waitlist to listing") from e
async def notify_waitlist_users_on_launch(
store_listing_id: str,
agent_name: str,
store_url: str,
) -> int:
"""
Notify all users on waitlists linked to a store listing when the agent is launched.
Args:
store_listing_id: The ID of the store listing that was approved
agent_name: The name of the approved agent
store_url: The URL to the agent's store page
Returns:
The number of notifications sent
"""
logger.info(f"Notifying waitlist users for store listing {store_listing_id}")
try:
# Find all active waitlists linked to this store listing
# Exclude DONE and CANCELED to prevent duplicate notifications on re-approval
waitlists = await prisma.models.WaitlistEntry.prisma().find_many(
where={
"storeListingId": store_listing_id,
"isDeleted": False,
"status": {
"not_in": [
prisma.enums.WaitlistExternalStatus.DONE,
prisma.enums.WaitlistExternalStatus.CANCELED,
]
},
},
include={"joinedUsers": True},
)
if not waitlists:
logger.info(
f"No active waitlists found for store listing {store_listing_id}"
)
return 0
notification_count = 0
launched_at = datetime.now(tz=timezone.utc)
for waitlist in waitlists:
# Track notification results for this waitlist
users_to_notify = waitlist.joinedUsers or []
failed_user_ids: list[str] = []
# Notify registered users
for user in users_to_notify:
try:
notification_data = WaitlistLaunchData(
agent_name=agent_name,
waitlist_name=waitlist.name,
store_url=store_url,
launched_at=launched_at,
)
notification_event = NotificationEventModel[WaitlistLaunchData](
user_id=user.id,
type=prisma.enums.NotificationType.WAITLIST_LAUNCH,
data=notification_data,
)
await queue_notification_async(notification_event)
notification_count += 1
except Exception as e:
logger.error(
f"Failed to send waitlist launch notification to user {user.id}: {e}"
)
failed_user_ids.append(user.id)
# Note: For unaffiliated email users, you would need to send emails directly
# since they don't have user IDs for the notification system.
# This could be done via a separate email service.
# For now, we log these for potential manual follow-up or future implementation.
has_pending_email_users = bool(waitlist.unaffiliatedEmailUsers)
if has_pending_email_users:
logger.info(
f"Waitlist {waitlist.id} has {len(waitlist.unaffiliatedEmailUsers)} "
f"unaffiliated email users that need email notifications"
)
# Only mark waitlist as DONE if all registered user notifications succeeded
# AND there are no unaffiliated email users still waiting for notifications
if not failed_user_ids and not has_pending_email_users:
await prisma.models.WaitlistEntry.prisma().update(
where={"id": waitlist.id},
data={"status": prisma.enums.WaitlistExternalStatus.DONE},
)
logger.info(f"Updated waitlist {waitlist.id} status to DONE")
elif failed_user_ids:
logger.warning(
f"Waitlist {waitlist.id} not marked as DONE due to "
f"{len(failed_user_ids)} failed notifications"
)
elif has_pending_email_users:
logger.warning(
f"Waitlist {waitlist.id} not marked as DONE due to "
f"{len(waitlist.unaffiliatedEmailUsers)} pending email-only users"
)
logger.info(
f"Sent {notification_count} waitlist launch notifications for store listing {store_listing_id}"
)
return notification_count
except Exception as e:
logger.error(
f"Error notifying waitlist users for store listing {store_listing_id}: {e}"
)
# Don't raise - we don't want to fail the approval process
return 0

View File

@@ -224,102 +224,6 @@ class ReviewSubmissionRequest(pydantic.BaseModel):
internal_comments: str | None = None # Private admin notes
class StoreWaitlistEntry(pydantic.BaseModel):
"""Public waitlist entry - no PII fields exposed."""
waitlistId: str
slug: str
# Content fields
name: str
subHeading: str
videoUrl: str | None = None
agentOutputDemoUrl: str | None = None
imageUrls: list[str]
description: str
categories: list[str]
class StoreWaitlistsAllResponse(pydantic.BaseModel):
listings: list[StoreWaitlistEntry]
# Admin Waitlist Models
class WaitlistCreateRequest(pydantic.BaseModel):
"""Request model for creating a new waitlist."""
name: str
slug: str
subHeading: str
description: str
categories: list[str] = []
imageUrls: list[str] = []
videoUrl: str | None = None
agentOutputDemoUrl: str | None = None
class WaitlistUpdateRequest(pydantic.BaseModel):
"""Request model for updating a waitlist."""
name: str | None = None
slug: str | None = None
subHeading: str | None = None
description: str | None = None
categories: list[str] | None = None
imageUrls: list[str] | None = None
videoUrl: str | None = None
agentOutputDemoUrl: str | None = None
status: prisma.enums.WaitlistExternalStatus | None = None
storeListingId: str | None = None # Link to a store listing
class WaitlistAdminResponse(pydantic.BaseModel):
"""Admin response model with full waitlist details including internal data."""
id: str
createdAt: str
updatedAt: str
slug: str
name: str
subHeading: str
description: str
categories: list[str]
imageUrls: list[str]
videoUrl: str | None = None
agentOutputDemoUrl: str | None = None
status: prisma.enums.WaitlistExternalStatus
votes: int
signupCount: int # Total count of joinedUsers + unaffiliatedEmailUsers
storeListingId: str | None = None
owningUserId: str
class WaitlistSignup(pydantic.BaseModel):
"""Individual signup entry for a waitlist."""
type: str # "user" or "email"
userId: str | None = None
email: str | None = None
username: str | None = None # For user signups
class WaitlistSignupListResponse(pydantic.BaseModel):
"""Response model for listing waitlist signups."""
waitlistId: str
signups: list[WaitlistSignup]
totalCount: int
class WaitlistAdminListResponse(pydantic.BaseModel):
"""Response model for listing all waitlists (admin view)."""
waitlists: list[WaitlistAdminResponse]
totalCount: int
class UnifiedSearchResult(pydantic.BaseModel):
"""A single result from unified hybrid search across all content types."""

View File

@@ -8,7 +8,6 @@ import autogpt_libs.auth
import fastapi
import fastapi.responses
import prisma.enums
from autogpt_libs.auth.dependencies import get_optional_user_id
import backend.data.graph
import backend.util.json
@@ -82,74 +81,6 @@ async def update_or_create_profile(
return updated_profile
##############################################
############## Waitlist Endpoints ############
##############################################
@router.get(
"/waitlist",
summary="Get the agent waitlist",
tags=["store", "public"],
response_model=store_model.StoreWaitlistsAllResponse,
)
async def get_waitlist():
"""
Get all active waitlists for public display.
"""
waitlists = await store_db.get_waitlist()
return store_model.StoreWaitlistsAllResponse(listings=waitlists)
@router.get(
"/waitlist/my-memberships",
summary="Get waitlist IDs the current user has joined",
tags=["store", "private"],
)
async def get_my_waitlist_memberships(
user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
) -> list[str]:
"""Returns list of waitlist IDs the authenticated user has joined."""
return await store_db.get_user_waitlist_memberships(user_id)
@router.post(
path="/waitlist/{waitlist_id}/join",
summary="Add self to the agent waitlist",
tags=["store", "public"],
response_model=store_model.StoreWaitlistEntry,
)
async def add_self_to_waitlist(
user_id: str | None = fastapi.Security(get_optional_user_id),
waitlist_id: str = fastapi.Path(..., description="The ID of the waitlist to join"),
email: str | None = fastapi.Body(
default=None, embed=True, description="Email address for unauthenticated users"
),
):
"""
Add the current user to the agent waitlist.
"""
if not user_id and not email:
raise fastapi.HTTPException(
status_code=400,
detail="Either user authentication or email address is required",
)
try:
waitlist_entry = await store_db.add_user_to_waitlist(
waitlist_id=waitlist_id, user_id=user_id, email=email
)
return waitlist_entry
except ValueError as e:
error_msg = str(e)
if "not found" in error_msg:
raise fastapi.HTTPException(status_code=404, detail="Waitlist not found")
# Waitlist exists but is closed or unavailable
raise fastapi.HTTPException(status_code=400, detail=error_msg)
except Exception:
raise fastapi.HTTPException(
status_code=500, detail="An error occurred while joining the waitlist"
)
##############################################
############### Agent Endpoints ##############
##############################################

View File

@@ -19,7 +19,6 @@ from prisma.errors import PrismaError
import backend.api.features.admin.credit_admin_routes
import backend.api.features.admin.execution_analytics_routes
import backend.api.features.admin.store_admin_routes
import backend.api.features.admin.waitlist_admin_routes
import backend.api.features.builder
import backend.api.features.builder.routes
import backend.api.features.chat.routes as chat_routes
@@ -307,11 +306,6 @@ app.include_router(
tags=["v2", "admin"],
prefix="/api/store",
)
app.include_router(
backend.api.features.admin.waitlist_admin_routes.router,
tags=["v2", "admin"],
prefix="/api/store",
)
app.include_router(
backend.api.features.admin.credit_admin_routes.router,
tags=["v2", "admin"],

View File

@@ -1,8 +1,9 @@
import logging
import queue
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from enum import Enum
from multiprocessing import Manager
from queue import Empty
from typing import (
TYPE_CHECKING,
Annotated,
@@ -1199,16 +1200,12 @@ class NodeExecutionEntry(BaseModel):
class ExecutionQueue(Generic[T]):
"""
Thread-safe queue for managing node execution within a single graph execution.
Note: Uses queue.Queue (not multiprocessing.Queue) since all access is from
threads within the same process. If migrating back to ProcessPoolExecutor,
replace with multiprocessing.Manager().Queue() for cross-process safety.
Queue for managing the execution of agents.
This will be shared between different processes
"""
def __init__(self):
# Thread-safe queue (not multiprocessing) — see class docstring
self.queue: queue.Queue[T] = queue.Queue()
self.queue = Manager().Queue()
def add(self, execution: T) -> T:
self.queue.put(execution)
@@ -1223,7 +1220,7 @@ class ExecutionQueue(Generic[T]):
def get_or_none(self) -> T | None:
try:
return self.queue.get_nowait()
except queue.Empty:
except Empty:
return None
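For context on the revert above: queue.Queue is only safe to share between threads of a single process, whereas a Manager().Queue() proxy can also be handed to worker processes (at the cost of IPC overhead). A small self-contained illustration:

# Illustration of the thread-queue vs. manager-queue tradeoff discussed above.
import queue
from multiprocessing import Manager, Process


def worker(q) -> None:
    q.put("from child process")


if __name__ == "__main__":
    thread_q: queue.Queue[str] = queue.Queue()  # safe across threads, same process only
    thread_q.put("within one process")
    print(thread_q.get())

    manager_q = Manager().Queue()  # proxy object, usable from child processes
    p = Process(target=worker, args=(manager_q,))
    p.start()
    p.join()
    print(manager_q.get())  # -> "from child process"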

View File

@@ -1,58 +0,0 @@
"""Tests for ExecutionQueue thread-safety."""
import queue
import threading
from backend.data.execution import ExecutionQueue
def test_execution_queue_uses_stdlib_queue():
"""Verify ExecutionQueue uses queue.Queue (not multiprocessing)."""
q = ExecutionQueue()
assert isinstance(q.queue, queue.Queue)
def test_basic_operations():
"""Test add, get, empty, and get_or_none."""
q = ExecutionQueue()
assert q.empty() is True
assert q.get_or_none() is None
result = q.add("item1")
assert result == "item1"
assert q.empty() is False
item = q.get()
assert item == "item1"
assert q.empty() is True
def test_thread_safety():
"""Test concurrent access from multiple threads."""
q = ExecutionQueue()
results = []
num_items = 100
def producer():
for i in range(num_items):
q.add(f"item_{i}")
def consumer():
count = 0
while count < num_items:
item = q.get_or_none()
if item is not None:
results.append(item)
count += 1
producer_thread = threading.Thread(target=producer)
consumer_thread = threading.Thread(target=consumer)
producer_thread.start()
consumer_thread.start()
producer_thread.join(timeout=5)
consumer_thread.join(timeout=5)
assert len(results) == num_items

View File

@@ -211,22 +211,6 @@ class AgentRejectionData(BaseNotificationData):
return value
class WaitlistLaunchData(BaseNotificationData):
"""Notification data for when an agent from a waitlist is launched."""
agent_name: str
waitlist_name: str
store_url: str
launched_at: datetime
@field_validator("launched_at")
@classmethod
def validate_timezone(cls, value: datetime):
if value.tzinfo is None:
raise ValueError("datetime must have timezone information")
return value
NotificationData = Annotated[
Union[
AgentRunData,
@@ -239,7 +223,6 @@ NotificationData = Annotated[
DailySummaryData,
RefundRequestData,
BaseSummaryData,
WaitlistLaunchData,
],
Field(discriminator="type"),
]
@@ -290,7 +273,6 @@ def get_notif_data_type(
NotificationType.REFUND_PROCESSED: RefundRequestData,
NotificationType.AGENT_APPROVED: AgentApprovalData,
NotificationType.AGENT_REJECTED: AgentRejectionData,
NotificationType.WAITLIST_LAUNCH: WaitlistLaunchData,
}[notification_type]
@@ -336,7 +318,6 @@ class NotificationTypeOverride:
NotificationType.REFUND_PROCESSED: QueueType.ADMIN,
NotificationType.AGENT_APPROVED: QueueType.IMMEDIATE,
NotificationType.AGENT_REJECTED: QueueType.IMMEDIATE,
NotificationType.WAITLIST_LAUNCH: QueueType.IMMEDIATE,
}
return BATCHING_RULES.get(self.notification_type, QueueType.IMMEDIATE)
@@ -356,7 +337,6 @@ class NotificationTypeOverride:
NotificationType.REFUND_PROCESSED: "refund_processed.html",
NotificationType.AGENT_APPROVED: "agent_approved.html",
NotificationType.AGENT_REJECTED: "agent_rejected.html",
NotificationType.WAITLIST_LAUNCH: "waitlist_launch.html",
}[self.notification_type]
@property
@@ -374,7 +354,6 @@ class NotificationTypeOverride:
NotificationType.REFUND_PROCESSED: "Refund for ${{data.amount / 100}} to {{data.user_name}} has been processed",
NotificationType.AGENT_APPROVED: "🎉 Your agent '{{data.agent_name}}' has been approved!",
NotificationType.AGENT_REJECTED: "Your agent '{{data.agent_name}}' needs some updates",
NotificationType.WAITLIST_LAUNCH: "🚀 {{data.agent_name}} is now available!",
}[self.notification_type]

View File

@@ -1,59 +0,0 @@
{# Waitlist Launch Notification Email Template #}
{#
Template variables:
data.agent_name: the name of the launched agent
data.waitlist_name: the name of the waitlist the user joined
data.store_url: URL to view the agent in the store
data.launched_at: when the agent was launched
Subject: {{ data.agent_name }} is now available!
#}
{% block content %}
<h1 style="color: #7c3aed; font-size: 32px; font-weight: 700; margin: 0 0 24px 0; text-align: center;">
The wait is over!
</h1>
<p style="color: #586069; font-size: 18px; text-align: center; margin: 0 0 24px 0;">
<strong>'{{ data.agent_name }}'</strong> is now live in the AutoGPT Store!
</p>
<div style="height: 32px; background: transparent;"></div>
<div style="background: #f3e8ff; border: 1px solid #d8b4fe; border-radius: 8px; padding: 20px; margin: 0;">
<h3 style="color: #6b21a8; font-size: 16px; font-weight: 600; margin: 0 0 12px 0;">
You're one of the first to know!
</h3>
<p style="color: #6b21a8; margin: 0; font-size: 16px; line-height: 1.5;">
You signed up for the <strong>{{ data.waitlist_name }}</strong> waitlist, and we're excited to let you know that this agent is now ready for you to use.
</p>
</div>
<div style="height: 32px; background: transparent;"></div>
<div style="text-align: center; margin: 24px 0;">
<a href="{{ data.store_url }}" style="display: inline-block; background: linear-gradient(135deg, #7c3aed 0%, #5b21b6 100%); color: white; text-decoration: none; padding: 14px 28px; border-radius: 6px; font-weight: 600; font-size: 16px;">
Get {{ data.agent_name }} Now
</a>
</div>
<div style="height: 32px; background: transparent;"></div>
<div style="background: #d1ecf1; border: 1px solid #bee5eb; border-radius: 8px; padding: 20px; margin: 0;">
<h3 style="color: #0c5460; font-size: 16px; font-weight: 600; margin: 0 0 12px 0;">
What can you do now?
</h3>
<ul style="color: #0c5460; margin: 0; padding-left: 18px; font-size: 16px; line-height: 1.6;">
<li>Visit the store to learn more about what this agent can do</li>
<li>Install and start using the agent right away</li>
<li>Share it with others who might find it useful</li>
</ul>
</div>
<div style="height: 32px; background: transparent;"></div>
<p style="color: #6a737d; font-size: 14px; text-align: center; margin: 24px 0;">
Thank you for helping us prioritize what to build! Your interest made this happen.
</p>
{% endblock %}

View File

@@ -342,14 +342,6 @@ async def store_media_file(
if not target_path.is_file():
raise ValueError(f"Local file does not exist: {target_path}")
# Virus scan the local file before any further processing
local_content = target_path.read_bytes()
if len(local_content) > MAX_FILE_SIZE_BYTES:
raise ValueError(
f"File too large: {len(local_content)} bytes > {MAX_FILE_SIZE_BYTES} bytes"
)
await scan_content_safe(local_content, filename=sanitized_file)
# Return based on requested format
if return_format == "for_local_processing":
# Use when processing files locally with tools like ffmpeg, MoviePy, PIL

View File

@@ -247,100 +247,3 @@ class TestFileCloudIntegration:
execution_context=make_test_context(graph_exec_id=graph_exec_id),
return_format="for_local_processing",
)
@pytest.mark.asyncio
async def test_store_media_file_local_path_scanned(self):
"""Test that local file paths are scanned for viruses."""
graph_exec_id = "test-exec-123"
local_file = "test_video.mp4"
file_content = b"fake video content"
with patch(
"backend.util.file.get_cloud_storage_handler"
) as mock_handler_getter, patch(
"backend.util.file.scan_content_safe"
) as mock_scan, patch(
"backend.util.file.Path"
) as mock_path_class:
# Mock cloud storage handler - not a cloud path
mock_handler = MagicMock()
mock_handler.is_cloud_path.return_value = False
mock_handler_getter.return_value = mock_handler
# Mock virus scanner
mock_scan.return_value = None
# Mock file system operations
mock_base_path = MagicMock()
mock_target_path = MagicMock()
mock_resolved_path = MagicMock()
mock_path_class.return_value = mock_base_path
mock_base_path.mkdir = MagicMock()
mock_base_path.__truediv__ = MagicMock(return_value=mock_target_path)
mock_target_path.resolve.return_value = mock_resolved_path
mock_resolved_path.is_relative_to.return_value = True
mock_resolved_path.is_file.return_value = True
mock_resolved_path.read_bytes.return_value = file_content
mock_resolved_path.relative_to.return_value = Path(local_file)
mock_resolved_path.name = local_file
result = await store_media_file(
file=MediaFileType(local_file),
execution_context=make_test_context(graph_exec_id=graph_exec_id),
return_format="for_local_processing",
)
# Verify virus scan was called for local file
mock_scan.assert_called_once_with(file_content, filename=local_file)
# Result should be the relative path
assert str(result) == local_file
@pytest.mark.asyncio
async def test_store_media_file_local_path_virus_detected(self):
"""Test that infected local files raise VirusDetectedError."""
from backend.api.features.store.exceptions import VirusDetectedError
graph_exec_id = "test-exec-123"
local_file = "infected.exe"
file_content = b"malicious content"
with patch(
"backend.util.file.get_cloud_storage_handler"
) as mock_handler_getter, patch(
"backend.util.file.scan_content_safe"
) as mock_scan, patch(
"backend.util.file.Path"
) as mock_path_class:
# Mock cloud storage handler - not a cloud path
mock_handler = MagicMock()
mock_handler.is_cloud_path.return_value = False
mock_handler_getter.return_value = mock_handler
# Mock virus scanner to detect virus
mock_scan.side_effect = VirusDetectedError(
"EICAR-Test-File", "File rejected due to virus detection"
)
# Mock file system operations
mock_base_path = MagicMock()
mock_target_path = MagicMock()
mock_resolved_path = MagicMock()
mock_path_class.return_value = mock_base_path
mock_base_path.mkdir = MagicMock()
mock_base_path.__truediv__ = MagicMock(return_value=mock_target_path)
mock_target_path.resolve.return_value = mock_resolved_path
mock_resolved_path.is_relative_to.return_value = True
mock_resolved_path.is_file.return_value = True
mock_resolved_path.read_bytes.return_value = file_content
with pytest.raises(VirusDetectedError):
await store_media_file(
file=MediaFileType(local_file),
execution_context=make_test_context(graph_exec_id=graph_exec_id),
return_format="for_local_processing",
)

View File

@@ -0,0 +1,16 @@
"""Validation utilities."""
import re
_UUID_V4_PATTERN = re.compile(
r"[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}",
re.IGNORECASE,
)
def is_uuid_v4(text: str) -> bool:
return bool(_UUID_V4_PATTERN.fullmatch(text.strip()))
def extract_uuids(text: str) -> list[str]:
return sorted({m.lower() for m in _UUID_V4_PATTERN.findall(text)})
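A short usage sketch for these helpers (the import path is assumed; the behaviour follows directly from the regex above):

# Usage sketch; the import path below is assumed, not taken from this diff.
from backend.api.features.chat.validation import extract_uuids, is_uuid_v4

assert is_uuid_v4("  3F2504E0-4F89-4D3A-9A0C-0305E82C3301  ")  # stripped, case-insensitive
assert not is_uuid_v4("not-a-uuid")

text = (
    "run 3f2504e0-4f89-4d3a-9a0c-0305e82c3301 then "
    "3F2504E0-4F89-4D3A-9A0C-0305E82C3301 again"
)
# Duplicates collapse case-insensitively and the result is sorted, giving the
# deterministic ordering mentioned in the commit message.
assert extract_uuids(text) == ["3f2504e0-4f89-4d3a-9a0c-0305e82c3301"]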

View File

@@ -1,53 +0,0 @@
-- CreateEnum
CREATE TYPE "WaitlistExternalStatus" AS ENUM ('DONE', 'NOT_STARTED', 'CANCELED', 'WORK_IN_PROGRESS');
-- AlterEnum
ALTER TYPE "NotificationType" ADD VALUE 'WAITLIST_LAUNCH';
-- CreateTable
CREATE TABLE "WaitlistEntry" (
"id" TEXT NOT NULL,
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"updatedAt" TIMESTAMP(3) NOT NULL,
"storeListingId" TEXT,
"owningUserId" TEXT NOT NULL,
"slug" TEXT NOT NULL,
"search" tsvector DEFAULT ''::tsvector,
"name" TEXT NOT NULL,
"subHeading" TEXT NOT NULL,
"videoUrl" TEXT,
"agentOutputDemoUrl" TEXT,
"imageUrls" TEXT[],
"description" TEXT NOT NULL,
"categories" TEXT[],
"status" "WaitlistExternalStatus" NOT NULL DEFAULT 'NOT_STARTED',
"votes" INTEGER NOT NULL DEFAULT 0,
"unaffiliatedEmailUsers" TEXT[] DEFAULT ARRAY[]::TEXT[],
"isDeleted" BOOLEAN NOT NULL DEFAULT false,
CONSTRAINT "WaitlistEntry_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "_joinedWaitlists" (
"A" TEXT NOT NULL,
"B" TEXT NOT NULL
);
-- CreateIndex
CREATE UNIQUE INDEX "_joinedWaitlists_AB_unique" ON "_joinedWaitlists"("A", "B");
-- CreateIndex
CREATE INDEX "_joinedWaitlists_B_index" ON "_joinedWaitlists"("B");
-- AddForeignKey
ALTER TABLE "WaitlistEntry" ADD CONSTRAINT "WaitlistEntry_storeListingId_fkey" FOREIGN KEY ("storeListingId") REFERENCES "StoreListing"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "WaitlistEntry" ADD CONSTRAINT "WaitlistEntry_owningUserId_fkey" FOREIGN KEY ("owningUserId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "_joinedWaitlists" ADD CONSTRAINT "_joinedWaitlists_A_fkey" FOREIGN KEY ("A") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "_joinedWaitlists" ADD CONSTRAINT "_joinedWaitlists_B_fkey" FOREIGN KEY ("B") REFERENCES "WaitlistEntry"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -438,7 +438,7 @@ develop = true
[package.dependencies]
colorama = "^0.4.6"
cryptography = "^46.0"
cryptography = "^45.0"
expiringdict = "^1.2.2"
fastapi = "^0.128.0"
google-cloud-logging = "^3.13.0"
@@ -970,75 +970,62 @@ pytz = ">2021.1"
[[package]]
name = "cryptography"
version = "46.0.4"
version = "45.0.7"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
python-versions = "!=3.9.0,!=3.9.1,>=3.8"
python-versions = "!=3.9.0,!=3.9.1,>=3.7"
groups = ["main"]
files = [
{file = "cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32"},
{file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616"},
{file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0"},
{file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0"},
{file = "cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5"},
{file = "cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b"},
{file = "cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e"},
{file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f"},
{file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82"},
{file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c"},
{file = "cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061"},
{file = "cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7"},
{file = "cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b"},
{file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019"},
{file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4"},
{file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b"},
{file = "cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc"},
{file = "cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947"},
{file = "cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3"},
{file = "cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59"},
{file = "cryptography-45.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:3be4f21c6245930688bd9e162829480de027f8bf962ede33d4f8ba7d67a00cee"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:67285f8a611b0ebc0857ced2081e30302909f571a46bfa7a3cc0ad303fe015c6"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:577470e39e60a6cd7780793202e63536026d9b8641de011ed9d8174da9ca5339"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:4bd3e5c4b9682bc112d634f2c6ccc6736ed3635fc3319ac2bb11d768cc5a00d8"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:465ccac9d70115cd4de7186e60cfe989de73f7bb23e8a7aa45af18f7412e75bf"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:16ede8a4f7929b4b7ff3642eba2bf79aa1d71f24ab6ee443935c0d269b6bc513"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8978132287a9d3ad6b54fcd1e08548033cc09dc6aacacb6c004c73c3eb5d3ac3"},
{file = "cryptography-45.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:b6a0e535baec27b528cb07a119f321ac024592388c5681a5ced167ae98e9fff3"},
{file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:a24ee598d10befaec178efdff6054bc4d7e883f615bfbcd08126a0f4931c83a6"},
{file = "cryptography-45.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:fa26fa54c0a9384c27fcdc905a2fb7d60ac6e47d14bc2692145f2b3b1e2cfdbd"},
{file = "cryptography-45.0.7-cp311-abi3-win32.whl", hash = "sha256:bef32a5e327bd8e5af915d3416ffefdbe65ed975b646b3805be81b23580b57b8"},
{file = "cryptography-45.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:3808e6b2e5f0b46d981c24d79648e5c25c35e59902ea4391a0dcb3e667bf7443"},
{file = "cryptography-45.0.7-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bfb4c801f65dd61cedfc61a83732327fafbac55a47282e6f26f073ca7a41c3b2"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:81823935e2f8d476707e85a78a405953a03ef7b7b4f55f93f7c2d9680e5e0691"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3994c809c17fc570c2af12c9b840d7cea85a9fd3e5c0e0491f4fa3c029216d59"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dad43797959a74103cb59c5dac71409f9c27d34c8a05921341fb64ea8ccb1dd4"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ce7a453385e4c4693985b4a4a3533e041558851eae061a58a5405363b098fcd3"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b04f85ac3a90c227b6e5890acb0edbaf3140938dbecf07bff618bf3638578cf1"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:48c41a44ef8b8c2e80ca4527ee81daa4c527df3ecbc9423c41a420a9559d0e27"},
{file = "cryptography-45.0.7-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f3df7b3d0f91b88b2106031fd995802a2e9ae13e02c36c1fc075b43f420f3a17"},
{file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd342f085542f6eb894ca00ef70236ea46070c8a13824c6bde0dfdcd36065b9b"},
{file = "cryptography-45.0.7-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1993a1bb7e4eccfb922b6cd414f072e08ff5816702a0bdb8941c247a6b1b287c"},
{file = "cryptography-45.0.7-cp37-abi3-win32.whl", hash = "sha256:18fcf70f243fe07252dcb1b268a687f2358025ce32f9f88028ca5c364b123ef5"},
{file = "cryptography-45.0.7-cp37-abi3-win_amd64.whl", hash = "sha256:7285a89df4900ed3bfaad5679b1e668cb4b38a8de1ccbfc84b05f34512da0a90"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:de58755d723e86175756f463f2f0bddd45cc36fbd62601228a3f8761c9f58252"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a20e442e917889d1a6b3c570c9e3fa2fdc398c20868abcea268ea33c024c4083"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:258e0dff86d1d891169b5af222d362468a9570e2532923088658aa866eb11130"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d97cf502abe2ab9eff8bd5e4aca274da8d06dd3ef08b759a8d6143f4ad65d4b4"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:c987dad82e8c65ebc985f5dae5e74a3beda9d0a2a4daf8a1115f3772b59e5141"},
{file = "cryptography-45.0.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:c13b1e3afd29a5b3b2656257f14669ca8fa8d7956d509926f0b130b600b50ab7"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a862753b36620af6fc54209264f92c716367f2f0ff4624952276a6bbd18cbde"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:06ce84dc14df0bf6ea84666f958e6080cdb6fe1231be2a51f3fc1267d9f3fb34"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d0c5c6bac22b177bf8da7435d9d27a6834ee130309749d162b26c3105c0795a9"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:2f641b64acc00811da98df63df7d59fd4706c0df449da71cb7ac39a0732b40ae"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:f5414a788ecc6ee6bc58560e85ca624258a55ca434884445440a810796ea0e0b"},
{file = "cryptography-45.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f3d56f73595376f4244646dd5c5870c14c196949807be39e79e7bd9bac3da63"},
{file = "cryptography-45.0.7.tar.gz", hash = "sha256:4b1654dfc64ea479c242508eb8c724044f1e964a47d1d1cacc5132292d851971"},
]
[package.dependencies]
cffi = {version = ">=2.0.0", markers = "python_full_version >= \"3.9.0\" and platform_python_implementation != \"PyPy\""}
typing-extensions = {version = ">=4.13.2", markers = "python_full_version < \"3.11.0\""}
cffi = {version = ">=1.14", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs", "sphinx-rtd-theme (>=3.0.0)"]
docs = ["sphinx (>=5.3.0)", "sphinx-inline-tabs ; python_full_version >= \"3.8.0\"", "sphinx-rtd-theme (>=3.0.0) ; python_full_version >= \"3.8.0\""]
docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
nox = ["nox[uv] (>=2024.4.15)"]
pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_full_version >= \"3.8.0\""]
pep8test = ["check-sdist ; python_full_version >= \"3.8.0\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
test = ["certifi (>=2024)", "cryptography-vectors (==46.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test = ["certifi (>=2024)", "cryptography-vectors (==45.0.7)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -8395,4 +8382,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.14"
content-hash = "1e226d8f7a342d17a85c036bfdfdf2ccc7d9e52c96644022fa69bf6044046528"
content-hash = "40b2c87c3c86bd10214bd30ad291cead75da5060ab894105025ee4c0a3b3828e"

View File

@@ -17,7 +17,7 @@ apscheduler = "^3.11.1"
autogpt-libs = { path = "../autogpt_libs", develop = true }
bleach = { extras = ["css"], version = "^6.2.0" }
click = "^8.2.0"
cryptography = "^46.0"
cryptography = "^45.0"
discord-py = "^2.5.2"
e2b-code-interpreter = "^1.5.2"
elevenlabs = "^1.50.0"

View File

@@ -70,10 +70,6 @@ model User {
OAuthAuthorizationCodes OAuthAuthorizationCode[]
OAuthAccessTokens OAuthAccessToken[]
OAuthRefreshTokens OAuthRefreshToken[]
// Waitlist relations
waitlistEntries WaitlistEntry[]
joinedWaitlists WaitlistEntry[] @relation("joinedWaitlists")
}
enum OnboardingStep {
@@ -348,7 +344,6 @@ enum NotificationType {
REFUND_PROCESSED
AGENT_APPROVED
AGENT_REJECTED
WAITLIST_LAUNCH
}
model NotificationEvent {
@@ -953,8 +948,7 @@ model StoreListing {
OwningUser User @relation(fields: [owningUserId], references: [id])
// Relations
Versions StoreListingVersion[] @relation("ListingVersions")
waitlistEntries WaitlistEntry[]
Versions StoreListingVersion[] @relation("ListingVersions")
// Unique index on agentId to ensure only one listing per agent, regardless of number of versions the agent has.
@@unique([agentGraphId])
@@ -1086,47 +1080,6 @@ model StoreListingReview {
@@index([reviewByUserId])
}
enum WaitlistExternalStatus {
DONE
NOT_STARTED
CANCELED
WORK_IN_PROGRESS
}
model WaitlistEntry {
id String @id @default(uuid())
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
storeListingId String?
StoreListing StoreListing? @relation(fields: [storeListingId], references: [id], onDelete: SetNull)
owningUserId String
OwningUser User @relation(fields: [owningUserId], references: [id])
slug String
search Unsupported("tsvector")? @default(dbgenerated("''::tsvector"))
// Content fields
name String
subHeading String
videoUrl String?
agentOutputDemoUrl String?
imageUrls String[]
description String
categories String[]
//Waitlist specific fields
status WaitlistExternalStatus @default(NOT_STARTED)
votes Int @default(0) // Hide from frontend api
joinedUsers User[] @relation("joinedWaitlists")
// NOTE: DO NOT DOUBLE SEND TO THESE USERS, IF THEY HAVE SIGNED UP SINCE THEY MAY HAVE ALREADY RECEIVED AN EMAIL
// DOUBLE CHECK WHEN SENDING THAT THEY ARE NOT IN THE JOINED USERS LIST ALSO
unaffiliatedEmailUsers String[] @default([])
isDeleted Boolean @default(false)
}
enum SubmissionStatus {
DRAFT // Being prepared, not yet submitted
PENDING // Submitted, awaiting review

View File

@@ -1,5 +1,5 @@
import { Sidebar } from "@/components/__legacy__/Sidebar";
import { Users, DollarSign, UserSearch, FileText, Clock } from "lucide-react";
import { Users, DollarSign, UserSearch, FileText } from "lucide-react";
import { IconSliders } from "@/components/__legacy__/ui/icons";
@@ -11,11 +11,6 @@ const sidebarLinkGroups = [
href: "/admin/marketplace",
icon: <Users className="h-6 w-6" />,
},
{
text: "Waitlist Management",
href: "/admin/waitlist",
icon: <Clock className="h-6 w-6" />,
},
{
text: "User Spending",
href: "/admin/spending",

View File

@@ -1,217 +0,0 @@
"use client";
import { useState } from "react";
import { useQueryClient } from "@tanstack/react-query";
import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
usePostV2CreateWaitlist,
getGetV2ListAllWaitlistsQueryKey,
} from "@/app/api/__generated__/endpoints/admin/admin";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { Plus } from "@phosphor-icons/react";
export function CreateWaitlistButton() {
const [open, setOpen] = useState(false);
const { toast } = useToast();
const queryClient = useQueryClient();
const createWaitlistMutation = usePostV2CreateWaitlist({
mutation: {
onSuccess: (response) => {
if (response.status === 200) {
toast({
title: "Success",
description: "Waitlist created successfully",
});
setOpen(false);
setFormData({
name: "",
slug: "",
subHeading: "",
description: "",
categories: "",
imageUrls: "",
videoUrl: "",
agentOutputDemoUrl: "",
});
queryClient.invalidateQueries({
queryKey: getGetV2ListAllWaitlistsQueryKey(),
});
} else {
toast({
variant: "destructive",
title: "Error",
description: "Failed to create waitlist",
});
}
},
onError: (error) => {
console.error("Error creating waitlist:", error);
toast({
variant: "destructive",
title: "Error",
description: "Failed to create waitlist",
});
},
},
});
const [formData, setFormData] = useState({
name: "",
slug: "",
subHeading: "",
description: "",
categories: "",
imageUrls: "",
videoUrl: "",
agentOutputDemoUrl: "",
});
function handleInputChange(id: string, value: string) {
setFormData((prev) => ({
...prev,
[id]: value,
}));
}
function generateSlug(name: string) {
return name
.toLowerCase()
.replace(/[^a-z0-9]+/g, "-")
.replace(/^-|-$/g, "");
}
function handleSubmit(e: React.FormEvent) {
e.preventDefault();
createWaitlistMutation.mutate({
data: {
name: formData.name,
slug: formData.slug || generateSlug(formData.name),
subHeading: formData.subHeading,
description: formData.description,
categories: formData.categories
? formData.categories.split(",").map((c) => c.trim())
: [],
imageUrls: formData.imageUrls
? formData.imageUrls.split(",").map((u) => u.trim())
: [],
videoUrl: formData.videoUrl || null,
agentOutputDemoUrl: formData.agentOutputDemoUrl || null,
},
});
}
return (
<>
<Button onClick={() => setOpen(true)}>
<Plus size={16} className="mr-2" />
Create Waitlist
</Button>
<Dialog
title="Create New Waitlist"
controlled={{
isOpen: open,
set: async (isOpen) => setOpen(isOpen),
}}
onClose={() => setOpen(false)}
styling={{ maxWidth: "600px" }}
>
<Dialog.Content>
<p className="mb-4 text-sm text-zinc-500">
Create a new waitlist for an upcoming agent. Users can sign up to be
notified when it launches.
</p>
<form onSubmit={handleSubmit} className="flex flex-col gap-2">
<Input
id="name"
label="Name"
value={formData.name}
onChange={(e) => handleInputChange("name", e.target.value)}
placeholder="SEO Analysis Agent"
required
/>
<Input
id="slug"
label="Slug"
value={formData.slug}
onChange={(e) => handleInputChange("slug", e.target.value)}
placeholder="seo-analysis-agent (auto-generated if empty)"
/>
<Input
id="subHeading"
label="Subheading"
value={formData.subHeading}
onChange={(e) => handleInputChange("subHeading", e.target.value)}
placeholder="Analyze your website's SEO in minutes"
required
/>
<Input
id="description"
label="Description"
type="textarea"
value={formData.description}
onChange={(e) => handleInputChange("description", e.target.value)}
placeholder="Detailed description of what this agent does..."
rows={4}
required
/>
<Input
id="categories"
label="Categories (comma-separated)"
value={formData.categories}
onChange={(e) => handleInputChange("categories", e.target.value)}
placeholder="SEO, Marketing, Analysis"
/>
<Input
id="imageUrls"
label="Image URLs (comma-separated)"
value={formData.imageUrls}
onChange={(e) => handleInputChange("imageUrls", e.target.value)}
placeholder="https://example.com/image1.jpg, https://example.com/image2.jpg"
/>
<Input
id="videoUrl"
label="Video URL (optional)"
value={formData.videoUrl}
onChange={(e) => handleInputChange("videoUrl", e.target.value)}
placeholder="https://youtube.com/watch?v=..."
/>
<Input
id="agentOutputDemoUrl"
label="Output Demo URL (optional)"
value={formData.agentOutputDemoUrl}
onChange={(e) =>
handleInputChange("agentOutputDemoUrl", e.target.value)
}
placeholder="https://example.com/demo-output.mp4"
/>
<Dialog.Footer>
<Button
type="button"
variant="secondary"
onClick={() => setOpen(false)}
>
Cancel
</Button>
<Button type="submit" loading={createWaitlistMutation.isPending}>
Create Waitlist
</Button>
</Dialog.Footer>
</form>
</Dialog.Content>
</Dialog>
</>
);
}

View File

@@ -1,221 +0,0 @@
"use client";
import { useState } from "react";
import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { Select } from "@/components/atoms/Select/Select";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { usePutV2UpdateWaitlist } from "@/app/api/__generated__/endpoints/admin/admin";
import type { WaitlistAdminResponse } from "@/app/api/__generated__/models/waitlistAdminResponse";
import type { WaitlistUpdateRequest } from "@/app/api/__generated__/models/waitlistUpdateRequest";
import { WaitlistExternalStatus } from "@/app/api/__generated__/models/waitlistExternalStatus";
type EditWaitlistDialogProps = {
waitlist: WaitlistAdminResponse;
onClose: () => void;
onSave: () => void;
};
const STATUS_OPTIONS = [
{ value: WaitlistExternalStatus.NOT_STARTED, label: "Not Started" },
{ value: WaitlistExternalStatus.WORK_IN_PROGRESS, label: "Work In Progress" },
{ value: WaitlistExternalStatus.DONE, label: "Done" },
{ value: WaitlistExternalStatus.CANCELED, label: "Canceled" },
];
export function EditWaitlistDialog({
waitlist,
onClose,
onSave,
}: EditWaitlistDialogProps) {
const { toast } = useToast();
const updateWaitlistMutation = usePutV2UpdateWaitlist();
const [formData, setFormData] = useState({
name: waitlist.name,
slug: waitlist.slug,
subHeading: waitlist.subHeading,
description: waitlist.description,
categories: waitlist.categories.join(", "),
imageUrls: waitlist.imageUrls.join(", "),
videoUrl: waitlist.videoUrl || "",
agentOutputDemoUrl: waitlist.agentOutputDemoUrl || "",
status: waitlist.status,
storeListingId: waitlist.storeListingId || "",
});
function handleInputChange(id: string, value: string) {
setFormData((prev) => ({
...prev,
[id]: value,
}));
}
function handleStatusChange(value: string) {
setFormData((prev) => ({
...prev,
status: value as WaitlistExternalStatus,
}));
}
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
const updateData: WaitlistUpdateRequest = {
name: formData.name,
slug: formData.slug,
subHeading: formData.subHeading,
description: formData.description,
categories: formData.categories
? formData.categories.split(",").map((c) => c.trim())
: [],
imageUrls: formData.imageUrls
? formData.imageUrls.split(",").map((u) => u.trim())
: [],
videoUrl: formData.videoUrl || null,
agentOutputDemoUrl: formData.agentOutputDemoUrl || null,
status: formData.status,
storeListingId: formData.storeListingId || null,
};
updateWaitlistMutation.mutate(
{ waitlistId: waitlist.id, data: updateData },
{
onSuccess: (response) => {
if (response.status === 200) {
toast({
title: "Success",
description: "Waitlist updated successfully",
});
onSave();
} else {
toast({
variant: "destructive",
title: "Error",
description: "Failed to update waitlist",
});
}
},
onError: () => {
toast({
variant: "destructive",
title: "Error",
description: "Failed to update waitlist",
});
},
},
);
}
return (
<Dialog
title="Edit Waitlist"
controlled={{
isOpen: true,
set: async (open) => {
if (!open) onClose();
},
}}
onClose={onClose}
styling={{ maxWidth: "600px" }}
>
<Dialog.Content>
<p className="mb-4 text-sm text-zinc-500">
Update the waitlist details. Changes will be reflected immediately.
</p>
<form onSubmit={handleSubmit} className="flex flex-col gap-2">
<Input
id="name"
label="Name"
value={formData.name}
onChange={(e) => handleInputChange("name", e.target.value)}
required
/>
<Input
id="slug"
label="Slug"
value={formData.slug}
onChange={(e) => handleInputChange("slug", e.target.value)}
/>
<Input
id="subHeading"
label="Subheading"
value={formData.subHeading}
onChange={(e) => handleInputChange("subHeading", e.target.value)}
required
/>
<Input
id="description"
label="Description"
type="textarea"
value={formData.description}
onChange={(e) => handleInputChange("description", e.target.value)}
rows={4}
required
/>
<Select
id="status"
label="Status"
value={formData.status}
onValueChange={handleStatusChange}
options={STATUS_OPTIONS}
/>
<Input
id="categories"
label="Categories (comma-separated)"
value={formData.categories}
onChange={(e) => handleInputChange("categories", e.target.value)}
/>
<Input
id="imageUrls"
label="Image URLs (comma-separated)"
value={formData.imageUrls}
onChange={(e) => handleInputChange("imageUrls", e.target.value)}
/>
<Input
id="videoUrl"
label="Video URL"
value={formData.videoUrl}
onChange={(e) => handleInputChange("videoUrl", e.target.value)}
/>
<Input
id="agentOutputDemoUrl"
label="Output Demo URL"
value={formData.agentOutputDemoUrl}
onChange={(e) =>
handleInputChange("agentOutputDemoUrl", e.target.value)
}
/>
<Input
id="storeListingId"
label="Store Listing ID (for linking)"
value={formData.storeListingId}
onChange={(e) =>
handleInputChange("storeListingId", e.target.value)
}
placeholder="Leave empty if not linked"
/>
<Dialog.Footer>
<Button type="button" variant="secondary" onClick={onClose}>
Cancel
</Button>
<Button type="submit" loading={updateWaitlistMutation.isPending}>
Save Changes
</Button>
</Dialog.Footer>
</form>
</Dialog.Content>
</Dialog>
);
}

View File

@@ -1,156 +0,0 @@
"use client";
import { Button } from "@/components/atoms/Button/Button";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { User, Envelope, DownloadSimple } from "@phosphor-icons/react";
import { useGetV2GetWaitlistSignups } from "@/app/api/__generated__/endpoints/admin/admin";
type WaitlistSignupsDialogProps = {
waitlistId: string;
onClose: () => void;
};
export function WaitlistSignupsDialog({
waitlistId,
onClose,
}: WaitlistSignupsDialogProps) {
const {
data: signupsResponse,
isLoading,
isError,
} = useGetV2GetWaitlistSignups(waitlistId);
const signups = signupsResponse?.status === 200 ? signupsResponse.data : null;
function exportToCSV() {
if (!signups) return;
const headers = ["Type", "Email", "User ID", "Username"];
const rows = signups.signups.map((signup) => [
signup.type,
signup.email || "",
signup.userId || "",
signup.username || "",
]);
const escapeCell = (cell: string) => `"${cell.replace(/"/g, '""')}"`;
const csvContent = [
headers.join(","),
...rows.map((row) => row.map(escapeCell).join(",")),
].join("\n");
const blob = new Blob([csvContent], { type: "text/csv" });
const url = window.URL.createObjectURL(blob);
const a = document.createElement("a");
a.href = url;
a.download = `waitlist-${waitlistId}-signups.csv`;
a.click();
window.URL.revokeObjectURL(url);
}
function renderContent() {
if (isLoading) {
return <div className="py-10 text-center">Loading signups...</div>;
}
if (isError) {
return (
<div className="py-10 text-center text-red-500">
Failed to load signups. Please try again.
</div>
);
}
if (!signups || signups.signups.length === 0) {
return (
<div className="py-10 text-center text-gray-500">
No signups yet for this waitlist.
</div>
);
}
return (
<>
<div className="flex justify-end">
<Button variant="secondary" size="small" onClick={exportToCSV}>
<DownloadSimple className="mr-2 h-4 w-4" size={16} />
Export CSV
</Button>
</div>
<div className="max-h-[400px] overflow-y-auto rounded-md border">
<table className="w-full">
<thead className="bg-gray-50 dark:bg-gray-800">
<tr>
<th className="px-4 py-3 text-left text-sm font-medium">
Type
</th>
<th className="px-4 py-3 text-left text-sm font-medium">
Email / Username
</th>
<th className="px-4 py-3 text-left text-sm font-medium">
User ID
</th>
</tr>
</thead>
<tbody className="divide-y">
{signups.signups.map((signup, index) => (
<tr key={index}>
<td className="px-4 py-3">
{signup.type === "user" ? (
<span className="flex items-center gap-1 text-blue-600">
<User className="h-4 w-4" size={16} /> User
</span>
) : (
<span className="flex items-center gap-1 text-gray-600">
<Envelope className="h-4 w-4" size={16} /> Email
</span>
)}
</td>
<td className="px-4 py-3">
{signup.type === "user"
? signup.username || signup.email
: signup.email}
</td>
<td className="px-4 py-3 font-mono text-sm">
{signup.userId || "-"}
</td>
</tr>
))}
</tbody>
</table>
</div>
</>
);
}
return (
<Dialog
title="Waitlist Signups"
controlled={{
isOpen: true,
set: async (open) => {
if (!open) onClose();
},
}}
onClose={onClose}
styling={{ maxWidth: "700px" }}
>
<Dialog.Content>
<p className="mb-4 text-sm text-zinc-500">
{signups
? `${signups.totalCount} total signups`
: "Loading signups..."}
</p>
{renderContent()}
<Dialog.Footer>
<Button variant="secondary" onClick={onClose}>
Close
</Button>
</Dialog.Footer>
</Dialog.Content>
</Dialog>
);
}

View File

@@ -1,206 +0,0 @@
"use client";
import { useState } from "react";
import { useQueryClient } from "@tanstack/react-query";
import {
Table,
TableBody,
TableCell,
TableHead,
TableHeader,
TableRow,
} from "@/components/__legacy__/ui/table";
import { Button } from "@/components/atoms/Button/Button";
import {
useGetV2ListAllWaitlists,
useDeleteV2DeleteWaitlist,
getGetV2ListAllWaitlistsQueryKey,
} from "@/app/api/__generated__/endpoints/admin/admin";
import type { WaitlistAdminResponse } from "@/app/api/__generated__/models/waitlistAdminResponse";
import { EditWaitlistDialog } from "./EditWaitlistDialog";
import { WaitlistSignupsDialog } from "./WaitlistSignupsDialog";
import { Trash, PencilSimple, Users, Link } from "@phosphor-icons/react";
import { useToast } from "@/components/molecules/Toast/use-toast";
export function WaitlistTable() {
const [editingWaitlist, setEditingWaitlist] =
useState<WaitlistAdminResponse | null>(null);
const [viewingSignups, setViewingSignups] = useState<string | null>(null);
const { toast } = useToast();
const queryClient = useQueryClient();
const { data: response, isLoading, error } = useGetV2ListAllWaitlists();
const deleteWaitlistMutation = useDeleteV2DeleteWaitlist({
mutation: {
onSuccess: () => {
toast({
title: "Success",
description: "Waitlist deleted successfully",
});
queryClient.invalidateQueries({
queryKey: getGetV2ListAllWaitlistsQueryKey(),
});
},
onError: (error) => {
console.error("Error deleting waitlist:", error);
toast({
variant: "destructive",
title: "Error",
description: "Failed to delete waitlist",
});
},
},
});
function handleDelete(waitlistId: string) {
if (!confirm("Are you sure you want to delete this waitlist?")) return;
deleteWaitlistMutation.mutate({ waitlistId });
}
function handleWaitlistSaved() {
setEditingWaitlist(null);
queryClient.invalidateQueries({
queryKey: getGetV2ListAllWaitlistsQueryKey(),
});
}
function formatStatus(status: string) {
const statusColors: Record<string, string> = {
NOT_STARTED: "bg-gray-100 text-gray-800",
WORK_IN_PROGRESS: "bg-blue-100 text-blue-800",
DONE: "bg-green-100 text-green-800",
CANCELED: "bg-red-100 text-red-800",
};
return (
<span
className={`rounded-full px-2 py-1 text-xs font-medium ${statusColors[status] || "bg-gray-100 text-gray-700"}`}
>
{status.replace(/_/g, " ")}
</span>
);
}
function formatDate(dateStr: string) {
if (!dateStr) return "-";
return new Intl.DateTimeFormat("en-US", {
month: "short",
day: "numeric",
year: "numeric",
}).format(new Date(dateStr));
}
if (isLoading) {
return <div className="py-10 text-center">Loading waitlists...</div>;
}
if (error) {
return (
<div className="py-10 text-center text-red-500">
Error loading waitlists. Please try again.
</div>
);
}
const waitlists = response?.status === 200 ? response.data.waitlists : [];
if (waitlists.length === 0) {
return (
<div className="py-10 text-center text-gray-500">
No waitlists found. Create one to get started!
</div>
);
}
return (
<>
<div className="rounded-md border bg-white">
<Table>
<TableHeader className="bg-gray-50">
<TableRow>
<TableHead className="font-medium">Name</TableHead>
<TableHead className="font-medium">Status</TableHead>
<TableHead className="font-medium">Signups</TableHead>
<TableHead className="font-medium">Votes</TableHead>
<TableHead className="font-medium">Created</TableHead>
<TableHead className="font-medium">Linked Agent</TableHead>
<TableHead className="font-medium">Actions</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{waitlists.map((waitlist) => (
<TableRow key={waitlist.id}>
<TableCell>
<div>
<div className="font-medium">{waitlist.name}</div>
<div className="text-sm text-gray-500">
{waitlist.subHeading}
</div>
</div>
</TableCell>
<TableCell>{formatStatus(waitlist.status)}</TableCell>
<TableCell>{waitlist.signupCount}</TableCell>
<TableCell>{waitlist.votes}</TableCell>
<TableCell>{formatDate(waitlist.createdAt)}</TableCell>
<TableCell>
{waitlist.storeListingId ? (
<span className="text-green-600">
<Link size={16} className="inline" /> Linked
</span>
) : (
<span className="text-gray-400">Not linked</span>
)}
</TableCell>
<TableCell>
<div className="flex gap-2">
<Button
variant="ghost"
size="small"
onClick={() => setViewingSignups(waitlist.id)}
title="View signups"
>
<Users size={16} />
</Button>
<Button
variant="ghost"
size="small"
onClick={() => setEditingWaitlist(waitlist)}
title="Edit"
>
<PencilSimple size={16} />
</Button>
<Button
variant="ghost"
size="small"
onClick={() => handleDelete(waitlist.id)}
title="Delete"
disabled={deleteWaitlistMutation.isPending}
>
<Trash size={16} className="text-red-500" />
</Button>
</div>
</TableCell>
</TableRow>
))}
</TableBody>
</Table>
</div>
{editingWaitlist && (
<EditWaitlistDialog
waitlist={editingWaitlist}
onClose={() => setEditingWaitlist(null)}
onSave={handleWaitlistSaved}
/>
)}
{viewingSignups && (
<WaitlistSignupsDialog
waitlistId={viewingSignups}
onClose={() => setViewingSignups(null)}
/>
)}
</>
);
}

View File

@@ -1,52 +0,0 @@
import { withRoleAccess } from "@/lib/withRoleAccess";
import { Suspense } from "react";
import { WaitlistTable } from "./components/WaitlistTable";
import { CreateWaitlistButton } from "./components/CreateWaitlistButton";
import { Warning } from "@phosphor-icons/react/dist/ssr";
function WaitlistDashboard() {
return (
<div className="mx-auto p-6">
<div className="flex flex-col gap-4">
<div className="flex items-center justify-between">
<div>
<h1 className="text-3xl font-bold">Waitlist Management</h1>
<p className="text-gray-500">
Manage upcoming agent waitlists and track signups
</p>
</div>
<CreateWaitlistButton />
</div>
<div className="flex items-start gap-3 rounded-lg border border-amber-300 bg-amber-50 p-4 dark:border-amber-700 dark:bg-amber-950">
<Warning
className="mt-0.5 h-5 w-5 flex-shrink-0 text-amber-600 dark:text-amber-400"
weight="fill"
/>
<div className="text-sm text-amber-800 dark:text-amber-200">
<p className="font-medium">TODO: Email-only signup notifications</p>
<p className="mt-1 text-amber-700 dark:text-amber-300">
Notifications for email-only signups (users who weren&apos;t
logged in) have not been implemented yet. Currently only
registered users will receive launch emails.
</p>
</div>
</div>
<Suspense
fallback={
<div className="py-10 text-center">Loading waitlists...</div>
}
>
<WaitlistTable />
</Suspense>
</div>
</div>
);
}
export default async function WaitlistDashboardPage() {
const withAdminAccess = await withRoleAccess(["admin"]);
const ProtectedWaitlistDashboard = await withAdminAccess(WaitlistDashboard);
return <ProtectedWaitlistDashboard />;
}

View File

@@ -8,7 +8,6 @@ import { useMainMarketplacePage } from "./useMainMarketplacePage";
import { FeaturedCreators } from "../FeaturedCreators/FeaturedCreators";
import { MainMarketplacePageLoading } from "../MainMarketplacePageLoading";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { WaitlistSection } from "../WaitlistSection/WaitlistSection";
export const MainMarkeplacePage = () => {
const { featuredAgents, topAgents, featuredCreators, isLoading, hasError } =
@@ -47,10 +46,6 @@ export const MainMarkeplacePage = () => {
{/* 100px margin because our featured sections button are placed 40px below the container */}
<Separator className="mb-6 mt-24" />
{/* Waitlist Section - "Help Shape What's Next" */}
<WaitlistSection />
<Separator className="mb-6 mt-12" />
{topAgents && (
<AgentsSection sectionTitle="Top Agents" agents={topAgents.agents} />
)}

View File

@@ -1,105 +0,0 @@
"use client";
import Image from "next/image";
import { Button } from "@/components/atoms/Button/Button";
import { Check } from "@phosphor-icons/react";
interface WaitlistCardProps {
name: string;
subHeading: string;
description: string;
imageUrl: string | null;
isMember?: boolean;
onCardClick: () => void;
onJoinClick: (e: React.MouseEvent) => void;
}
export function WaitlistCard({
name,
subHeading,
description,
imageUrl,
isMember = false,
onCardClick,
onJoinClick,
}: WaitlistCardProps) {
function handleJoinClick(e: React.MouseEvent) {
e.stopPropagation();
onJoinClick(e);
}
return (
<div
className="flex h-[24rem] w-full max-w-md cursor-pointer flex-col items-start rounded-3xl bg-white transition-all duration-300 hover:shadow-lg dark:bg-zinc-900 dark:hover:shadow-gray-700"
onClick={onCardClick}
data-testid="waitlist-card"
role="button"
tabIndex={0}
aria-label={`${name} waitlist card`}
onKeyDown={(e) => {
if (e.key === "Enter" || e.key === " ") {
onCardClick();
}
}}
>
{/* Image Section */}
<div className="relative aspect-[2/1.2] w-full overflow-hidden rounded-large md:aspect-[2.17/1]">
{imageUrl ? (
<Image
src={imageUrl}
alt={`${name} preview image`}
fill
className="object-cover"
/>
) : (
<div className="flex h-full w-full items-center justify-center bg-gradient-to-br from-neutral-200 to-neutral-300 dark:from-neutral-700 dark:to-neutral-800">
<span className="text-4xl font-bold text-neutral-400 dark:text-neutral-500">
{name.charAt(0)}
</span>
</div>
)}
</div>
<div className="mt-3 flex w-full flex-1 flex-col px-4">
{/* Name and Subheading */}
<div className="flex w-full flex-col">
<h3 className="line-clamp-1 font-poppins text-xl font-semibold text-[#272727] dark:text-neutral-100">
{name}
</h3>
<p className="mt-1 line-clamp-1 text-sm text-neutral-500 dark:text-neutral-400">
{subHeading}
</p>
</div>
{/* Description */}
<div className="mt-2 flex w-full flex-col">
<p className="line-clamp-5 text-sm font-normal leading-relaxed text-neutral-600 dark:text-neutral-400">
{description}
</p>
</div>
<div className="flex-grow" />
{/* Join Waitlist Button */}
<div className="mt-4 w-full pb-4">
{isMember ? (
<Button
disabled
className="w-full rounded-full bg-green-600 text-white hover:bg-green-600 dark:bg-green-700 dark:hover:bg-green-700"
>
<Check className="mr-2" size={16} weight="bold" />
On the waitlist
</Button>
) : (
<Button
onClick={handleJoinClick}
className="w-full rounded-full bg-zinc-800 text-white hover:bg-zinc-700 dark:bg-zinc-700 dark:hover:bg-zinc-600"
>
Join waitlist
</Button>
)}
</div>
</div>
</div>
);
}

View File

@@ -1,356 +0,0 @@
"use client";
import { useState } from "react";
import Image from "next/image";
import { Button } from "@/components/atoms/Button/Button";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { Input } from "@/components/atoms/Input/Input";
import {
Carousel,
CarouselContent,
CarouselItem,
CarouselNext,
CarouselPrevious,
} from "@/components/__legacy__/ui/carousel";
import type { StoreWaitlistEntry } from "@/app/api/__generated__/models/storeWaitlistEntry";
import { Check, Play } from "@phosphor-icons/react";
import { useSupabaseStore } from "@/lib/supabase/hooks/useSupabaseStore";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { usePostV2AddSelfToTheAgentWaitlist } from "@/app/api/__generated__/endpoints/store/store";
interface MediaItem {
type: "image" | "video";
url: string;
label?: string;
}
// Extract YouTube video ID from various URL formats
function getYouTubeVideoId(url: string): string | null {
const regExp =
/^.*((youtu.be\/)|(v\/)|(\/u\/\w\/)|(embed\/)|(watch\?))\??v?=?([^#&?]*).*/;
const match = url.match(regExp);
return match && match[7].length === 11 ? match[7] : null;
}
// Validate video URL for security
function isValidVideoUrl(url: string): boolean {
if (url.startsWith("data:video")) {
return true;
}
const videoExtensions = /\.(mp4|webm|ogg)$/i;
const youtubeRegex = /^(https?:\/\/)?(www\.)?(youtube\.com|youtu\.?be)\/.+$/;
const validUrl = /^(https?:\/\/)/i;
const cleanedUrl = url.split("?")[0];
return (
(validUrl.test(url) && videoExtensions.test(cleanedUrl)) ||
youtubeRegex.test(url)
);
}
// Video player with YouTube embed support
function VideoPlayer({
url,
autoPlay = false,
className = "",
}: {
url: string;
autoPlay?: boolean;
className?: string;
}) {
const youtubeId = getYouTubeVideoId(url);
if (youtubeId) {
return (
<iframe
src={`https://www.youtube.com/embed/${youtubeId}${autoPlay ? "?autoplay=1" : ""}`}
title="YouTube video player"
className={className}
allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture"
sandbox="allow-same-origin allow-scripts allow-presentation"
allowFullScreen
/>
);
}
if (!isValidVideoUrl(url)) {
return (
<div
className={`flex items-center justify-center bg-zinc-800 ${className}`}
>
<span className="text-sm text-zinc-400">Invalid video URL</span>
</div>
);
}
return <video src={url} controls autoPlay={autoPlay} className={className} />;
}
function MediaCarousel({ waitlist }: { waitlist: StoreWaitlistEntry }) {
const [activeVideo, setActiveVideo] = useState<string | null>(null);
// Build media items array: videos first, then images
const mediaItems: MediaItem[] = [
...(waitlist.videoUrl
? [{ type: "video" as const, url: waitlist.videoUrl, label: "Video" }]
: []),
...(waitlist.agentOutputDemoUrl
? [
{
type: "video" as const,
url: waitlist.agentOutputDemoUrl,
label: "Demo",
},
]
: []),
...waitlist.imageUrls.map((url) => ({ type: "image" as const, url })),
];
if (mediaItems.length === 0) return null;
// Single item - no carousel needed
if (mediaItems.length === 1) {
const item = mediaItems[0];
return (
<div className="relative aspect-[350/196] w-full overflow-hidden rounded-large">
{item.type === "image" ? (
<Image
src={item.url}
alt={`${waitlist.name} preview`}
fill
className="object-cover"
/>
) : (
<VideoPlayer url={item.url} className="h-full w-full object-cover" />
)}
</div>
);
}
// Multiple items - use carousel
return (
<Carousel className="w-full">
<CarouselContent>
{mediaItems.map((item, index) => (
<CarouselItem key={index}>
<div className="relative aspect-[350/196] w-full overflow-hidden rounded-large">
{item.type === "image" ? (
<Image
src={item.url}
alt={`${waitlist.name} preview ${index + 1}`}
fill
className="object-cover"
/>
) : activeVideo === item.url ? (
<VideoPlayer
url={item.url}
autoPlay
className="h-full w-full object-cover"
/>
) : (
<button
onClick={() => setActiveVideo(item.url)}
className="group relative h-full w-full bg-zinc-900"
>
<div className="absolute inset-0 flex items-center justify-center">
<div className="flex h-16 w-16 items-center justify-center rounded-full bg-white/90 transition-transform group-hover:scale-110">
<Play size={32} weight="fill" className="text-zinc-800" />
</div>
</div>
<span className="absolute bottom-3 left-3 text-sm text-white">
{item.label}
</span>
</button>
)}
</div>
</CarouselItem>
))}
</CarouselContent>
<CarouselPrevious className="left-2 top-1/2 -translate-y-1/2" />
<CarouselNext className="right-2 top-1/2 -translate-y-1/2" />
</Carousel>
);
}
interface WaitlistDetailModalProps {
waitlist: StoreWaitlistEntry;
isMember?: boolean;
onClose: () => void;
onJoinSuccess?: (waitlistId: string) => void;
}
export function WaitlistDetailModal({
waitlist,
isMember = false,
onClose,
onJoinSuccess,
}: WaitlistDetailModalProps) {
const { user } = useSupabaseStore();
const [email, setEmail] = useState("");
const [success, setSuccess] = useState(false);
const { toast } = useToast();
const joinWaitlistMutation = usePostV2AddSelfToTheAgentWaitlist();
function handleJoin() {
joinWaitlistMutation.mutate(
{
waitlistId: waitlist.waitlistId,
data: { email: user ? undefined : email },
},
{
onSuccess: (response) => {
if (response.status === 200) {
setSuccess(true);
toast({
title: "You're on the waitlist!",
description: `We'll notify you when ${waitlist.name} goes live.`,
});
onJoinSuccess?.(waitlist.waitlistId);
} else {
toast({
variant: "destructive",
title: "Error",
description: "Failed to join waitlist. Please try again.",
});
}
},
onError: () => {
toast({
variant: "destructive",
title: "Error",
description: "Failed to join waitlist. Please try again.",
});
},
},
);
}
// Success state
if (success) {
return (
<Dialog
title=""
controlled={{
isOpen: true,
set: async (open) => {
if (!open) onClose();
},
}}
onClose={onClose}
styling={{ maxWidth: "500px" }}
>
<Dialog.Content>
<div className="flex flex-col items-center justify-center py-4 text-center">
{/* Party emoji */}
<span className="mb-2 text-5xl">🎉</span>
{/* Title */}
<h2 className="mb-2 font-poppins text-[22px] font-medium leading-7 text-zinc-900 dark:text-zinc-100">
You&apos;re on the waitlist
</h2>
{/* Subtitle */}
<p className="text-base leading-[26px] text-zinc-600 dark:text-zinc-400">
Thanks for helping us prioritize which agents to build next.
We&apos;ll notify you when this agent goes live in the
marketplace.
</p>
</div>
{/* Close button */}
<Dialog.Footer className="flex justify-center pb-2 pt-4">
<Button
variant="secondary"
onClick={onClose}
className="rounded-full border border-zinc-700 bg-white px-4 py-3 text-zinc-900 hover:bg-zinc-100 dark:border-zinc-500 dark:bg-zinc-800 dark:text-zinc-100 dark:hover:bg-zinc-700"
>
Close
</Button>
</Dialog.Footer>
</Dialog.Content>
</Dialog>
);
}
// Main modal - handles both member and non-member states
return (
<Dialog
title="Join the waitlist"
controlled={{
isOpen: true,
set: async (open) => {
if (!open) onClose();
},
}}
onClose={onClose}
styling={{ maxWidth: "500px" }}
>
<Dialog.Content>
{/* Subtitle */}
<p className="mb-6 text-center text-base text-zinc-600 dark:text-zinc-400">
Help us decide what to build next and get notified when this agent
is ready
</p>
{/* Media Carousel */}
<MediaCarousel waitlist={waitlist} />
{/* Agent Name */}
<h3 className="mt-4 font-poppins text-[22px] font-medium leading-7 text-zinc-800 dark:text-zinc-100">
{waitlist.name}
</h3>
{/* Agent Description */}
<p className="mt-2 line-clamp-5 text-sm leading-[22px] text-zinc-500 dark:text-zinc-400">
{waitlist.description}
</p>
{/* Email input for non-logged-in users who haven't joined */}
{!isMember && !user && (
<div className="mt-4 pr-1">
<Input
id="email"
label="Email address"
type="email"
placeholder="you@example.com"
value={email}
onChange={(e) => setEmail(e.target.value)}
required
/>
</div>
)}
{/* Footer buttons */}
<Dialog.Footer className="sticky bottom-0 mt-6 flex justify-center gap-3 bg-white pb-2 pt-4 dark:bg-zinc-900">
{isMember ? (
<Button
disabled
className="rounded-full bg-green-600 px-4 py-3 text-white hover:bg-green-600 dark:bg-green-700 dark:hover:bg-green-700"
>
<Check size={16} className="mr-2" />
You&apos;re on the waitlist
</Button>
) : (
<>
<Button
onClick={handleJoin}
loading={joinWaitlistMutation.isPending}
disabled={!user && !email}
className="rounded-full bg-zinc-800 px-4 py-3 text-white hover:bg-zinc-700 dark:bg-zinc-700 dark:hover:bg-zinc-600"
>
Join waitlist
</Button>
<Button
type="button"
variant="secondary"
onClick={onClose}
className="rounded-full bg-zinc-200 px-4 py-3 text-zinc-900 hover:bg-zinc-300 dark:bg-zinc-700 dark:text-zinc-100 dark:hover:bg-zinc-600"
>
Not now
</Button>
</>
)}
</Dialog.Footer>
</Dialog.Content>
</Dialog>
);
}

View File

@@ -1,102 +0,0 @@
"use client";
import { useState } from "react";
import {
Carousel,
CarouselContent,
CarouselItem,
} from "@/components/__legacy__/ui/carousel";
import { WaitlistCard } from "../WaitlistCard/WaitlistCard";
import { WaitlistDetailModal } from "../WaitlistDetailModal/WaitlistDetailModal";
import type { StoreWaitlistEntry } from "@/app/api/__generated__/models/storeWaitlistEntry";
import { useWaitlistSection } from "./useWaitlistSection";
export function WaitlistSection() {
const { waitlists, joinedWaitlistIds, isLoading, hasError, markAsJoined } =
useWaitlistSection();
const [selectedWaitlist, setSelectedWaitlist] =
useState<StoreWaitlistEntry | null>(null);
function handleOpenModal(waitlist: StoreWaitlistEntry) {
setSelectedWaitlist(waitlist);
}
function handleJoinSuccess(waitlistId: string) {
markAsJoined(waitlistId);
}
// Don't render if loading, error, or no waitlists
if (isLoading || hasError || !waitlists || waitlists.length === 0) {
return null;
}
return (
<div className="flex flex-col items-center justify-center">
<div className="w-full max-w-[1360px]">
{/* Section Header */}
<div className="mb-6">
<h2 className="font-poppins text-2xl font-semibold text-[#282828] dark:text-neutral-200">
Help Shape What&apos;s Next
</h2>
<p className="mt-2 text-base text-neutral-600 dark:text-neutral-400">
These agents are in development. Your interest helps us prioritize
what gets built and we&apos;ll notify you when they&apos;re ready.
</p>
</div>
{/* Mobile Carousel View */}
<Carousel
className="md:hidden"
opts={{
loop: true,
}}
>
<CarouselContent>
{waitlists.map((waitlist) => (
<CarouselItem
key={waitlist.waitlistId}
className="min-w-64 max-w-71"
>
<WaitlistCard
name={waitlist.name}
subHeading={waitlist.subHeading}
description={waitlist.description}
imageUrl={waitlist.imageUrls[0] || null}
isMember={joinedWaitlistIds.has(waitlist.waitlistId)}
onCardClick={() => handleOpenModal(waitlist)}
onJoinClick={() => handleOpenModal(waitlist)}
/>
</CarouselItem>
))}
</CarouselContent>
</Carousel>
{/* Desktop Grid View */}
<div className="hidden grid-cols-1 place-items-center gap-6 md:grid md:grid-cols-2 lg:grid-cols-3">
{waitlists.map((waitlist) => (
<WaitlistCard
key={waitlist.waitlistId}
name={waitlist.name}
subHeading={waitlist.subHeading}
description={waitlist.description}
imageUrl={waitlist.imageUrls[0] || null}
isMember={joinedWaitlistIds.has(waitlist.waitlistId)}
onCardClick={() => handleOpenModal(waitlist)}
onJoinClick={() => handleOpenModal(waitlist)}
/>
))}
</div>
</div>
{/* Single Modal for both viewing and joining */}
{selectedWaitlist && (
<WaitlistDetailModal
waitlist={selectedWaitlist}
isMember={joinedWaitlistIds.has(selectedWaitlist.waitlistId)}
onClose={() => setSelectedWaitlist(null)}
onJoinSuccess={handleJoinSuccess}
/>
)}
</div>
);
}

View File

@@ -1,58 +0,0 @@
"use client";
import { useMemo } from "react";
import { useSupabaseStore } from "@/lib/supabase/hooks/useSupabaseStore";
import {
useGetV2GetTheAgentWaitlist,
useGetV2GetWaitlistIdsTheCurrentUserHasJoined,
getGetV2GetWaitlistIdsTheCurrentUserHasJoinedQueryKey,
} from "@/app/api/__generated__/endpoints/store/store";
import type { StoreWaitlistEntry } from "@/app/api/__generated__/models/storeWaitlistEntry";
import { useQueryClient } from "@tanstack/react-query";
export function useWaitlistSection() {
const { user } = useSupabaseStore();
const queryClient = useQueryClient();
// Fetch waitlists
const {
data: waitlistsResponse,
isLoading: waitlistsLoading,
isError: waitlistsError,
} = useGetV2GetTheAgentWaitlist();
// Fetch memberships if logged in
const { data: membershipsResponse, isLoading: membershipsLoading } =
useGetV2GetWaitlistIdsTheCurrentUserHasJoined({
query: {
enabled: !!user,
},
});
const waitlists: StoreWaitlistEntry[] = useMemo(() => {
if (waitlistsResponse?.status === 200) {
return waitlistsResponse.data.listings;
}
return [];
}, [waitlistsResponse]);
const joinedWaitlistIds: Set<string> = useMemo(() => {
if (membershipsResponse?.status === 200) {
return new Set(membershipsResponse.data);
}
return new Set();
}, [membershipsResponse]);
const isLoading = waitlistsLoading || (!!user && membershipsLoading);
const hasError = waitlistsError;
// Function to add a waitlist ID to joined set (called after successful join)
function markAsJoined(_waitlistId: string) {
// Invalidate the memberships query to refetch
queryClient.invalidateQueries({
queryKey: getGetV2GetWaitlistIdsTheCurrentUserHasJoinedQueryKey(),
});
}
return { waitlists, joinedWaitlistIds, isLoading, hasError, markAsJoined };
}

View File

@@ -5234,301 +5234,6 @@
}
}
},
"/api/store/admin/waitlist": {
"get": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "List All Waitlists",
"description": "Get all waitlists with admin details (admin only).\n\nReturns:\n WaitlistAdminListResponse with all waitlists",
"operationId": "getV2List all waitlists",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistAdminListResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
}
},
"security": [{ "HTTPBearerJWT": [] }]
},
"post": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Create Waitlist",
"description": "Create a new waitlist (admin only).\n\nArgs:\n request: Waitlist creation details\n user_id: Authenticated admin user creating the waitlist\n\nReturns:\n WaitlistAdminResponse with the created waitlist details",
"operationId": "postV2Create waitlist",
"requestBody": {
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/WaitlistCreateRequest" }
}
},
"required": true
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistAdminResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
},
"security": [{ "HTTPBearerJWT": [] }]
}
},
"/api/store/admin/waitlist/{waitlist_id}": {
"delete": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Delete Waitlist",
"description": "Soft delete a waitlist (admin only).\n\nArgs:\n waitlist_id: ID of the waitlist to delete\n\nReturns:\n Success message",
"operationId": "deleteV2Delete waitlist",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist"
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": { "application/json": { "schema": {} } }
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
},
"get": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Get Waitlist Details",
"description": "Get a single waitlist with admin details (admin only).\n\nArgs:\n waitlist_id: ID of the waitlist to retrieve\n\nReturns:\n WaitlistAdminResponse with waitlist details",
"operationId": "getV2Get waitlist details",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist"
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistAdminResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
},
"put": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Update Waitlist",
"description": "Update a waitlist (admin only).\n\nArgs:\n waitlist_id: ID of the waitlist to update\n request: Fields to update\n\nReturns:\n WaitlistAdminResponse with updated waitlist details",
"operationId": "putV2Update waitlist",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist"
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/WaitlistUpdateRequest" }
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistAdminResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
}
},
"/api/store/admin/waitlist/{waitlist_id}/link": {
"post": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Link Waitlist to Store Listing",
"description": "Link a waitlist to a store listing (admin only).\n\nWhen the linked store listing is approved/published, waitlist users\nwill be automatically notified.\n\nArgs:\n waitlist_id: ID of the waitlist\n store_listing_id: ID of the store listing to link\n\nReturns:\n WaitlistAdminResponse with updated waitlist details",
"operationId": "postV2Link waitlist to store listing",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist"
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_postV2Link_waitlist_to_store_listing"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistAdminResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
}
},
"/api/store/admin/waitlist/{waitlist_id}/signups": {
"get": {
"tags": ["v2", "admin", "store", "admin", "waitlist"],
"summary": "Get Waitlist Signups",
"description": "Get all signups for a waitlist (admin only).\n\nArgs:\n waitlist_id: ID of the waitlist\n\nReturns:\n WaitlistSignupListResponse with all signups",
"operationId": "getV2Get waitlist signups",
"security": [{ "HTTPBearerJWT": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist"
}
],
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/WaitlistSignupListResponse"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
}
},
"/api/store/agents": {
"get": {
"tags": ["v2", "store", "public"],
@@ -6376,101 +6081,6 @@
}
}
},
"/api/store/waitlist": {
"get": {
"tags": ["v2", "store", "public"],
"summary": "Get the agent waitlist",
"description": "Get all active waitlists for public display.",
"operationId": "getV2Get the agent waitlist",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/StoreWaitlistsAllResponse"
}
}
}
}
}
}
},
"/api/store/waitlist/my-memberships": {
"get": {
"tags": ["v2", "store", "private"],
"summary": "Get waitlist IDs the current user has joined",
"description": "Returns list of waitlist IDs the authenticated user has joined.",
"operationId": "getV2Get waitlist ids the current user has joined",
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"items": { "type": "string" },
"type": "array",
"title": "Response Getv2Get Waitlist Ids The Current User Has Joined"
}
}
}
},
"401": {
"$ref": "#/components/responses/HTTP401NotAuthenticatedError"
}
},
"security": [{ "HTTPBearerJWT": [] }]
}
},
"/api/store/waitlist/{waitlist_id}/join": {
"post": {
"tags": ["v2", "store", "public"],
"summary": "Add self to the agent waitlist",
"description": "Add the current user to the agent waitlist.",
"operationId": "postV2Add self to the agent waitlist",
"security": [{ "HTTPBearer": [] }],
"parameters": [
{
"name": "waitlist_id",
"in": "path",
"required": true,
"schema": {
"type": "string",
"description": "The ID of the waitlist to join",
"title": "Waitlist Id"
},
"description": "The ID of the waitlist to join"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Body_postV2Add_self_to_the_agent_waitlist"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/StoreWaitlistEntry" }
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": { "$ref": "#/components/schemas/HTTPValidationError" }
}
}
}
}
}
},
"/api/workspace/files/{file_id}/download": {
"get": {
"tags": ["workspace"],
@@ -7268,17 +6878,6 @@
"required": ["store_listing_version_id"],
"title": "Body_postV2Add marketplace agent"
},
"Body_postV2Add_self_to_the_agent_waitlist": {
"properties": {
"email": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Email",
"description": "Email address for unauthenticated users"
}
},
"type": "object",
"title": "Body_postV2Add self to the agent waitlist"
},
"Body_postV2Execute_a_preset": {
"properties": {
"inputs": {
@@ -7297,18 +6896,6 @@
"type": "object",
"title": "Body_postV2Execute a preset"
},
"Body_postV2Link_waitlist_to_store_listing": {
"properties": {
"store_listing_id": {
"type": "string",
"title": "Store Listing Id",
"description": "The ID of the store listing"
}
},
"type": "object",
"required": ["store_listing_id"],
"title": "Body_postV2Link waitlist to store listing"
},
"Body_postV2Upload_submission_media": {
"properties": {
"file": { "type": "string", "format": "binary", "title": "File" }
@@ -9272,8 +8859,7 @@
"REFUND_REQUEST",
"REFUND_PROCESSED",
"AGENT_APPROVED",
"AGENT_REJECTED",
"WAITLIST_LAUNCH"
"AGENT_REJECTED"
],
"title": "NotificationType"
},
@@ -10908,57 +10494,6 @@
"required": ["submissions", "pagination"],
"title": "StoreSubmissionsResponse"
},
"StoreWaitlistEntry": {
"properties": {
"waitlistId": { "type": "string", "title": "Waitlistid" },
"slug": { "type": "string", "title": "Slug" },
"name": { "type": "string", "title": "Name" },
"subHeading": { "type": "string", "title": "Subheading" },
"videoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Videourl"
},
"agentOutputDemoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Agentoutputdemourl"
},
"imageUrls": {
"items": { "type": "string" },
"type": "array",
"title": "Imageurls"
},
"description": { "type": "string", "title": "Description" },
"categories": {
"items": { "type": "string" },
"type": "array",
"title": "Categories"
}
},
"type": "object",
"required": [
"waitlistId",
"slug",
"name",
"subHeading",
"imageUrls",
"description",
"categories"
],
"title": "StoreWaitlistEntry",
"description": "Public waitlist entry - no PII fields exposed."
},
"StoreWaitlistsAllResponse": {
"properties": {
"listings": {
"items": { "$ref": "#/components/schemas/StoreWaitlistEntry" },
"type": "array",
"title": "Listings"
}
},
"type": "object",
"required": ["listings"],
"title": "StoreWaitlistsAllResponse"
},
"StreamChatRequest": {
"properties": {
"message": { "type": "string", "title": "Message" },
@@ -12780,203 +12315,6 @@
"required": ["loc", "msg", "type"],
"title": "ValidationError"
},
"WaitlistAdminListResponse": {
"properties": {
"waitlists": {
"items": { "$ref": "#/components/schemas/WaitlistAdminResponse" },
"type": "array",
"title": "Waitlists"
},
"totalCount": { "type": "integer", "title": "Totalcount" }
},
"type": "object",
"required": ["waitlists", "totalCount"],
"title": "WaitlistAdminListResponse",
"description": "Response model for listing all waitlists (admin view)."
},
"WaitlistAdminResponse": {
"properties": {
"id": { "type": "string", "title": "Id" },
"createdAt": { "type": "string", "title": "Createdat" },
"updatedAt": { "type": "string", "title": "Updatedat" },
"slug": { "type": "string", "title": "Slug" },
"name": { "type": "string", "title": "Name" },
"subHeading": { "type": "string", "title": "Subheading" },
"description": { "type": "string", "title": "Description" },
"categories": {
"items": { "type": "string" },
"type": "array",
"title": "Categories"
},
"imageUrls": {
"items": { "type": "string" },
"type": "array",
"title": "Imageurls"
},
"videoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Videourl"
},
"agentOutputDemoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Agentoutputdemourl"
},
"status": { "$ref": "#/components/schemas/WaitlistExternalStatus" },
"votes": { "type": "integer", "title": "Votes" },
"signupCount": { "type": "integer", "title": "Signupcount" },
"storeListingId": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Storelistingid"
},
"owningUserId": { "type": "string", "title": "Owninguserid" }
},
"type": "object",
"required": [
"id",
"createdAt",
"updatedAt",
"slug",
"name",
"subHeading",
"description",
"categories",
"imageUrls",
"status",
"votes",
"signupCount",
"owningUserId"
],
"title": "WaitlistAdminResponse",
"description": "Admin response model with full waitlist details including internal data."
},
"WaitlistCreateRequest": {
"properties": {
"name": { "type": "string", "title": "Name" },
"slug": { "type": "string", "title": "Slug" },
"subHeading": { "type": "string", "title": "Subheading" },
"description": { "type": "string", "title": "Description" },
"categories": {
"items": { "type": "string" },
"type": "array",
"title": "Categories",
"default": []
},
"imageUrls": {
"items": { "type": "string" },
"type": "array",
"title": "Imageurls",
"default": []
},
"videoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Videourl"
},
"agentOutputDemoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Agentoutputdemourl"
}
},
"type": "object",
"required": ["name", "slug", "subHeading", "description"],
"title": "WaitlistCreateRequest",
"description": "Request model for creating a new waitlist."
},
"WaitlistExternalStatus": {
"type": "string",
"enum": ["DONE", "NOT_STARTED", "CANCELED", "WORK_IN_PROGRESS"],
"title": "WaitlistExternalStatus"
},
"WaitlistSignup": {
"properties": {
"type": { "type": "string", "title": "Type" },
"userId": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Userid"
},
"email": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Email"
},
"username": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Username"
}
},
"type": "object",
"required": ["type"],
"title": "WaitlistSignup",
"description": "Individual signup entry for a waitlist."
},
"WaitlistSignupListResponse": {
"properties": {
"waitlistId": { "type": "string", "title": "Waitlistid" },
"signups": {
"items": { "$ref": "#/components/schemas/WaitlistSignup" },
"type": "array",
"title": "Signups"
},
"totalCount": { "type": "integer", "title": "Totalcount" }
},
"type": "object",
"required": ["waitlistId", "signups", "totalCount"],
"title": "WaitlistSignupListResponse",
"description": "Response model for listing waitlist signups."
},
"WaitlistUpdateRequest": {
"properties": {
"name": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Name"
},
"slug": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Slug"
},
"subHeading": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Subheading"
},
"description": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Description"
},
"categories": {
"anyOf": [
{ "items": { "type": "string" }, "type": "array" },
{ "type": "null" }
],
"title": "Categories"
},
"imageUrls": {
"anyOf": [
{ "items": { "type": "string" }, "type": "array" },
{ "type": "null" }
],
"title": "Imageurls"
},
"videoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Videourl"
},
"agentOutputDemoUrl": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Agentoutputdemourl"
},
"status": {
"anyOf": [
{ "$ref": "#/components/schemas/WaitlistExternalStatus" },
{ "type": "null" }
]
},
"storeListingId": {
"anyOf": [{ "type": "string" }, { "type": "null" }],
"title": "Storelistingid"
}
},
"type": "object",
"title": "WaitlistUpdateRequest",
"description": "Request model for updating a waitlist."
},
"Webhook": {
"properties": {
"id": { "type": "string", "title": "Id" },