mirror of
https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-01-10 07:38:04 -05:00
feat(platform, blocks): Webhook-triggered blocks (#8358)
- feat(blocks): Add GitHub Pull Request Trigger block

## feat(platform): Add support for Webhook-triggered blocks

- ⚠️ Add `PLATFORM_BASE_URL` setting
- Add webhook config option and `BlockType.WEBHOOK` to `Block`
  - Add check to `Block.__init__` to enforce type and shape of webhook event filter
  - Add check to `Block.__init__` to enforce `payload` input on webhook blocks
  - Add check to `Block.__init__` to disable webhook blocks if `PLATFORM_BASE_URL` is not set
- Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system
  - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id`
  - Add `set_node_webhook(..)` in `backend.data.graph`
- Add webhook-related endpoints:
  - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads and create graph executions for all associated nodes
    - Add `Node.is_triggered_by_event_type(..)` helper method
  - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook
  - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations`
- Add `backend.integrations.webhooks` module, including:
  - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc.
    - Add calls to these hooks in the graph create/update endpoints
  - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads

## Other improvements

- fix(blocks): Allow having an input and output pin with the same name
- fix(blocks): Add tooltip with description in places where block inputs are rendered without `NodeHandle`
- feat(blocks): Allow hiding inputs (e.g. `payload`) with `SchemaField(hidden=True)`
- fix(frontend): Fix `MultiSelector` component styling
- feat(frontend): Add `AlertDialog` UI component
- feat(frontend): Add `NodeMultiSelectInput` component
- feat(backend/data): Add `NodeModel` with `graph_id`, `graph_version`; `GraphModel` with `user_id`
  - Add `make_graph_model(..)` helper function in `backend.data.graph`
- refactor(backend/data): Make `RedisEventQueue` generic and move to `backend.data.execution`
- refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode`
- dx(backend): Add `MissingConfigError`, `NeedConfirmation` exception

---------

Co-authored-by: Zamil Majdy <zamil.majdy@agpt.co>
committed by GitHub
parent 464b5309d7
commit eef9bbe991
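The lifecycle described in the commit message (webhook ingress -> Redis broadcast -> graph executions) boils down to roughly the following shape. This is a hedged sketch for orientation only, not code from this commit; the `TriggerNode` stand-in and every helper name here are illustrative assumptions.

# Hedged sketch of the ingress flow described above; all names are illustrative.
from dataclasses import dataclass


@dataclass
class TriggerNode:
    graph_id: str
    subscribed_events: set[str]

    def is_triggered_by_event_type(self, event_type: str) -> bool:
        # Plays the role of the Node.is_triggered_by_event_type(..) helper.
        return event_type in self.subscribed_events


def handle_ingress(webhook_id: str, event_type: str, payload: dict, nodes: list[TriggerNode]):
    # 1. Broadcast the event on the Redis channel `webhooks/{webhook_id}/{event_type}`
    print(f"publish webhooks/{webhook_id}/{event_type}")  # stand-in for the WebhookEvent pub/sub call
    # 2. Initiate a graph execution for every node triggered by this event
    for node in nodes:
        if node.is_triggered_by_event_type(event_type):
            print(f"start execution of graph {node.graph_id} with the payload as input")


handle_ingress(
    "wh-123",
    "pull_request.synchronize",
    {"action": "synchronize"},
    [TriggerNode("graph-1", {"pull_request.synchronize"})],
)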
@@ -28,8 +28,15 @@ SUPABASE_URL=http://localhost:8000
 SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
 SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
 
-# For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow for integrations to work.
-FRONTEND_BASE_URL=http://localhost:3000
+## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
+## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
+# FRONTEND_BASE_URL=http://localhost:3000
+
+## PLATFORM_BASE_URL must be set to a *publicly accessible* URL pointing to your backend
+## to use the platform's webhook-related functionality.
+## If you are developing locally, you can use something like ngrok to get a public URL
+## and tunnel it to your locally running backend.
+PLATFORM_BASE_URL=https://your-public-url-here
 
 ## == INTEGRATION CREDENTIALS == ##
 # Each set of server side credentials is required for the corresponding 3rd party
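Why `PLATFORM_BASE_URL` must be publicly reachable: the webhook provider (e.g. GitHub) delivers payloads to the new ingress endpoint, so the registered payload URL has to resolve from the outside. A hedged sketch of how that URL might be derived from the setting; the endpoint shape comes from the commit message above, but the exact URL prefix is an assumption:

# Illustrative only: deriving the payload URL a WebhooksManager would register
# with the provider. The exact path prefix is an assumption, not from this diff.
PLATFORM_BASE_URL = "https://your-public-url-here"  # e.g. an ngrok tunnel URL

def ingress_url(provider: str, webhook_id: str) -> str:
    return f"{PLATFORM_BASE_URL}/integrations/{provider}/webhooks/{webhook_id}/ingress"

print(ingress_url("github", "wh-123"))
# https://your-public-url-here/integrations/github/webhooks/wh-123/ingress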
@@ -60,13 +60,6 @@ for block_cls in all_subclasses(Block):
     input_schema = block.input_schema.model_fields
     output_schema = block.output_schema.model_fields
 
-    # Prevent duplicate field name in input_schema and output_schema
-    duplicate_field_names = set(input_schema.keys()) & set(output_schema.keys())
-    if duplicate_field_names:
-        raise ValueError(
-            f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
-        )
-
     # Make sure `error` field is a string in the output schema
    if "error" in output_schema and output_schema["error"].annotation is not str:
        raise ValueError(
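This check is removed because the webhook trigger blocks introduced in this PR legitimately expose the same pin name on both sides: `payload` is a hidden input and also an output (see `triggers.py` below). A minimal illustration with stand-in classes, not the real `BlockSchema` subclasses:

# Stand-in classes only: the same pin name may now appear in both schemas.
class Input:
    payload: dict  # hidden input, filled from the received webhook payload

class Output:
    payload: dict  # the same payload, re-emitted for downstream nodes

assert set(Input.__annotations__) & set(Output.__annotations__) == {"payload"}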
@@ -27,7 +27,7 @@ def get_executor_manager_client():
 
 @thread_cached
 def get_event_bus():
-    from backend.data.queue import RedisExecutionEventBus
+    from backend.data.execution import RedisExecutionEventBus
 
     return RedisExecutionEventBus()
 
@@ -0,0 +1,700 @@
{
  "action": "synchronize",
  "number": 8358,
  "pull_request": {
    "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358",
    "id": 2128918491,
    "node_id": "PR_kwDOJKSTjM5-5Lfb",
    "html_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358",
    "diff_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.diff",
    "patch_url": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358.patch",
    "issue_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358",
    "number": 8358,
    "state": "open",
    "locked": false,
    "title": "feat(platform, blocks): Webhook-triggered blocks",
    "user": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "body": "- Resolves #8352\r\n\r\n## Changes 🏗️\r\n\r\n- feat(blocks): Add GitHub Pull Request Trigger block\r\n\r\n### feat(platform): Add support for Webhook-triggered blocks\r\n- ⚠️ Add `PLATFORM_BASE_URL` setting\r\n\r\n- Add webhook config option and `BlockType.WEBHOOK` to `Block`\r\n - Add check to `Block.__init__` to enforce type and shape of webhook event filter\r\n - Add check to `Block.__init__` to enforce `payload` input on webhook blocks\r\n\r\n- Add `Webhook` model + CRUD functions in `backend.data.integrations` to represent webhooks created by our system\r\n - Add `IntegrationWebhook` to DB schema + reference `AgentGraphNode.webhook_id`\r\n - Add `set_node_webhook(..)` in `backend.data.graph`\r\n\r\n- Add webhook-related endpoints:\r\n - `POST /integrations/{provider}/webhooks/{webhook_id}/ingress` endpoint, to receive webhook payloads, and for all associated nodes create graph executions\r\n - Add `Node.is_triggered_by_event_type(..)` helper method\r\n - `POST /integrations/{provider}/webhooks/{webhook_id}/ping` endpoint, to allow testing a webhook\r\n - Add `WebhookEvent` + pub/sub functions in `backend.data.integrations`\r\n\r\n- Add `backend.integrations.webhooks` module, including:\r\n - `graph_lifecycle_hooks`, e.g. `on_graph_activate(..)`, to handle corresponding webhook creation etc.\r\n - Add calls to these hooks in the graph create/update endpoints\r\n - `BaseWebhooksManager` + `GithubWebhooksManager` to handle creating + registering, removing + deregistering, and retrieving existing webhooks, and validating incoming payloads\r\n\r\n### Other improvements\r\n- fix(blocks): Allow having an input and output pin with the same name\r\n- feat(blocks): Allow hiding inputs (e.g. `payload`) with `SchemaField(hidden=True)`\r\n- feat(backend/data): Add `graph_id`, `graph_version` to `Node`; `user_id` to `GraphMeta`\r\n - Add `Creatable` versions of `Node`, `GraphMeta` and `Graph` without these properties\r\n - Add `graph_from_creatable(..)` helper function in `backend.data.graph`\r\n- refactor(backend/data): Make `RedisEventQueue` generic\r\n- refactor(frontend): Deduplicate & clean up code for different block types in `generateInputHandles(..)` in `CustomNode`\r\n- refactor(backend): Remove unused subgraph functionality\r\n\r\n## How it works\r\n- When a graph is created, the `on_graph_activate` and `on_node_activate` hooks are called on the graph and its nodes\r\n- If a webhook-triggered node has presets for all the relevant inputs, `on_node_activate` will get/create a suitable webhook and link it by setting `AgentGraphNode.webhook_id`\r\n - `on_node_activate` uses `webhook_manager.get_suitable_webhook(..)`, which tries to find a suitable webhook (with matching requirements) or creates it if none exists yet\r\n- When a graph is deactivated (in favor of a newer/other version) or deleted, `on_graph_deactivate` and `on_node_deactivate` are called on the graph and its nodes to clean up webhooks that are no longer in use\r\n- When a valid webhook payload is received, two things happen:\r\n 1. It is broadcast on the Redis channel `webhooks/{webhook_id}/{event_type}`\r\n 2. Graph executions are initiated for all nodes triggered by this webhook\r\n\r\n## TODO\r\n- [ ] #8537\r\n- [x] #8538\r\n- [ ] #8357\r\n- [ ] ~~#8554~~ can be done in a follow-up PR\r\n- [ ] Test test test!\r\n- [ ] Add note on `repo` input of webhook blocks that the credentials used must have the right permissions for the given organization/repo\r\n- [x] Implement proper detection and graceful handling of webhook creation failing due to insufficient permissions. This should give a clear message to the user to e.g. \"give the app access to this organization in your settings\".\r\n- [ ] Nice-to-have: make a button on webhook blocks to trigger a ping and check its result. The API endpoints for this is already implemented.",
    "created_at": "2024-10-16T22:13:47Z",
    "updated_at": "2024-11-11T18:34:54Z",
    "closed_at": null,
    "merged_at": null,
    "merge_commit_sha": "cbfd0cdd8db52cdd5a3b7ce088fc0ab4617a652e",
    "assignee": {
      "login": "Pwuts",
      "id": 12185583,
      "node_id": "MDQ6VXNlcjEyMTg1NTgz",
      "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Pwuts",
      "html_url": "https://github.com/Pwuts",
      "followers_url": "https://api.github.com/users/Pwuts/followers",
      "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
      "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
      "organizations_url": "https://api.github.com/users/Pwuts/orgs",
      "repos_url": "https://api.github.com/users/Pwuts/repos",
      "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Pwuts/received_events",
      "type": "User",
      "user_view_type": "public",
      "site_admin": false
    },
    "assignees": [
      {
        "login": "Pwuts",
        "id": 12185583,
        "node_id": "MDQ6VXNlcjEyMTg1NTgz",
        "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Pwuts",
        "html_url": "https://github.com/Pwuts",
        "followers_url": "https://api.github.com/users/Pwuts/followers",
        "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
        "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
        "organizations_url": "https://api.github.com/users/Pwuts/orgs",
        "repos_url": "https://api.github.com/users/Pwuts/repos",
        "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Pwuts/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_reviewers": [
      {
        "login": "kcze",
        "id": 34861343,
        "node_id": "MDQ6VXNlcjM0ODYxMzQz",
        "avatar_url": "https://avatars.githubusercontent.com/u/34861343?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/kcze",
        "html_url": "https://github.com/kcze",
        "followers_url": "https://api.github.com/users/kcze/followers",
        "following_url": "https://api.github.com/users/kcze/following{/other_user}",
        "gists_url": "https://api.github.com/users/kcze/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/kcze/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/kcze/subscriptions",
        "organizations_url": "https://api.github.com/users/kcze/orgs",
        "repos_url": "https://api.github.com/users/kcze/repos",
        "events_url": "https://api.github.com/users/kcze/events{/privacy}",
        "received_events_url": "https://api.github.com/users/kcze/received_events",
        "type": "User",
        "user_view_type": "public",
        "site_admin": false
      }
    ],
    "requested_teams": [
      {
        "name": "DevOps",
        "id": 9547361,
        "node_id": "T_kwDOB8roIc4Aka5h",
        "slug": "devops",
        "description": "",
        "privacy": "closed",
        "notification_setting": "notifications_enabled",
        "url": "https://api.github.com/organizations/130738209/team/9547361",
        "html_url": "https://github.com/orgs/Significant-Gravitas/teams/devops",
        "members_url": "https://api.github.com/organizations/130738209/team/9547361/members{/member}",
        "repositories_url": "https://api.github.com/organizations/130738209/team/9547361/repos",
        "permission": "pull",
        "parent": null
      }
    ],
    "labels": [
      {
        "id": 5272676214,
        "node_id": "LA_kwDOJKSTjM8AAAABOkandg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/documentation",
        "name": "documentation",
        "color": "0075ca",
        "default": true,
        "description": "Improvements or additions to documentation"
      },
      {
        "id": 5410633769,
        "node_id": "LA_kwDOJKSTjM8AAAABQn-4KQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/size/xl",
        "name": "size/xl",
        "color": "E751DD",
        "default": false,
        "description": ""
      },
      {
        "id": 6892322271,
        "node_id": "LA_kwDOJKSTjM8AAAABmtB93w",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/Review%20effort%20[1-5]:%204",
        "name": "Review effort [1-5]: 4",
        "color": "d1bcf9",
        "default": false,
        "description": null
      },
      {
        "id": 7218433025,
        "node_id": "LA_kwDOJKSTjM8AAAABrkCMAQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/frontend",
        "name": "platform/frontend",
        "color": "033C07",
        "default": false,
        "description": "AutoGPT Platform - Front end"
      },
      {
        "id": 7219356193,
        "node_id": "LA_kwDOJKSTjM8AAAABrk6iIQ",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/backend",
        "name": "platform/backend",
        "color": "ededed",
        "default": false,
        "description": "AutoGPT Platform - Back end"
      },
      {
        "id": 7515330106,
        "node_id": "LA_kwDOJKSTjM8AAAABv_LWOg",
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels/platform/blocks",
        "name": "platform/blocks",
        "color": "eb5757",
        "default": false,
        "description": null
      }
    ],
    "milestone": null,
    "draft": false,
    "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits",
    "review_comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments",
    "review_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}",
    "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments",
    "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0",
    "head": {
      "label": "Significant-Gravitas:reinier/open-1961-implement-github-on-pull-request-block",
      "ref": "reinier/open-1961-implement-github-on-pull-request-block",
      "sha": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "homepage": "https://agpt.co",
        "size": 181894,
        "stargazers_count": 168203,
        "watchers_count": 168203,
        "language": "Python",
        "has_issues": true,
        "has_projects": true,
        "has_downloads": true,
        "has_wiki": true,
        "has_pages": false,
        "has_discussions": true,
        "forks_count": 44376,
        "mirror_url": null,
        "archived": false,
        "disabled": false,
        "open_issues_count": 189,
        "license": {
          "key": "other",
          "name": "Other",
          "spdx_id": "NOASSERTION",
          "url": null,
          "node_id": "MDc6TGljZW5zZTA="
        },
        "allow_forking": true,
        "is_template": false,
        "web_commit_signoff_required": false,
        "topics": [
          "ai",
          "artificial-intelligence",
          "autonomous-agents",
          "gpt-4",
          "openai",
          "python"
        ],
        "visibility": "public",
        "forks": 44376,
        "open_issues": 189,
        "watchers": 168203,
        "default_branch": "master",
        "allow_squash_merge": true,
        "allow_merge_commit": false,
        "allow_rebase_merge": false,
        "allow_auto_merge": true,
        "delete_branch_on_merge": true,
        "allow_update_branch": true,
        "use_squash_pr_title_as_default": true,
        "squash_merge_commit_message": "COMMIT_MESSAGES",
        "squash_merge_commit_title": "PR_TITLE",
        "merge_commit_message": "BLANK",
        "merge_commit_title": "PR_TITLE"
      }
    },
    "base": {
      "label": "Significant-Gravitas:dev",
      "ref": "dev",
      "sha": "0b5b95eff5e18c1e162d2b30b66a7be2bed1cbc2",
      "user": {
        "login": "Significant-Gravitas",
        "id": 130738209,
        "node_id": "O_kgDOB8roIQ",
        "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
        "gravatar_id": "",
        "url": "https://api.github.com/users/Significant-Gravitas",
        "html_url": "https://github.com/Significant-Gravitas",
        "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
        "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
        "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
        "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
        "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
        "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
        "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
        "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
        "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
        "type": "Organization",
        "user_view_type": "public",
        "site_admin": false
      },
      "repo": {
        "id": 614765452,
        "node_id": "R_kgDOJKSTjA",
        "name": "AutoGPT",
        "full_name": "Significant-Gravitas/AutoGPT",
        "private": false,
        "owner": {
          "login": "Significant-Gravitas",
          "id": 130738209,
          "node_id": "O_kgDOB8roIQ",
          "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
          "gravatar_id": "",
          "url": "https://api.github.com/users/Significant-Gravitas",
          "html_url": "https://github.com/Significant-Gravitas",
          "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
          "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
          "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
          "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
          "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
          "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
          "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
          "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
          "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
          "type": "Organization",
          "user_view_type": "public",
          "site_admin": false
        },
        "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
        "fork": false,
        "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
        "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
        "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
        "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
        "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
        "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
        "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
        "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
        "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
        "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
        "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
        "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
        "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
        "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
        "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
        "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
        "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
        "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
        "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
        "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
        "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
        "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
        "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
        "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
        "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
        "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
        "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
        "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
        "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
        "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
        "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
        "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
        "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
        "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
        "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
        "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
        "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
        "created_at": "2023-03-16T09:21:07Z",
        "updated_at": "2024-11-11T18:16:29Z",
        "pushed_at": "2024-11-11T18:34:52Z",
        "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
        "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
        "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
        "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
        "homepage": "https://agpt.co",
        "size": 181894,
        "stargazers_count": 168203,
        "watchers_count": 168203,
        "language": "Python",
        "has_issues": true,
        "has_projects": true,
        "has_downloads": true,
        "has_wiki": true,
        "has_pages": false,
        "has_discussions": true,
        "forks_count": 44376,
        "mirror_url": null,
        "archived": false,
        "disabled": false,
        "open_issues_count": 189,
        "license": {
          "key": "other",
          "name": "Other",
          "spdx_id": "NOASSERTION",
          "url": null,
          "node_id": "MDc6TGljZW5zZTA="
        },
        "allow_forking": true,
        "is_template": false,
        "web_commit_signoff_required": false,
        "topics": [
          "ai",
          "artificial-intelligence",
          "autonomous-agents",
          "gpt-4",
          "openai",
          "python"
        ],
        "visibility": "public",
        "forks": 44376,
        "open_issues": 189,
        "watchers": 168203,
        "default_branch": "master",
        "allow_squash_merge": true,
        "allow_merge_commit": false,
        "allow_rebase_merge": false,
        "allow_auto_merge": true,
        "delete_branch_on_merge": true,
        "allow_update_branch": true,
        "use_squash_pr_title_as_default": true,
        "squash_merge_commit_message": "COMMIT_MESSAGES",
        "squash_merge_commit_title": "PR_TITLE",
        "merge_commit_message": "BLANK",
        "merge_commit_title": "PR_TITLE"
      }
    },
    "_links": {
      "self": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358" },
      "html": { "href": "https://github.com/Significant-Gravitas/AutoGPT/pull/8358" },
      "issue": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358" },
      "comments": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/8358/comments" },
      "review_comments": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/comments" },
      "review_comment": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/comments{/number}" },
      "commits": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls/8358/commits" },
      "statuses": { "href": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/8f708a2b60463eec10747d8f45dead35b5a45bd0" }
    },
    "author_association": "MEMBER",
    "auto_merge": null,
    "active_lock_reason": null,
    "merged": false,
    "mergeable": null,
    "rebaseable": null,
    "mergeable_state": "unknown",
    "merged_by": null,
    "comments": 12,
    "review_comments": 29,
    "maintainer_can_modify": false,
    "commits": 62,
    "additions": 1674,
    "deletions": 331,
    "changed_files": 36
  },
  "before": "f40aef87672203f47bbbd53f83fae0964c5624da",
  "after": "8f708a2b60463eec10747d8f45dead35b5a45bd0",
  "repository": {
    "id": 614765452,
    "node_id": "R_kgDOJKSTjA",
    "name": "AutoGPT",
    "full_name": "Significant-Gravitas/AutoGPT",
    "private": false,
    "owner": {
      "login": "Significant-Gravitas",
      "id": 130738209,
      "node_id": "O_kgDOB8roIQ",
      "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
      "gravatar_id": "",
      "url": "https://api.github.com/users/Significant-Gravitas",
      "html_url": "https://github.com/Significant-Gravitas",
      "followers_url": "https://api.github.com/users/Significant-Gravitas/followers",
      "following_url": "https://api.github.com/users/Significant-Gravitas/following{/other_user}",
      "gists_url": "https://api.github.com/users/Significant-Gravitas/gists{/gist_id}",
      "starred_url": "https://api.github.com/users/Significant-Gravitas/starred{/owner}{/repo}",
      "subscriptions_url": "https://api.github.com/users/Significant-Gravitas/subscriptions",
      "organizations_url": "https://api.github.com/users/Significant-Gravitas/orgs",
      "repos_url": "https://api.github.com/users/Significant-Gravitas/repos",
      "events_url": "https://api.github.com/users/Significant-Gravitas/events{/privacy}",
      "received_events_url": "https://api.github.com/users/Significant-Gravitas/received_events",
      "type": "Organization",
      "user_view_type": "public",
      "site_admin": false
    },
    "html_url": "https://github.com/Significant-Gravitas/AutoGPT",
    "description": "AutoGPT is the vision of accessible AI for everyone, to use and to build on. Our mission is to provide the tools, so that you can focus on what matters.",
    "fork": false,
    "url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT",
    "forks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/forks",
    "keys_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/keys{/key_id}",
    "collaborators_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/collaborators{/collaborator}",
    "teams_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/teams",
    "hooks_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/hooks",
    "issue_events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/events{/number}",
    "events_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/events",
    "assignees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/assignees{/user}",
    "branches_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/branches{/branch}",
    "tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/tags",
    "blobs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/blobs{/sha}",
    "git_tags_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/tags{/sha}",
    "git_refs_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/refs{/sha}",
    "trees_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/trees{/sha}",
    "statuses_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/statuses/{sha}",
    "languages_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/languages",
    "stargazers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/stargazers",
    "contributors_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contributors",
    "subscribers_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscribers",
    "subscription_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/subscription",
    "commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/commits{/sha}",
    "git_commits_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/git/commits{/sha}",
    "comments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/comments{/number}",
    "issue_comment_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues/comments{/number}",
    "contents_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/contents/{+path}",
    "compare_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/compare/{base}...{head}",
    "merges_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/merges",
    "archive_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/{archive_format}{/ref}",
    "downloads_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/downloads",
    "issues_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/issues{/number}",
    "pulls_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/pulls{/number}",
    "milestones_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/milestones{/number}",
    "notifications_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/notifications{?since,all,participating}",
    "labels_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/labels{/name}",
    "releases_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/releases{/id}",
    "deployments_url": "https://api.github.com/repos/Significant-Gravitas/AutoGPT/deployments",
    "created_at": "2023-03-16T09:21:07Z",
    "updated_at": "2024-11-11T18:16:29Z",
    "pushed_at": "2024-11-11T18:34:52Z",
    "git_url": "git://github.com/Significant-Gravitas/AutoGPT.git",
    "ssh_url": "git@github.com:Significant-Gravitas/AutoGPT.git",
    "clone_url": "https://github.com/Significant-Gravitas/AutoGPT.git",
    "svn_url": "https://github.com/Significant-Gravitas/AutoGPT",
    "homepage": "https://agpt.co",
    "size": 181894,
    "stargazers_count": 168203,
    "watchers_count": 168203,
    "language": "Python",
    "has_issues": true,
    "has_projects": true,
    "has_downloads": true,
    "has_wiki": true,
    "has_pages": false,
    "has_discussions": true,
    "forks_count": 44376,
    "mirror_url": null,
    "archived": false,
    "disabled": false,
    "open_issues_count": 189,
    "license": {
      "key": "other",
      "name": "Other",
      "spdx_id": "NOASSERTION",
      "url": null,
      "node_id": "MDc6TGljZW5zZTA="
    },
    "allow_forking": true,
    "is_template": false,
    "web_commit_signoff_required": false,
    "topics": [
      "ai",
      "artificial-intelligence",
      "autonomous-agents",
      "gpt-4",
      "openai",
      "python"
    ],
    "visibility": "public",
    "forks": 44376,
    "open_issues": 189,
    "watchers": 168203,
    "default_branch": "master",
    "custom_properties": {}
  },
  "organization": {
    "login": "Significant-Gravitas",
    "id": 130738209,
    "node_id": "O_kgDOB8roIQ",
    "url": "https://api.github.com/orgs/Significant-Gravitas",
    "repos_url": "https://api.github.com/orgs/Significant-Gravitas/repos",
    "events_url": "https://api.github.com/orgs/Significant-Gravitas/events",
    "hooks_url": "https://api.github.com/orgs/Significant-Gravitas/hooks",
    "issues_url": "https://api.github.com/orgs/Significant-Gravitas/issues",
    "members_url": "https://api.github.com/orgs/Significant-Gravitas/members{/member}",
    "public_members_url": "https://api.github.com/orgs/Significant-Gravitas/public_members{/member}",
    "avatar_url": "https://avatars.githubusercontent.com/u/130738209?v=4",
    "description": ""
  },
  "enterprise": {
    "id": 149607,
    "slug": "significant-gravitas",
    "name": "Significant Gravitas",
    "node_id": "E_kgDOAAJIZw",
    "avatar_url": "https://avatars.githubusercontent.com/b/149607?v=4",
    "description": "The creators of AutoGPT",
    "website_url": "discord.gg/autogpt",
    "html_url": "https://github.com/enterprises/significant-gravitas",
    "created_at": "2024-04-18T17:43:53Z",
    "updated_at": "2024-10-23T16:59:55Z"
  },
  "sender": {
    "login": "Pwuts",
    "id": 12185583,
    "node_id": "MDQ6VXNlcjEyMTg1NTgz",
    "avatar_url": "https://avatars.githubusercontent.com/u/12185583?v=4",
    "gravatar_id": "",
    "url": "https://api.github.com/users/Pwuts",
    "html_url": "https://github.com/Pwuts",
    "followers_url": "https://api.github.com/users/Pwuts/followers",
    "following_url": "https://api.github.com/users/Pwuts/following{/other_user}",
    "gists_url": "https://api.github.com/users/Pwuts/gists{/gist_id}",
    "starred_url": "https://api.github.com/users/Pwuts/starred{/owner}{/repo}",
    "subscriptions_url": "https://api.github.com/users/Pwuts/subscriptions",
    "organizations_url": "https://api.github.com/users/Pwuts/orgs",
    "repos_url": "https://api.github.com/users/Pwuts/repos",
    "events_url": "https://api.github.com/users/Pwuts/events{/privacy}",
    "received_events_url": "https://api.github.com/users/Pwuts/received_events",
    "type": "User",
    "user_view_type": "public",
    "site_admin": false
  }
}
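Of this 700-line example payload, the trigger block defined in the next file only reads a handful of fields. A quick illustration of the accesses it performs (the path matches `EXAMPLE_PAYLOAD_FILE` below; outputs match the block's `test_output`):

import json
from pathlib import Path

# The file above, as referenced by GithubPullRequestTriggerBlock.EXAMPLE_PAYLOAD_FILE
payload = json.loads(Path("example_payloads/pull_request.synchronize.json").read_text())

print(payload["action"])                    # "synchronize"  -> `event` output
print(payload["number"])                    # 8358           -> `number` output
print(payload["sender"]["login"])           # "Pwuts"        -> `triggered_by_user` output
print(payload["pull_request"]["html_url"])  # PR URL         -> `pull_request_url` output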
156  autogpt_platform/backend/backend/blocks/github/triggers.py  Normal file

@@ -0,0 +1,156 @@
import json
import logging
from pathlib import Path

from pydantic import BaseModel

from backend.data.block import (
    Block,
    BlockCategory,
    BlockOutput,
    BlockSchema,
    BlockWebhookConfig,
)
from backend.data.model import SchemaField

from ._auth import (
    TEST_CREDENTIALS,
    TEST_CREDENTIALS_INPUT,
    GithubCredentialsField,
    GithubCredentialsInput,
)

logger = logging.getLogger(__name__)


# --8<-- [start:GithubTriggerExample]
class GitHubTriggerBase:
    class Input(BlockSchema):
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
        repo: str = SchemaField(
            description=(
                "Repository to subscribe to.\n\n"
                "**Note:** Make sure your GitHub credentials have permissions "
                "to create webhooks on this repo."
            ),
            placeholder="{owner}/{repo}",
        )
        # --8<-- [start:example-payload-field]
        payload: dict = SchemaField(hidden=True, default={})
        # --8<-- [end:example-payload-field]

    class Output(BlockSchema):
        payload: dict = SchemaField(
            description="The complete webhook payload that was received from GitHub. "
            "Includes information about the affected resource (e.g. pull request), "
            "the event, and the user who triggered the event."
        )
        triggered_by_user: dict = SchemaField(
            description="Object representing the GitHub user who triggered the event"
        )
        error: str = SchemaField(
            description="Error message if the payload could not be processed"
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        yield "payload", input_data.payload
        yield "triggered_by_user", input_data.payload["sender"]


class GithubPullRequestTriggerBlock(GitHubTriggerBase, Block):
    EXAMPLE_PAYLOAD_FILE = (
        Path(__file__).parent / "example_payloads" / "pull_request.synchronize.json"
    )

    # --8<-- [start:example-event-filter]
    class Input(GitHubTriggerBase.Input):
        class EventsFilter(BaseModel):
            """
            https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request
            """

            opened: bool = False
            edited: bool = False
            closed: bool = False
            reopened: bool = False
            synchronize: bool = False
            assigned: bool = False
            unassigned: bool = False
            labeled: bool = False
            unlabeled: bool = False
            converted_to_draft: bool = False
            locked: bool = False
            unlocked: bool = False
            enqueued: bool = False
            dequeued: bool = False
            milestoned: bool = False
            demilestoned: bool = False
            ready_for_review: bool = False
            review_requested: bool = False
            review_request_removed: bool = False
            auto_merge_enabled: bool = False
            auto_merge_disabled: bool = False

        events: EventsFilter = SchemaField(
            title="Events", description="The events to subscribe to"
        )
    # --8<-- [end:example-event-filter]

    class Output(GitHubTriggerBase.Output):
        event: str = SchemaField(
            description="The PR event that triggered the webhook (e.g. 'opened')"
        )
        number: int = SchemaField(description="The number of the affected pull request")
        pull_request: dict = SchemaField(
            description="Object representing the affected pull request"
        )
        pull_request_url: str = SchemaField(
            description="The URL of the affected pull request"
        )

    def __init__(self):
        from backend.integrations.webhooks.github import GithubWebhookType

        example_payload = json.loads(self.EXAMPLE_PAYLOAD_FILE.read_text())

        super().__init__(
            id="6c60ec01-8128-419e-988f-96a063ee2fea",
            description="This block triggers on pull request events and outputs the event type and payload.",
            categories={BlockCategory.DEVELOPER_TOOLS, BlockCategory.INPUT},
            input_schema=GithubPullRequestTriggerBlock.Input,
            output_schema=GithubPullRequestTriggerBlock.Output,
            # --8<-- [start:example-webhook_config]
            webhook_config=BlockWebhookConfig(
                provider="github",
                webhook_type=GithubWebhookType.REPO,
                resource_format="{repo}",
                event_filter_input="events",
                event_format="pull_request.{event}",
            ),
            # --8<-- [end:example-webhook_config]
            test_input={
                "repo": "Significant-Gravitas/AutoGPT",
                "events": {"opened": True, "synchronize": True},
                "credentials": TEST_CREDENTIALS_INPUT,
                "payload": example_payload,
            },
            test_credentials=TEST_CREDENTIALS,
            test_output=[
                ("payload", example_payload),
                ("triggered_by_user", example_payload["sender"]),
                ("event", example_payload["action"]),
                ("number", example_payload["number"]),
                ("pull_request", example_payload["pull_request"]),
                ("pull_request_url", example_payload["pull_request"]["html_url"]),
            ],
        )

    def run(self, input_data: Input, **kwargs) -> BlockOutput:  # type: ignore
        yield from super().run(input_data, **kwargs)
        yield "event", input_data.payload["action"]
        yield "number", input_data.payload["number"]
        yield "pull_request", input_data.payload["pull_request"]
        yield "pull_request_url", input_data.payload["pull_request"]["html_url"]


# --8<-- [end:GithubTriggerExample]
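How the `webhook_config` above interacts with the `events` filter: each event selected in the filter is formatted with `event_format`, per the `BlockWebhookConfig` docstrings further down. A small sketch of that expansion; the helper itself is illustrative, not code from this commit:

# Illustrative helper: apply event_format to each selected event in the filter.
def subscribed_events(events_filter: dict[str, bool], event_format: str) -> set[str]:
    return {event_format.format(event=name) for name, enabled in events_filter.items() if enabled}

print(subscribed_events({"opened": True, "synchronize": True, "closed": False},
                        "pull_request.{event}"))
# {'pull_request.opened', 'pull_request.synchronize'}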
@@ -20,9 +20,12 @@ from prisma.models import AgentBlock
 from pydantic import BaseModel
 
 from backend.util import json
+from backend.util.settings import Config
 
 from .model import CREDENTIALS_FIELD_NAME, ContributorDetails, CredentialsMetaInput
 
+app_config = Config()
+
 BlockData = tuple[str, Any]  # Input & Output data should be a tuple of (name, data).
 BlockInput = dict[str, Any]  # Input: 1 input pin consumes 1 data.
 BlockOutput = Generator[BlockData, None, None]  # Output: 1 output pin produces n data.
@@ -34,6 +37,7 @@ class BlockType(Enum):
     INPUT = "Input"
     OUTPUT = "Output"
     NOTE = "Note"
+    WEBHOOK = "Webhook"
     AGENT = "Agent"
 
 
@@ -177,6 +181,41 @@ class EmptySchema(BlockSchema):
     pass
 
 
+# --8<-- [start:BlockWebhookConfig]
+class BlockWebhookConfig(BaseModel):
+    provider: str
+    """The service provider that the webhook connects to"""
+
+    webhook_type: str
+    """
+    Identifier for the webhook type. E.g. GitHub has repo and organization level hooks.
+
+    Only for use in the corresponding `WebhooksManager`.
+    """
+
+    resource_format: str
+    """
+    Template string for the resource that a block instance subscribes to.
+    Fields will be filled from the block's inputs (except `payload`).
+
+    Example: `f"{repo}/pull_requests"` (note: not how it's actually implemented)
+
+    Only for use in the corresponding `WebhooksManager`.
+    """
+
+    event_filter_input: str
+    """Name of the block's event filter input."""
+
+    event_format: str = "{event}"
+    """
+    Template string for the event(s) that a block instance subscribes to.
+    Applied individually to each event selected in the event filter input.
+
+    Example: `"pull_request.{event}"` -> `"pull_request.opened"`
+    """
+# --8<-- [end:BlockWebhookConfig]
+
+
 class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
     def __init__(
         self,
@@ -193,6 +232,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
         disabled: bool = False,
         static_output: bool = False,
         block_type: BlockType = BlockType.STANDARD,
+        webhook_config: Optional[BlockWebhookConfig] = None,
     ):
         """
         Initialize the block with the given schema.
@@ -223,9 +263,38 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
         self.contributors = contributors or set()
         self.disabled = disabled
         self.static_output = static_output
-        self.block_type = block_type
+        self.block_type = block_type if not webhook_config else BlockType.WEBHOOK
+        self.webhook_config = webhook_config
         self.execution_stats = {}
 
+        if self.webhook_config:
+            # Enforce shape of webhook event filter
+            event_filter_field = self.input_schema.model_fields[
+                self.webhook_config.event_filter_input
+            ]
+            if not (
+                isinstance(event_filter_field.annotation, type)
+                and issubclass(event_filter_field.annotation, BaseModel)
+                and all(
+                    field.annotation is bool
+                    for field in event_filter_field.annotation.model_fields.values()
+                )
+            ):
+                raise NotImplementedError(
+                    f"{self.name} has an invalid webhook event selector: "
+                    "field must be a BaseModel and all its fields must be boolean"
+                )
+
+            # Enforce presence of 'payload' input
+            if "payload" not in self.input_schema.model_fields:
+                raise TypeError(
+                    f"{self.name} is webhook-triggered but has no 'payload' input"
+                )
+
+            # Disable webhook-triggered block if webhook functionality not available
+            if not app_config.platform_base_url:
+                self.disabled = True
+
     @classmethod
     def create(cls: Type["Block"]) -> "Block":
         return cls()
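What the new `__init__` validation accepts: the event filter input's annotation must be a pydantic `BaseModel` whose fields are all booleans. A minimal filter that passes the check (illustrative, assumes pydantic v2):

from pydantic import BaseModel

class EventsFilter(BaseModel):
    opened: bool = False
    closed: bool = False

# Mirrors the check above: a BaseModel subclass whose fields are all `bool`.
assert isinstance(EventsFilter, type) and issubclass(EventsFilter, BaseModel)
assert all(f.annotation is bool for f in EventsFilter.model_fields.values())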
@@ -1,7 +1,7 @@
 from collections import defaultdict
 from datetime import datetime, timezone
 from multiprocessing import Manager
-from typing import Any, Generic, TypeVar
+from typing import Any, AsyncGenerator, Generator, Generic, TypeVar
 
 from prisma.enums import AgentExecutionStatus
 from prisma.models import (
@@ -14,7 +14,9 @@ from pydantic import BaseModel
 
 from backend.data.block import BlockData, BlockInput, CompletedBlockOutput
 from backend.data.includes import EXECUTION_RESULT_INCLUDE, GRAPH_EXECUTION_INCLUDE
+from backend.data.queue import AsyncRedisEventBus, RedisEventBus
 from backend.util import json, mock
+from backend.util.settings import Config
 
 
 class GraphExecution(BaseModel):
@@ -271,7 +273,6 @@ async def update_graph_execution_stats(
     graph_exec_id: str,
     stats: dict[str, Any],
 ) -> ExecutionResult:
-
     status = ExecutionStatus.FAILED if stats.get("error") else ExecutionStatus.COMPLETED
     res = await AgentGraphExecution.prisma().update(
         where={"id": graph_exec_id},
@@ -471,3 +472,42 @@ async def get_incomplete_executions(
|
||||
include=EXECUTION_RESULT_INCLUDE,
|
||||
)
|
||||
return [ExecutionResult.from_db(execution) for execution in executions]
|
||||
|
||||
|
||||
# --------------------- Event Bus --------------------- #
|
||||
|
||||
config = Config()
|
||||
|
||||
|
||||
class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
|
||||
Model = ExecutionResult
|
||||
|
||||
@property
|
||||
def event_bus_name(self) -> str:
|
||||
return config.execution_event_bus_name
|
||||
|
||||
def publish(self, res: ExecutionResult):
|
||||
self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")
|
||||
|
||||
def listen(
|
||||
self, graph_id: str = "*", graph_exec_id: str = "*"
|
||||
) -> Generator[ExecutionResult, None, None]:
|
||||
for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
|
||||
yield execution_result
|
||||
|
||||
|
||||
class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
|
||||
Model = ExecutionResult
|
||||
|
||||
@property
|
||||
def event_bus_name(self) -> str:
|
||||
return config.execution_event_bus_name
|
||||
|
||||
async def publish(self, res: ExecutionResult):
|
||||
await self.publish_event(res, f"{res.graph_id}/{res.graph_exec_id}")
|
||||
|
||||
async def listen(
|
||||
self, graph_id: str = "*", graph_exec_id: str = "*"
|
||||
) -> AsyncGenerator[ExecutionResult, None]:
|
||||
async for execution_result in self.listen_events(f"{graph_id}/{graph_exec_id}"):
|
||||
yield execution_result
|
||||
|
||||
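A hedged usage sketch of the relocated execution event bus (the Redis connection handling inside `listen_events` is assumed to be configured elsewhere; the channel key is "{graph_id}/{graph_exec_id}" and "*" matches all):

    import asyncio

    async def watch_executions():
        bus = AsyncRedisExecutionEventBus()
        async for result in bus.listen(graph_id="*", graph_exec_id="*"):
            print(result.graph_exec_id, result.status)

    # asyncio.run(watch_executions())
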
@@ -3,7 +3,7 @@ import logging
import uuid
from collections import defaultdict
from datetime import datetime, timezone
from typing import Any, Literal, Type
from typing import Any, Literal, Optional, Type

import prisma
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
@@ -12,12 +12,14 @@ from pydantic.fields import computed_field

from backend.blocks.agent import AgentExecutorBlock
from backend.blocks.basic import AgentInputBlock, AgentOutputBlock
from backend.data.block import BlockInput, BlockType, get_block, get_blocks
from backend.data.db import BaseDbModel, transaction
from backend.data.execution import ExecutionStatus
from backend.data.includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from backend.util import json

from .block import BlockInput, BlockType, get_block, get_blocks
from .db import BaseDbModel, transaction
from .execution import ExecutionStatus
from .includes import AGENT_GRAPH_INCLUDE, AGENT_NODE_INCLUDE
from .integrations import Webhook

logger = logging.getLogger(__name__)


@@ -50,20 +52,51 @@ class Node(BaseDbModel):
    input_links: list[Link] = []
    output_links: list[Link] = []

    webhook_id: Optional[str] = None


class NodeModel(Node):
    graph_id: str
    graph_version: int

    webhook: Optional[Webhook] = None

    @staticmethod
    def from_db(node: AgentNode):
        if not node.AgentBlock:
            raise ValueError(f"Invalid node {node.id}, invalid AgentBlock.")
        obj = Node(
        obj = NodeModel(
            id=node.id,
            block_id=node.AgentBlock.id,
            input_default=json.loads(node.constantInput, target_type=dict[str, Any]),
            metadata=json.loads(node.metadata, target_type=dict[str, Any]),
            graph_id=node.agentGraphId,
            graph_version=node.agentGraphVersion,
            webhook_id=node.webhookId,
            webhook=Webhook.from_db(node.Webhook) if node.Webhook else None,
        )
        obj.input_links = [Link.from_db(link) for link in node.Input or []]
        obj.output_links = [Link.from_db(link) for link in node.Output or []]
        return obj

    def is_triggered_by_event_type(self, event_type: str) -> bool:
        if not (block := get_block(self.block_id)):
            raise ValueError(f"Block #{self.block_id} not found for node #{self.id}")
        if not block.webhook_config:
            raise TypeError("This method can't be used on non-webhook blocks")
        event_filter = self.input_default.get(block.webhook_config.event_filter_input)
        if not event_filter:
            raise ValueError(f"Event filter is not configured on node #{self.id}")
        return event_type in [
            block.webhook_config.event_format.format(event=k)
            for k in event_filter
            if event_filter[k] is True
        ]


# Fix 2-way reference Node <-> Webhook
Webhook.model_rebuild()

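To make `is_triggered_by_event_type(..)` concrete: with `event_format = "pull_request.{event}"` and an event filter input of `{"opened": True, "closed": False}`, only `"pull_request.opened"` matches. A standalone sketch of the same logic (values are illustrative):

    event_filter = {"opened": True, "closed": False}
    event_format = "pull_request.{event}"
    subscribed = [event_format.format(event=k) for k, v in event_filter.items() if v is True]
    assert subscribed == ["pull_request.opened"]
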
class GraphExecution(BaseDbModel):
    execution_id: str
@@ -110,33 +143,6 @@ class Graph(BaseDbModel):
    nodes: list[Node] = []
    links: list[Link] = []

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }

    @computed_field
    @property
    def input_schema(self) -> dict[str, Any]:
@@ -165,6 +171,38 @@ class Graph(BaseDbModel):
        ],
    )

    @staticmethod
    def _generate_schema(
        type_class: Type[AgentInputBlock.Input] | Type[AgentOutputBlock.Input],
        data: list[dict],
    ) -> dict[str, Any]:
        props = []
        for p in data:
            try:
                props.append(type_class(**p))
            except Exception as e:
                logger.warning(f"Invalid {type_class}: {p}, {e}")

        return {
            "type": "object",
            "properties": {
                p.name: {
                    "secret": p.secret,
                    "advanced": p.advanced,
                    "title": p.title or p.name,
                    **({"description": p.description} if p.description else {}),
                    **({"default": p.value} if p.value is not None else {}),
                }
                for p in props
            },
            "required": [p.name for p in props if p.value is None],
        }


class GraphModel(Graph):
    user_id: str
    nodes: list[NodeModel] = []  # type: ignore

    @property
    def starting_nodes(self) -> list[Node]:
        outbound_nodes = {link.sink_id for link in self.links}

@@ -291,36 +329,39 @@ class Graph(BaseDbModel):
            GraphExecution.from_db(execution)
            for execution in graph.AgentGraphExecution or []
        ]
        nodes = graph.AgentNodes or []

        return Graph(
        return GraphModel(
            id=graph.id,
            user_id=graph.userId,
            version=graph.version,
            is_active=graph.isActive,
            is_template=graph.isTemplate,
            name=graph.name or "",
            description=graph.description or "",
            executions=executions,
            nodes=[Graph._process_node(node, hide_credentials) for node in nodes],
            nodes=[
                GraphModel._process_node(node, hide_credentials)
                for node in graph.AgentNodes or []
            ],
            links=list(
                {
                    Link.from_db(link)
                    for node in nodes
                    for node in graph.AgentNodes or []
                    for link in (node.Input or []) + (node.Output or [])
                }
            ),
        )

    @staticmethod
    def _process_node(node: AgentNode, hide_credentials: bool) -> Node:
        node_dict = node.model_dump()
    def _process_node(node: AgentNode, hide_credentials: bool) -> NodeModel:
        node_dict = {field: getattr(node, field) for field in node.model_fields}
        if hide_credentials and "constantInput" in node_dict:
            constant_input = json.loads(
                node_dict["constantInput"], target_type=dict[str, Any]
            )
            constant_input = Graph._hide_credentials_in_input(constant_input)
            constant_input = GraphModel._hide_credentials_in_input(constant_input)
            node_dict["constantInput"] = json.dumps(constant_input)
        return Node.from_db(AgentNode(**node_dict))
        return NodeModel.from_db(AgentNode(**node_dict))

    @staticmethod
    def _hide_credentials_in_input(input_data: dict[str, Any]) -> dict[str, Any]:
@@ -328,7 +369,7 @@ class Graph(BaseDbModel):
        result = {}
        for key, value in input_data.items():
            if isinstance(value, dict):
                result[key] = Graph._hide_credentials_in_input(value)
                result[key] = GraphModel._hide_credentials_in_input(value)
            elif isinstance(value, str) and any(
                sensitive_key in key.lower() for sensitive_key in sensitive_keys
            ):
@@ -339,22 +380,37 @@ class Graph(BaseDbModel):
        return result

# --------------------- Model functions --------------------- #
# --------------------- CRUD functions --------------------- #


async def get_node(node_id: str) -> Node:
async def get_node(node_id: str) -> NodeModel:
    node = await AgentNode.prisma().find_unique_or_raise(
        where={"id": node_id},
        include=AGENT_NODE_INCLUDE,
    )
    return Node.from_db(node)
    return NodeModel.from_db(node)


async def set_node_webhook(node_id: str, webhook_id: str | None) -> NodeModel:
    node = await AgentNode.prisma().update(
        where={"id": node_id},
        data=(
            {"Webhook": {"connect": {"id": webhook_id}}}
            if webhook_id
            else {"Webhook": {"disconnect": True}}
        ),
        include=AGENT_NODE_INCLUDE,
    )
    if not node:
        raise ValueError(f"Node #{node_id} not found")
    return NodeModel.from_db(node)

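A hedged sketch of calling `set_node_webhook(..)` (IDs are placeholders; a running event loop and database connection are assumed):

    # Attach a webhook to a node...
    node = await set_node_webhook("some-node-id", "some-webhook-id")
    # ...and detach it again by passing None, which disconnects the relation
    node = await set_node_webhook("some-node-id", None)
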
async def get_graphs(
    user_id: str,
    include_executions: bool = False,
    filter_by: Literal["active", "template"] | None = "active",
) -> list[Graph]:
) -> list[GraphModel]:
    """
    Retrieves graph metadata objects.
    Default behaviour is to get all currently active graphs.
@@ -365,7 +421,7 @@ async def get_graphs(
        user_id: The ID of the user that owns the graph.

    Returns:
        list[Graph]: A list of objects representing the retrieved graph metadata.
        list[GraphModel]: A list of objects representing the retrieved graphs.
    """
    where_clause: AgentGraphWhereInput = {}

@@ -386,7 +442,7 @@ async def get_graphs(
        include=graph_include,
    )

    return [Graph.from_db(graph) for graph in graphs]
    return [GraphModel.from_db(graph) for graph in graphs]


async def get_graph(
@@ -395,7 +451,7 @@ async def get_graph(
    template: bool = False,
    user_id: str | None = None,
    hide_credentials: bool = False,
) -> Graph | None:
) -> GraphModel | None:
    """
    Retrieves a graph from the DB.
    Defaults to the version with `is_active` if `version` is not passed,
@@ -420,38 +476,35 @@ async def get_graph(
        include=AGENT_GRAPH_INCLUDE,
        order={"version": "desc"},
    )
    return Graph.from_db(graph, hide_credentials) if graph else None
    return GraphModel.from_db(graph, hide_credentials) if graph else None


async def set_graph_active_version(graph_id: str, version: int, user_id: str) -> None:
    # Check if the graph belongs to the user
    graph = await AgentGraph.prisma().find_first(
    # Activate the requested version if it exists and is owned by the user.
    updated_count = await AgentGraph.prisma().update_many(
        data={"isActive": True},
        where={
            "id": graph_id,
            "version": version,
            "userId": user_id,
        }
    )
    if not graph:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    updated_graph = await AgentGraph.prisma().update(
        data={"isActive": True},
        where={
            "graphVersionId": {"id": graph_id, "version": version},
        },
    )
    if not updated_graph:
        raise Exception(f"Graph #{graph_id} v{version} not found")
    if updated_count == 0:
        raise Exception(f"Graph #{graph_id} v{version} not found or not owned by user")

    # Deactivate all other versions
    # Deactivate all other versions.
    await AgentGraph.prisma().update_many(
        data={"isActive": False},
        where={"id": graph_id, "version": {"not": version}, "userId": user_id},
        where={
            "id": graph_id,
            "version": {"not": version},
            "userId": user_id,
            "isActive": True,
        },
    )


async def get_graph_all_versions(graph_id: str, user_id: str) -> list[Graph]:
async def get_graph_all_versions(graph_id: str, user_id: str) -> list[GraphModel]:
    graph_versions = await AgentGraph.prisma().find_many(
        where={"id": graph_id, "userId": user_id},
        order={"version": "desc"},
@@ -461,7 +514,7 @@ async def get_graph_all_versions(graph_id: str, user_id: str) -> list[GraphModel]:
    if not graph_versions:
        return []

    return [Graph.from_db(graph) for graph in graph_versions]
    return [GraphModel.from_db(graph) for graph in graph_versions]


async def delete_graph(graph_id: str, user_id: str) -> int:
@@ -473,7 +526,7 @@ async def delete_graph(graph_id: str, user_id: str) -> int:
    return entries_count


async def create_graph(graph: Graph, user_id: str) -> Graph:
async def create_graph(graph: Graph, user_id: str) -> GraphModel:
    async with transaction() as tx:
        await __create_graph(tx, graph, user_id)

@@ -534,6 +587,32 @@ async def __create_graph(tx, graph: Graph, user_id: str):
# ------------------------ UTILITIES ------------------------ #


def make_graph_model(creatable_graph: Graph, user_id: str) -> GraphModel:
    """
    Convert a Graph to a GraphModel, setting graph_id and graph_version on all nodes.

    Args:
        creatable_graph (Graph): The creatable graph to convert.
        user_id (str): The ID of the user creating the graph.

    Returns:
        GraphModel: The converted Graph object.
    """
    # Create a new Graph object, inheriting properties from CreatableGraph
    return GraphModel(
        **creatable_graph.model_dump(exclude={"nodes"}),
        user_id=user_id,
        nodes=[
            NodeModel(
                **creatable_node.model_dump(),
                graph_id=creatable_graph.id,
                graph_version=creatable_graph.version,
            )
            for creatable_node in creatable_graph.nodes
        ],
    )


async def fix_llm_provider_credentials():
    """Fix node credentials with provider `llm`"""
    from autogpt_libs.supabase_integration_credentials_store import (

@@ -3,6 +3,7 @@ import prisma
AGENT_NODE_INCLUDE: prisma.types.AgentNodeInclude = {
    "Input": True,
    "Output": True,
    "Webhook": True,
    "AgentBlock": True,
}

@@ -27,3 +28,7 @@ GRAPH_EXECUTION_INCLUDE: prisma.types.AgentGraphExecutionInclude = {
        }
    }
}

INTEGRATION_WEBHOOK_INCLUDE: prisma.types.IntegrationWebhookInclude = {
    "AgentNodes": {"include": AGENT_NODE_INCLUDE}  # type: ignore
}

autogpt_platform/backend/backend/data/integrations.py (new file, 168 lines)
@@ -0,0 +1,168 @@
import logging
from typing import TYPE_CHECKING, AsyncGenerator, Optional

from prisma import Json
from prisma.models import IntegrationWebhook
from pydantic import Field

from backend.data.includes import INTEGRATION_WEBHOOK_INCLUDE
from backend.data.queue import AsyncRedisEventBus

from .db import BaseDbModel

if TYPE_CHECKING:
    from .graph import NodeModel

logger = logging.getLogger(__name__)


class Webhook(BaseDbModel):
    user_id: str
    provider: str
    credentials_id: str
    webhook_type: str
    resource: str
    events: list[str]
    config: dict = Field(default_factory=dict)
    secret: str

    provider_webhook_id: str

    attached_nodes: Optional[list["NodeModel"]] = None

    @staticmethod
    def from_db(webhook: IntegrationWebhook):
        from .graph import NodeModel

        return Webhook(
            id=webhook.id,
            user_id=webhook.userId,
            provider=webhook.provider,
            credentials_id=webhook.credentialsId,
            webhook_type=webhook.webhookType,
            resource=webhook.resource,
            events=webhook.events,
            config=dict(webhook.config),
            secret=webhook.secret,
            provider_webhook_id=webhook.providerWebhookId,
            attached_nodes=(
                [NodeModel.from_db(node) for node in webhook.AgentNodes]
                if webhook.AgentNodes is not None
                else None
            ),
        )


# --------------------- CRUD functions --------------------- #


async def create_webhook(webhook: Webhook) -> Webhook:
    created_webhook = await IntegrationWebhook.prisma().create(
        data={
            "id": webhook.id,
            "userId": webhook.user_id,
            "provider": webhook.provider,
            "credentialsId": webhook.credentials_id,
            "webhookType": webhook.webhook_type,
            "resource": webhook.resource,
            "events": webhook.events,
            "config": Json(webhook.config),
            "secret": webhook.secret,
            "providerWebhookId": webhook.provider_webhook_id,
        }
    )
    return Webhook.from_db(created_webhook)


async def get_webhook(webhook_id: str) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_unique_or_raise(
        where={"id": webhook_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook)


async def get_all_webhooks(credentials_id: str) -> list[Webhook]:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhooks = await IntegrationWebhook.prisma().find_many(
        where={"credentialsId": credentials_id},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return [Webhook.from_db(webhook) for webhook in webhooks]


async def find_webhook(
    credentials_id: str, webhook_type: str, resource: str, events: list[str]
) -> Webhook | None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    webhook = await IntegrationWebhook.prisma().find_first(
        where={
            "credentialsId": credentials_id,
            "webhookType": webhook_type,
            "resource": resource,
            "events": {"has_every": events},
        },
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    return Webhook.from_db(webhook) if webhook else None


async def update_webhook_config(webhook_id: str, updated_config: dict) -> Webhook:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    _updated_webhook = await IntegrationWebhook.prisma().update(
        where={"id": webhook_id},
        data={"config": Json(updated_config)},
        include=INTEGRATION_WEBHOOK_INCLUDE,
    )
    if _updated_webhook is None:
        raise ValueError(f"Webhook #{webhook_id} not found")
    return Webhook.from_db(_updated_webhook)


async def delete_webhook(webhook_id: str) -> None:
    """⚠️ No `user_id` check: DO NOT USE without check in user-facing endpoints."""
    deleted = await IntegrationWebhook.prisma().delete(where={"id": webhook_id})
    if not deleted:
        raise ValueError(f"Webhook #{webhook_id} not found")


# --------------------- WEBHOOK EVENTS --------------------- #


class WebhookEvent(BaseDbModel):
    provider: str
    webhook_id: str
    event_type: str
    payload: dict


class WebhookEventBus(AsyncRedisEventBus[WebhookEvent]):
    Model = WebhookEvent

    @property
    def event_bus_name(self) -> str:
        return "webhooks"

    async def publish(self, event: WebhookEvent):
        await self.publish_event(event, f"{event.webhook_id}/{event.event_type}")

    async def listen(
        self, webhook_id: str, event_type: Optional[str] = None
    ) -> AsyncGenerator[WebhookEvent, None]:
        async for event in self.listen_events(f"{webhook_id}/{event_type or '*'}"):
            yield event


event_bus = WebhookEventBus()


async def publish_webhook_event(event: WebhookEvent):
    await event_bus.publish(event)


async def listen_for_webhook_event(
    webhook_id: str, event_type: Optional[str] = None
) -> WebhookEvent | None:
    async for event in event_bus.listen(webhook_id, event_type):
        return event  # Only one event is expected

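A hedged round-trip sketch of the pub/sub helpers above (IDs and payload are placeholders; the `id` field is assumed to be defaulted by `BaseDbModel`, and Redis connectivity is assumed):

    event = WebhookEvent(
        provider="github",
        webhook_id="some-webhook-id",
        event_type="ping",
        payload={},  # placeholder payload
    )
    await publish_webhook_event(event)

    # Elsewhere (e.g. in the ping endpoint), wait for the first matching event:
    received = await listen_for_webhook_event("some-webhook-id", event_type="ping")
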
@@ -113,6 +113,7 @@ def SchemaField(
    advanced: Optional[bool] = None,
    secret: bool = False,
    exclude: bool = False,
    hidden: Optional[bool] = None,
    **kwargs,
) -> T:
    json_extra = {
@@ -121,6 +122,7 @@ def SchemaField(
            "placeholder": placeholder,
            "secret": secret,
            "advanced": advanced,
            "hidden": hidden,
        }.items()
        if v is not None
    }

@@ -9,11 +9,8 @@ from redis.asyncio.client import PubSub as AsyncPubSub
from redis.client import PubSub

from backend.data import redis
from backend.data.execution import ExecutionResult
from backend.util.settings import Config

logger = logging.getLogger(__name__)
config = Config()


class DateTimeEncoder(json.JSONEncoder):
@@ -36,7 +33,7 @@ class BaseRedisEventBus(Generic[M], ABC):

    def _serialize_message(self, item: M, channel_key: str) -> tuple[str, str]:
        message = json.dumps(item.model_dump(), cls=DateTimeEncoder)
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        logger.info(f"[{channel_name}] Publishing an event to Redis {message}")
        return message, channel_name

@@ -54,7 +51,7 @@ class BaseRedisEventBus(Generic[M], ABC):
    def _subscribe(
        self, connection: redis.Redis | redis.AsyncRedis, channel_key: str
    ) -> tuple[PubSub | AsyncPubSub, str]:
        channel_name = f"{self.event_bus_name}-{channel_key}"
        channel_name = f"{self.event_bus_name}/{channel_key}"
        pubsub = connection.pubsub()
        return pubsub, channel_name

@@ -108,37 +105,3 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC):
        async for message in pubsub.listen():
            if event := self._deserialize_message(message, channel_key):
                yield event


class RedisExecutionEventBus(RedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    def publish(self, res: ExecutionResult):
        self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> Generator[ExecutionResult, None, None]:
        for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result


class AsyncRedisExecutionEventBus(AsyncRedisEventBus[ExecutionResult]):
    Model = ExecutionResult

    @property
    def event_bus_name(self) -> str:
        return config.execution_event_bus_name

    async def publish(self, res: ExecutionResult):
        await self.publish_event(res, f"{res.graph_id}-{res.graph_exec_id}")

    async def listen(
        self, graph_id: str = "*", graph_exec_id: str = "*"
    ) -> AsyncGenerator[ExecutionResult, None]:
        async for execution_result in self.listen_events(f"{graph_id}-{graph_exec_id}"):
            yield execution_result

@@ -4,6 +4,7 @@ from typing import Any, Callable, Concatenate, Coroutine, ParamSpec, TypeVar, cast
from backend.data.credit import get_user_credit_model
from backend.data.execution import (
    ExecutionResult,
    RedisExecutionEventBus,
    create_graph_execution,
    get_execution_results,
    get_incomplete_executions,
@@ -15,14 +16,13 @@ from backend.data.execution import (
    upsert_execution_output,
)
from backend.data.graph import get_graph, get_node
from backend.data.queue import RedisExecutionEventBus
from backend.data.user import (
    get_user_integrations,
    get_user_metadata,
    update_user_integrations,
    update_user_metadata,
)
from backend.util.service import AppService, expose
from backend.util.service import AppService, expose, register_pydantic_serializers
from backend.util.settings import Config

P = ParamSpec("P")
@@ -56,6 +56,9 @@ class DatabaseManager(AppService):
            res = self.run_and_wait(coroutine)
            return res

        # Register serializers for annotations on bare function
        register_pydantic_serializers(f)

        return wrapper

    # Executions

@@ -30,7 +30,7 @@ from backend.data.execution import (
    merge_execution_input,
    parse_execution_output,
)
from backend.data.graph import Graph, Link, Node
from backend.data.graph import GraphModel, Link, Node
from backend.data.model import CREDENTIALS_FIELD_NAME, CredentialsMetaInput
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util import json
@@ -186,7 +186,7 @@ def execute_node(
        input_data, **extra_exec_kwargs
    ):
        output_size += len(json.dumps(output_data))
        log_metadata.info("Node produced output", output_name=output_data)
        log_metadata.info("Node produced output", **{output_name: output_data})
        db_client.upsert_execution_output(node_exec_id, output_name, output_data)

        for execution in _enqueue_next_nodes(
@@ -253,7 +253,6 @@ def _enqueue_next_nodes(
    graph_id: str,
    log_metadata: LogMetadata,
) -> list[NodeExecution]:

    def add_enqueued_execution(
        node_exec_id: str, node_id: str, data: BlockInput
    ) -> NodeExecution:
@@ -713,7 +712,6 @@ class Executor:


class ExecutionManager(AppService):

    def __init__(self):
        super().__init__()
        self.use_redis = True
@@ -775,7 +773,7 @@ class ExecutionManager(AppService):
        user_id: str,
        graph_version: int | None = None,
    ) -> GraphExecution:
        graph: Graph | None = self.db_client.get_graph(
        graph: GraphModel | None = self.db_client.get_graph(
            graph_id=graph_id, user_id=user_id, version=graph_version
        )
        if not graph:
@@ -799,6 +797,15 @@ class ExecutionManager(AppService):
            if name and name in data:
                input_data = {"value": data[name]}

            # Extract webhook payload, and assign it to the input pin
            webhook_payload_key = f"webhook_{node.webhook_id}_payload"
            if (
                block.block_type == BlockType.WEBHOOK
                and node.webhook_id
                and webhook_payload_key in data
            ):
                input_data = {"payload": data[webhook_payload_key]}

            input_data, error = validate_exec(node, input_data)
            if input_data is None:
                raise ValueError(error)
@@ -876,7 +883,7 @@ class ExecutionManager(AppService):
            )
            self.db_client.send_execution_update(exec_update)

    def _validate_node_input_credentials(self, graph: Graph, user_id: str):
    def _validate_node_input_credentials(self, graph: GraphModel, user_id: str):
        """Checks all credentials for all nodes of the graph"""

        for node in graph.nodes:

@@ -11,6 +11,7 @@ from redis.lock import Lock as RedisLock

from backend.data import redis
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Settings

logger = logging.getLogger(__name__)
@@ -157,12 +158,14 @@ def _get_provider_oauth_handler(provider_name: str) -> BaseOAuthHandler:
    client_id = getattr(settings.secrets, f"{provider_name}_client_id")
    client_secret = getattr(settings.secrets, f"{provider_name}_client_secret")
    if not (client_id and client_secret):
        raise Exception(  # TODO: ConfigError
        raise MissingConfigError(
            f"Integration with provider '{provider_name}' is not configured",
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url
    frontend_base_url = (
        settings.config.frontend_base_url or settings.config.platform_base_url
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,

@@ -0,0 +1,7 @@
from enum import Enum


class ProviderName(str, Enum):
    GITHUB = "github"
    GOOGLE = "google"
    NOTION = "notion"

@@ -0,0 +1,17 @@
from typing import TYPE_CHECKING

from .github import GithubWebhooksManager

if TYPE_CHECKING:
    from .base import BaseWebhooksManager

# --8<-- [start:WEBHOOK_MANAGERS_BY_NAME]
WEBHOOK_MANAGERS_BY_NAME: dict[str, type["BaseWebhooksManager"]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GithubWebhooksManager,
    ]
}
# --8<-- [end:WEBHOOK_MANAGERS_BY_NAME]

__all__ = ["WEBHOOK_MANAGERS_BY_NAME"]

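Resolving a provider's manager through this registry is then a plain dict lookup, e.g.:

    manager_class = WEBHOOK_MANAGERS_BY_NAME["github"]  # -> GithubWebhooksManager
    manager = manager_class()
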
autogpt_platform/backend/backend/integrations/webhooks/base.py (new file, 163 lines)
@@ -0,0 +1,163 @@
import logging
import secrets
from abc import ABC, abstractmethod
from typing import ClassVar, Generic, TypeVar
from uuid import uuid4

from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import Request
from strenum import StrEnum

from backend.data import integrations
from backend.util.exceptions import MissingConfigError
from backend.util.settings import Config

logger = logging.getLogger(__name__)
app_config = Config()

WT = TypeVar("WT", bound=StrEnum)


class BaseWebhooksManager(ABC, Generic[WT]):
    # --8<-- [start:BaseWebhooksManager1]
    PROVIDER_NAME: ClassVar[str]
    # --8<-- [end:BaseWebhooksManager1]

    WebhookType: WT

    async def get_suitable_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        if not app_config.platform_base_url:
            raise MissingConfigError(
                "PLATFORM_BASE_URL must be set to use Webhook functionality"
            )

        if webhook := await integrations.find_webhook(
            credentials.id, webhook_type, resource, events
        ):
            return webhook
        return await self._create_webhook(
            user_id, credentials, webhook_type, resource, events
        )

    async def prune_webhook_if_dangling(
        self, webhook_id: str, credentials: Credentials
    ) -> bool:
        webhook = await integrations.get_webhook(webhook_id)
        if webhook.attached_nodes is None:
            raise ValueError("Error retrieving webhook including attached nodes")
        if webhook.attached_nodes:
            # Don't prune webhook if in use
            return False

        await self._deregister_webhook(webhook, credentials)
        await integrations.delete_webhook(webhook.id)
        return True

    # --8<-- [start:BaseWebhooksManager3]
    @classmethod
    @abstractmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        """
        Validates an incoming webhook request and returns its payload and type.

        Params:
            webhook: Object representing the configured webhook and its properties in our system.
            request: Incoming FastAPI `Request`

        Returns:
            dict: The validated payload
            str: The event type associated with the payload
        """

    # --8<-- [end:BaseWebhooksManager3]

    # --8<-- [start:BaseWebhooksManager5]
    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        """
        Triggers a ping to the given webhook.

        Raises:
            NotImplementedError: if the provider doesn't support pinging
        """
        # --8<-- [end:BaseWebhooksManager5]
        raise NotImplementedError(f"{self.__class__.__name__} doesn't support pinging")

    # --8<-- [start:BaseWebhooksManager2]
    @abstractmethod
    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        """
        Registers a new webhook with the provider.

        Params:
            credentials: The credentials with which to create the webhook
            webhook_type: The provider-specific webhook type to create
            resource: The resource to receive events for
            events: The events to subscribe to
            ingress_url: The ingress URL for webhook payloads
            secret: Secret used to verify webhook payloads

        Returns:
            str: Webhook ID assigned by the provider
            config: Provider-specific configuration for the webhook
        """
        ...

    # --8<-- [end:BaseWebhooksManager2]

    # --8<-- [start:BaseWebhooksManager4]
    @abstractmethod
    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None: ...

    # --8<-- [end:BaseWebhooksManager4]

    async def _create_webhook(
        self,
        user_id: str,
        credentials: Credentials,
        webhook_type: WT,
        resource: str,
        events: list[str],
    ) -> integrations.Webhook:
        id = str(uuid4())
        secret = secrets.token_hex(32)
        provider_name = self.PROVIDER_NAME
        ingress_url = (
            f"{app_config.platform_base_url}/api/integrations/{provider_name}"
            f"/webhooks/{id}/ingress"
        )
        provider_webhook_id, config = await self._register_webhook(
            credentials, webhook_type, resource, events, ingress_url, secret
        )
        return await integrations.create_webhook(
            integrations.Webhook(
                id=id,
                user_id=user_id,
                provider=provider_name,
                credentials_id=credentials.id,
                webhook_type=webhook_type,
                resource=resource,
                events=events,
                provider_webhook_id=provider_webhook_id,
                config=config,
                secret=secret,
            )
        )

autogpt_platform/backend/backend/integrations/webhooks/github.py (new file, 175 lines)
@@ -0,0 +1,175 @@
import hashlib
import hmac
import logging

import requests
from autogpt_libs.supabase_integration_credentials_store import Credentials
from fastapi import HTTPException, Request
from strenum import StrEnum

from backend.data import integrations

from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


# --8<-- [start:GithubWebhooksManager]
class GithubWebhookType(StrEnum):
    REPO = "repo"


class GithubWebhooksManager(BaseWebhooksManager):
    PROVIDER_NAME = "github"

    WebhookType = GithubWebhookType

    GITHUB_API_URL = "https://api.github.com"
    GITHUB_API_DEFAULT_HEADERS = {"Accept": "application/vnd.github.v3+json"}

    @classmethod
    async def validate_payload(
        cls, webhook: integrations.Webhook, request: Request
    ) -> tuple[dict, str]:
        if not (event_type := request.headers.get("X-GitHub-Event")):
            raise HTTPException(
                status_code=400, detail="X-GitHub-Event header is missing!"
            )

        if not (signature_header := request.headers.get("X-Hub-Signature-256")):
            raise HTTPException(
                status_code=403, detail="X-Hub-Signature-256 header is missing!"
            )

        payload_body = await request.body()
        hash_object = hmac.new(
            webhook.secret.encode("utf-8"), msg=payload_body, digestmod=hashlib.sha256
        )
        expected_signature = "sha256=" + hash_object.hexdigest()

        if not hmac.compare_digest(expected_signature, signature_header):
            raise HTTPException(
                status_code=403, detail="Request signatures didn't match!"
            )

        payload = await request.json()
        if action := payload.get("action"):
            event_type += f".{action}"

        return payload, event_type

    async def trigger_ping(self, webhook: integrations.Webhook) -> None:
        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": f"Bearer {webhook.config.get('access_token')}",
        }

        repo, github_hook_id = webhook.resource, webhook.provider_webhook_id
        ping_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{github_hook_id}/pings"

        response = requests.post(ping_url, headers=headers)

        if response.status_code != 204:
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to ping GitHub webhook: {error_msg}")

    async def _register_webhook(
        self,
        credentials: Credentials,
        webhook_type: GithubWebhookType,
        resource: str,
        events: list[str],
        ingress_url: str,
        secret: str,
    ) -> tuple[str, dict]:
        if webhook_type == self.WebhookType.REPO and resource.count("/") > 1:
            raise ValueError("Invalid repo format: expected 'owner/repo'")

        # Extract main event, e.g. `pull_request.opened` -> `pull_request`
        github_events = list({event.split(".")[0] for event in events})

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }
        webhook_data = {
            "name": "web",
            "active": True,
            "events": github_events,
            "config": {
                "url": ingress_url,
                "content_type": "json",
                "insecure_ssl": "0",
                "secret": secret,
            },
        }

        response = requests.post(
            f"{self.GITHUB_API_URL}/repos/{resource}/hooks",
            headers=headers,
            json=webhook_data,
        )

        if response.status_code != 201:
            error_msg = extract_github_error_msg(response)
            if "not found" in error_msg.lower():
                error_msg = (
                    f"{error_msg} "
                    "(Make sure the GitHub account or API key has 'repo' or "
                    f"webhook create permissions to '{resource}')"
                )
            raise ValueError(f"Failed to create GitHub webhook: {error_msg}")

        webhook_id = response.json()["id"]
        config = response.json()["config"]

        return str(webhook_id), config

    async def _deregister_webhook(
        self, webhook: integrations.Webhook, credentials: Credentials
    ) -> None:
        webhook_type = self.WebhookType(webhook.webhook_type)
        if webhook.credentials_id != credentials.id:
            raise ValueError(
                f"Webhook #{webhook.id} does not belong to credentials {credentials.id}"
            )

        headers = {
            **self.GITHUB_API_DEFAULT_HEADERS,
            "Authorization": credentials.bearer(),
        }

        if webhook_type == self.WebhookType.REPO:
            repo = webhook.resource
            delete_url = f"{self.GITHUB_API_URL}/repos/{repo}/hooks/{webhook.provider_webhook_id}"  # noqa
        else:
            raise NotImplementedError(
                f"Unsupported webhook type '{webhook.webhook_type}'"
            )

        response = requests.delete(delete_url, headers=headers)

        if response.status_code not in [204, 404]:
            # 204 means successful deletion, 404 means the webhook was already deleted
            error_msg = extract_github_error_msg(response)
            raise ValueError(f"Failed to delete GitHub webhook: {error_msg}")

        # If we reach here, the webhook was successfully deleted or didn't exist


# --8<-- [end:GithubWebhooksManager]


def extract_github_error_msg(response: requests.Response) -> str:
    error_msgs = []
    resp = response.json()
    if resp.get("message"):
        error_msgs.append(resp["message"])
    if resp.get("errors"):
        error_msgs.extend(f"* {err.get('message', err)}" for err in resp["errors"])
    if resp.get("error"):
        if isinstance(resp["error"], dict):
            error_msgs.append(resp["error"].get("message", resp["error"]))
        else:
            error_msgs.append(resp["error"])
    return "\n".join(error_msgs)

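For context on the signature check in `validate_payload` above: the sender signs the raw request body with the shared secret, and the receiver recomputes and compares the digest in constant time. A self-contained sketch using only the standard library (secret and body are placeholders):

    import hashlib
    import hmac

    secret = b"shared-webhook-secret"
    body = b'{"action": "opened"}'

    # What the sender puts in the X-Hub-Signature-256 header
    signature = "sha256=" + hmac.new(secret, body, hashlib.sha256).hexdigest()

    # What the receiver verifies, avoiding timing side channels
    expected = "sha256=" + hmac.new(secret, body, hashlib.sha256).hexdigest()
    assert hmac.compare_digest(expected, signature)
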
autogpt_platform/backend/backend/integrations/webhooks/graph_lifecycle_hooks.py (new file, 198 lines)
@@ -0,0 +1,198 @@
import logging
from typing import TYPE_CHECKING, Callable, Optional, cast

from backend.data.block import get_block
from backend.data.graph import set_node_webhook
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME

if TYPE_CHECKING:
    from autogpt_libs.supabase_integration_credentials_store.types import Credentials

    from backend.data.graph import GraphModel, NodeModel

    from .base import BaseWebhooksManager

logger = logging.getLogger(__name__)


async def on_graph_activate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is activated/created.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_activate` on all nodes in this graph.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    # Compare nodes in new_graph_version with previous_graph_version
    updated_nodes = []
    for new_node in graph.nodes:
        node_credentials = None
        if creds_meta := new_node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                raise ValueError(
                    f"Node #{new_node.id} updated with non-existent "
                    f"credentials #{node_credentials}"
                )

        updated_node = await on_node_activate(
            graph.user_id, new_node, credentials=node_credentials
        )
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_graph_deactivate(
    graph: "GraphModel", get_credentials: Callable[[str], "Credentials | None"]
):
    """
    Hook to be called when a graph is deactivated/deleted.

    ⚠️ Assuming node entities are not re-used between graph versions, ⚠️
    this hook calls `on_node_deactivate` on all nodes in `graph`.

    Params:
        get_credentials: `credentials_id` -> Credentials
    """
    updated_nodes = []
    for node in graph.nodes:
        node_credentials = None
        if creds_meta := node.input_default.get(CREDENTIALS_FIELD_NAME):
            node_credentials = get_credentials(creds_meta["id"])
            if not node_credentials:
                logger.error(
                    f"Node #{node.id} referenced non-existent "
                    f"credentials #{creds_meta['id']}"
                )

        updated_node = await on_node_deactivate(node, credentials=node_credentials)
        updated_nodes.append(updated_node)

    graph.nodes = updated_nodes
    return graph


async def on_node_activate(
    user_id: str,
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
) -> "NodeModel":
    """Hook to be called when the node is activated/created"""

    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    logger.debug(
        f"Activating webhook node #{node.id} with config {block.webhook_config}"
    )

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    try:
        resource = block.webhook_config.resource_format.format(**node.input_default)
    except KeyError:
        resource = None
    logger.debug(
        f"Constructed resource string {resource} from input {node.input_default}"
    )

    event_filter_input_name = block.webhook_config.event_filter_input
    has_everything_for_webhook = (
        resource is not None
        and CREDENTIALS_FIELD_NAME in node.input_default
        and event_filter_input_name in node.input_default
        and any(is_on for is_on in node.input_default[event_filter_input_name].values())
    )

    if has_everything_for_webhook and resource:
        logger.debug(f"Node #{node} has everything for a webhook!")
        if not credentials:
            credentials_meta = node.input_default[CREDENTIALS_FIELD_NAME]
            raise ValueError(
                f"Cannot set up webhook for node #{node.id}: "
                f"credentials #{credentials_meta['id']} not available"
            )

        # Shape of the event filter is enforced in Block.__init__
        event_filter = cast(dict, node.input_default[event_filter_input_name])
        events = [
            block.webhook_config.event_format.format(event=event)
            for event, enabled in event_filter.items()
            if enabled is True
        ]
        logger.debug(f"Webhook events to subscribe to: {', '.join(events)}")

        # Find/make and attach a suitable webhook to the node
        new_webhook = await webhooks_manager.get_suitable_webhook(
            user_id,
            credentials,
            block.webhook_config.webhook_type,
            resource,
            events,
        )
        logger.debug(f"Acquired webhook: {new_webhook}")
        return await set_node_webhook(node.id, new_webhook.id)

    return node


async def on_node_deactivate(
    node: "NodeModel",
    *,
    credentials: Optional["Credentials"] = None,
    webhooks_manager: Optional["BaseWebhooksManager"] = None,
) -> "NodeModel":
    """Hook to be called when node is deactivated/deleted"""

    logger.debug(f"Deactivating node #{node.id}")
    block = get_block(node.block_id)
    if not block:
        raise ValueError(
            f"Node #{node.id} is instance of unknown block #{node.block_id}"
        )

    if not block.webhook_config:
        return node

    webhooks_manager = WEBHOOK_MANAGERS_BY_NAME[block.webhook_config.provider]()

    if node.webhook_id:
        logger.debug(f"Node #{node.id} has webhook_id {node.webhook_id}")
        if not node.webhook:
            logger.error(f"Node #{node.id} has webhook_id but no webhook object")
            raise ValueError("node.webhook not included")

        # Detach webhook from node
        logger.debug(f"Detaching webhook from node #{node.id}")
        updated_node = await set_node_webhook(node.id, None)

        # Prune and deregister the webhook if it is no longer used anywhere
        logger.debug("Pruning and deregistering webhook if dangling")
        webhook = node.webhook
        if credentials:
            logger.debug(f"Pruning webhook #{webhook.id} with credentials")
            await webhooks_manager.prune_webhook_if_dangling(webhook.id, credentials)
        else:
            logger.warning(
                f"Cannot deregister webhook #{webhook.id}: credentials "
                f"#{webhook.credentials_id} not available "
                f"({webhook.provider} webhook ID: {webhook.provider_webhook_id})"
            )
        return updated_node

    logger.debug(f"Node #{node.id} has no webhook_id, returning")
    return node

@@ -10,8 +10,20 @@ from autogpt_libs.supabase_integration_credentials_store.types import (
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel, Field, SecretStr

from backend.data.graph import set_node_webhook
from backend.data.integrations import (
    WebhookEvent,
    get_all_webhooks,
    get_webhook,
    listen_for_webhook_event,
    publish_webhook_event,
)
from backend.executor.manager import ExecutionManager
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from backend.integrations.webhooks import WEBHOOK_MANAGERS_BY_NAME
from backend.util.exceptions import NeedConfirmation
from backend.util.service import get_service_client
from backend.util.settings import Settings

from ..utils import get_user_id
@@ -183,13 +195,22 @@ class CredentialsDeletionResponse(BaseModel):
    )


class CredentialsDeletionNeedsConfirmationResponse(BaseModel):
    deleted: Literal[False] = False
    need_confirmation: Literal[True] = True
    message: str


@router.delete("/{provider}/credentials/{cred_id}")
def delete_credentials(
async def delete_credentials(
    request: Request,
    provider: Annotated[str, Path(title="The provider to delete credentials for")],
    cred_id: Annotated[str, Path(title="The ID of the credentials to delete")],
    user_id: Annotated[str, Depends(get_user_id)],
) -> CredentialsDeletionResponse:
    force: Annotated[
        bool, Query(title="Whether to proceed if any linked webhooks are still in use")
    ] = False,
) -> CredentialsDeletionResponse | CredentialsDeletionNeedsConfirmationResponse:
    creds = creds_manager.store.get_creds_by_id(user_id, cred_id)
    if not creds:
        raise HTTPException(status_code=404, detail="Credentials not found")
@@ -198,6 +219,11 @@ def delete_credentials(
            status_code=404, detail="Credentials do not match the specified provider"
        )

    try:
        await remove_all_webhooks_for_credentials(creds, force)
    except NeedConfirmation as e:
        return CredentialsDeletionNeedsConfirmationResponse(message=str(e))

    creds_manager.delete(user_id, cred_id)

    tokens_revoked = None
@@ -208,7 +234,98 @@ def delete_credentials(
    return CredentialsDeletionResponse(revoked=tokens_revoked)


# -------- UTILITIES --------- #
# ------------------------- WEBHOOK STUFF -------------------------- #


# ⚠️ Note
# No user auth check because this endpoint is for webhook ingress and relies on
# validation by the provider-specific `WebhooksManager`.
@router.post("/{provider}/webhooks/{webhook_id}/ingress")
async def webhook_ingress_generic(
    request: Request,
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
):
    logger.debug(f"Received {provider} webhook ingress for ID {webhook_id}")
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)
    logger.debug(f"Webhook #{webhook_id}: {webhook}")
    payload, event_type = await webhook_manager.validate_payload(webhook, request)
    logger.debug(f"Validated {provider} {event_type} event with payload {payload}")

    webhook_event = WebhookEvent(
        provider=provider,
        webhook_id=webhook_id,
        event_type=event_type,
        payload=payload,
    )
    await publish_webhook_event(webhook_event)
    logger.debug(f"Webhook event published: {webhook_event}")

    if not webhook.attached_nodes:
        return

    executor = get_service_client(ExecutionManager)
    for node in webhook.attached_nodes:
        logger.debug(f"Webhook-attached node: {node}")
        if not node.is_triggered_by_event_type(event_type):
            logger.debug(f"Node #{node.id} doesn't trigger on event {event_type}")
            continue
        logger.debug(f"Executing graph #{node.graph_id} node #{node.id}")
        executor.add_execution(
            node.graph_id,
            data={f"webhook_{webhook_id}_payload": payload},
            user_id=webhook.user_id,
        )


@router.post("/{provider}/webhooks/{webhook_id}/ping")
async def webhook_ping(
    provider: Annotated[str, Path(title="Provider where the webhook was registered")],
    webhook_id: Annotated[str, Path(title="Our ID for the webhook")],
    user_id: Annotated[str, Depends(get_user_id)],  # require auth
):
    webhook_manager = WEBHOOK_MANAGERS_BY_NAME[provider]()
    webhook = await get_webhook(webhook_id)

    await webhook_manager.trigger_ping(webhook)
    if not await listen_for_webhook_event(webhook_id, event_type="ping"):
        raise HTTPException(status_code=500, detail="Webhook ping event not received")


# --------------------------- UTILITIES ---------------------------- #


async def remove_all_webhooks_for_credentials(
    credentials: Credentials, force: bool = False
) -> None:
    """
    Remove and deregister all webhooks that were registered using the given credentials.

    Params:
        credentials: The credentials for which to remove the associated webhooks.
        force: Whether to proceed if any of the webhooks are still in use.

    Raises:
        NeedConfirmation: If any of the webhooks are still in use and `force` is `False`
    """
    webhooks = await get_all_webhooks(credentials.id)
    if any(w.attached_nodes for w in webhooks) and not force:
        raise NeedConfirmation(
            "Some webhooks linked to these credentials are still in use by an agent"
        )
    for webhook in webhooks:
        # Unlink all nodes
        for node in webhook.attached_nodes or []:
            await set_node_webhook(node.id, None)

        # Prune the webhook
        webhook_manager = WEBHOOK_MANAGERS_BY_NAME[credentials.provider]()
        success = await webhook_manager.prune_webhook_if_dangling(
            webhook.id, credentials
        )
        if not success:
            logger.warning(f"Webhook #{webhook.id} failed to prune")


def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
@@ -226,7 +343,11 @@ def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
    )

    handler_class = HANDLERS_BY_NAME[provider_name]
    frontend_base_url = settings.config.frontend_base_url or str(req.base_url)
    frontend_base_url = (
        settings.config.frontend_base_url
        or settings.config.platform_base_url
        or str(req.base_url)
    )
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,

@@ -29,21 +29,6 @@ async def lifespan_context(app: fastapi.FastAPI):
    await backend.data.db.disconnect()


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


docs_url = (
    "/docs"
    if settings.config.app_env == backend.util.settings.AppEnvironment.LOCAL
@@ -62,8 +47,24 @@ app = fastapi.FastAPI(
    docs_url=docs_url,
)


def handle_internal_http_error(status_code: int = 500, log_error: bool = True):
    def handler(request: fastapi.Request, exc: Exception):
        if log_error:
            logger.exception(f"{request.method} {request.url.path} failed: {exc}")
        return fastapi.responses.JSONResponse(
            content={
                "message": f"{request.method} {request.url.path} failed",
                "detail": str(exc),
            },
            status_code=status_code,
        )

    return handler


app.add_exception_handler(ValueError, handle_internal_http_error(400))
app.add_exception_handler(500, handle_internal_http_error(500))
app.add_exception_handler(Exception, handle_internal_http_error(500))
app.include_router(backend.server.routers.v1.v1_router, tags=["v1"])

@@ -1,7 +1,7 @@
import asyncio
import logging
from collections import defaultdict
-from typing import Annotated, Any, List
+from typing import TYPE_CHECKING, Annotated, Any, Sequence

import pydantic
from autogpt_libs.auth.middleware import auth_middleware
@@ -30,6 +30,11 @@ from backend.data.block import BlockInput, CompletedBlockOutput
from backend.data.credit import get_block_costs, get_user_credit_model
from backend.data.user import get_or_create_user
from backend.executor import ExecutionManager, ExecutionScheduler, scheduler
from backend.integrations.creds_manager import IntegrationCredentialsManager
+from backend.integrations.webhooks.graph_lifecycle_hooks import (
+    on_graph_activate,
+    on_graph_deactivate,
+)
from backend.server.model import (
    CreateAPIKeyRequest,
    CreateAPIKeyResponse,
@@ -41,6 +46,9 @@ from backend.server.utils import get_user_id
from backend.util.service import get_service_client
from backend.util.settings import Settings

+if TYPE_CHECKING:
+    from autogpt_libs.supabase_integration_credentials_store.types import Credentials


@thread_cached
def execution_manager_client() -> ExecutionManager:
@@ -54,6 +62,7 @@ def execution_scheduler_client() -> ExecutionScheduler:

settings = Settings()
logger = logging.getLogger(__name__)
+integration_creds_manager = IntegrationCredentialsManager()


_user_credit_model = get_user_credit_model()
@@ -62,14 +71,10 @@ _user_credit_model = get_user_credit_model()
v1_router = APIRouter(prefix="/api")

+v1_router.dependencies.append(Depends(auth_middleware))

v1_router.include_router(
    backend.server.integrations.router.router,
    prefix="/integrations",
    tags=["integrations"],
-    dependencies=[Depends(auth_middleware)],
)

v1_router.include_router(
@@ -97,13 +102,17 @@ async def get_or_create_user_route(user_data: dict = Depends(auth_middleware)):


@v1_router.get(path="/blocks", tags=["blocks"], dependencies=[Depends(auth_middleware)])
-def get_graph_blocks() -> list[dict[Any, Any]]:
+def get_graph_blocks() -> Sequence[dict[Any, Any]]:
    blocks = [block() for block in backend.data.block.get_blocks().values()]
    costs = get_block_costs()
    return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks]


-@v1_router.post(path="/blocks/{block_id}/execute", tags=["blocks"])
+@v1_router.post(
+    path="/blocks/{block_id}/execute",
+    tags=["blocks"],
+    dependencies=[Depends(auth_middleware)],
+)
def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput:
    obj = backend.data.block.get_block(block_id)
    if not obj:
@@ -141,7 +150,7 @@ class DeleteGraphResponse(TypedDict):
async def get_graphs(
    user_id: Annotated[str, Depends(get_user_id)],
    with_runs: bool = False,
-) -> list[graph_db.Graph]:
+) -> Sequence[graph_db.Graph]:
    return await graph_db.get_graphs(
        include_executions=with_runs, filter_by="active", user_id=user_id
    )
@@ -181,13 +190,61 @@ async def get_graph(
)
async def get_graph_all_versions(
    graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
-) -> list[graph_db.Graph]:
+) -> Sequence[graph_db.Graph]:
    graphs = await graph_db.get_graph_all_versions(graph_id, user_id=user_id)
    if not graphs:
        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
    return graphs


+@v1_router.post(
+    path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
+)
+async def create_new_graph(
+    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
+) -> graph_db.Graph:
+    return await do_create_graph(create_graph, is_template=False, user_id=user_id)
+
+
+async def do_create_graph(
+    create_graph: CreateGraph,
+    is_template: bool,
+    # user_id doesn't have to be annotated like on other endpoints,
+    # because create_graph isn't used directly as an endpoint
+    user_id: str,
+) -> graph_db.Graph:
+    if create_graph.graph:
+        graph = graph_db.make_graph_model(create_graph.graph, user_id)
+    elif create_graph.template_id:
+        # Create a new graph from a template
+        graph = await graph_db.get_graph(
+            create_graph.template_id,
+            create_graph.template_version,
+            template=True,
+            user_id=user_id,
+        )
+        if not graph:
+            raise HTTPException(
+                400, detail=f"Template #{create_graph.template_id} not found"
+            )
+        graph.version = 1
+    else:
+        raise HTTPException(
+            status_code=400, detail="Either graph or template_id must be provided."
+        )
+
+    graph.is_template = is_template
+    graph.is_active = not is_template
+    graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
+
+    graph = await graph_db.create_graph(graph, user_id=user_id)
+    graph = await on_graph_activate(
+        graph,
+        get_credentials=lambda id: integration_creds_manager.get(user_id, id),
+    )
+    return graph


@v1_router.delete(
    path="/graphs/{graph_id}", tags=["graphs"], dependencies=[Depends(auth_middleware)]
)
@@ -224,33 +281,41 @@ async def update_graph(
    latest_version_graph = next(
        v for v in existing_versions if v.version == latest_version_number
    )
+    current_active_version = next((v for v in existing_versions if v.is_active), None)
    if latest_version_graph.is_template != graph.is_template:
        raise HTTPException(
            400, detail="Changing is_template on an existing graph is forbidden"
        )
    graph.is_active = not graph.is_template
+    graph = graph_db.make_graph_model(graph, user_id)
    graph.reassign_ids(user_id=user_id)

    new_graph_version = await graph_db.create_graph(graph, user_id=user_id)

    if new_graph_version.is_active:

+        def get_credentials(credentials_id: str) -> "Credentials | None":
+            return integration_creds_manager.get(user_id, credentials_id)
+
+        # Handle activation of the new graph first to ensure continuity
+        new_graph_version = await on_graph_activate(
+            new_graph_version,
+            get_credentials=get_credentials,
+        )
        # Ensure new version is the only active version
        await graph_db.set_graph_active_version(
            graph_id=graph_id, version=new_graph_version.version, user_id=user_id
        )
+        if current_active_version:
+            # Handle deactivation of the previously active version
+            await on_graph_deactivate(
+                current_active_version,
+                get_credentials=get_credentials,
+            )

    return new_graph_version


-@v1_router.post(
-    path="/graphs", tags=["graphs"], dependencies=[Depends(auth_middleware)]
-)
-async def create_new_graph(
-    create_graph: CreateGraph, user_id: Annotated[str, Depends(get_user_id)]
-) -> graph_db.Graph:
-    return await do_create_graph(create_graph, is_template=False, user_id=user_id)


@v1_router.put(
    path="/graphs/{graph_id}/versions/active",
    tags=["graphs"],
@@ -262,13 +327,34 @@ async def set_graph_active_version(
    user_id: Annotated[str, Depends(get_user_id)],
):
    new_active_version = request_body.active_graph_version
-    if not await graph_db.get_graph(graph_id, new_active_version, user_id=user_id):
+    new_active_graph = await graph_db.get_graph(
+        graph_id, new_active_version, user_id=user_id
+    )
+    if not new_active_graph:
        raise HTTPException(404, f"Graph #{graph_id} v{new_active_version} not found")

+    current_active_graph = await graph_db.get_graph(graph_id, user_id=user_id)
+
+    def get_credentials(credentials_id: str) -> "Credentials | None":
+        return integration_creds_manager.get(user_id, credentials_id)
+
+    # Handle activation of the new graph first to ensure continuity
+    await on_graph_activate(
+        new_active_graph,
+        get_credentials=get_credentials,
+    )
+    # Ensure new version is the only active version
    await graph_db.set_graph_active_version(
        graph_id=graph_id,
-        version=request_body.active_graph_version,
+        version=new_active_version,
        user_id=user_id,
    )
+    if current_active_graph and current_active_graph.version != new_active_version:
+        # Handle deactivation of the previously active version
+        await on_graph_deactivate(
+            current_active_graph,
+            get_credentials=get_credentials,
+        )


@v1_router.post(
@@ -298,7 +384,7 @@ def execute_graph(
)
async def stop_graph_run(
    graph_exec_id: str, user_id: Annotated[str, Depends(get_user_id)]
-) -> list[execution_db.ExecutionResult]:
+) -> Sequence[execution_db.ExecutionResult]:
    if not await execution_db.get_graph_execution(graph_exec_id, user_id):
        raise HTTPException(404, detail=f"Agent execution #{graph_exec_id} not found")

@@ -319,7 +405,7 @@ async def list_graph_runs(
    graph_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
    graph_version: int | None = None,
-) -> list[str]:
+) -> Sequence[str]:
    graph = await graph_db.get_graph(graph_id, graph_version, user_id=user_id)
    if not graph:
        rev = "" if graph_version is None else f" v{graph_version}"
@@ -339,7 +425,7 @@ async def get_graph_run_node_execution_results(
    graph_id: str,
    graph_exec_id: str,
    user_id: Annotated[str, Depends(get_user_id)],
-) -> list[execution_db.ExecutionResult]:
+) -> Sequence[execution_db.ExecutionResult]:
    graph = await graph_db.get_graph(graph_id, user_id=user_id)
    if not graph:
        raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
@@ -378,7 +464,7 @@ async def get_graph_run_status(
)
async def get_templates(
    user_id: Annotated[str, Depends(get_user_id)]
-) -> list[graph_db.Graph]:
+) -> Sequence[graph_db.Graph]:
    return await graph_db.get_graphs(filter_by="template", user_id=user_id)


@@ -394,40 +480,6 @@ async def get_template(graph_id: str, version: int | None = None) -> graph_db.Gr
    return graph


-async def do_create_graph(
-    create_graph: CreateGraph,
-    is_template: bool,
-    # user_id doesn't have to be annotated like on other endpoints,
-    # because create_graph isn't used directly as an endpoint
-    user_id: str,
-) -> graph_db.Graph:
-    if create_graph.graph:
-        graph = create_graph.graph
-    elif create_graph.template_id:
-        # Create a new graph from a template
-        graph = await graph_db.get_graph(
-            create_graph.template_id,
-            create_graph.template_version,
-            template=True,
-            user_id=user_id,
-        )
-        if not graph:
-            raise HTTPException(
-                400, detail=f"Template #{create_graph.template_id} not found"
-            )
-        graph.version = 1
-    else:
-        raise HTTPException(
-            status_code=400, detail="Either graph or template_id must be provided."
-        )
-
-    graph.is_template = is_template
-    graph.is_active = not is_template
-    graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
-
-    return await graph_db.create_graph(graph, user_id=user_id)


@v1_router.post(
    path="/templates",
    tags=["templates", "graphs"],
@@ -534,13 +586,13 @@ async def create_api_key(

@v1_router.get(
    "/api-keys",
-    response_model=List[APIKeyWithoutHash],
+    response_model=list[APIKeyWithoutHash],
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
async def get_api_keys(
    user_id: Annotated[str, Depends(get_user_id)]
-) -> List[APIKeyWithoutHash]:
+) -> list[APIKeyWithoutHash]:
    """List all API keys for the user"""
    try:
        return await list_user_api_keys(user_id)

@@ -8,7 +8,7 @@ from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
from starlette.middleware.cors import CORSMiddleware

from backend.data import redis
-from backend.data.queue import AsyncRedisExecutionEventBus
+from backend.data.execution import AsyncRedisExecutionEventBus
from backend.data.user import DEFAULT_USER_ID
from backend.server.conn_manager import ConnectionManager
from backend.server.model import ExecutionSubscription, Methods, WsMessage

autogpt_platform/backend/backend/util/exceptions.py (new file, +6)
@@ -0,0 +1,6 @@
class MissingConfigError(Exception):
    """The attempted operation requires configuration which is not available"""


class NeedConfirmation(Exception):
    """The user must explicitly confirm that they want to proceed"""
@@ -11,6 +11,7 @@ from types import NoneType, UnionType
from typing import (
    Annotated,
    Any,
+    Awaitable,
    Callable,
    Coroutine,
    Dict,
@@ -64,7 +65,13 @@ def expose(func: C) -> C:
            logger.exception(msg)
            raise

+    # Register custom serializers and deserializers for annotated Pydantic models
+    register_pydantic_serializers(func)
+
+    return pyro.expose(wrapper)  # type: ignore
+
+
+def register_pydantic_serializers(func: Callable):
+    """Register custom serializers and deserializers for annotated Pydantic models"""
    for name, annotation in func.__annotations__.items():
        try:
            pydantic_types = _pydantic_models_from_type_annotation(annotation)
@@ -81,8 +88,6 @@ def expose(func: C) -> C:
                model.__qualname__, _make_custom_deserializer(model)
            )

-    return pyro.expose(wrapper)  # type: ignore
-
-
def _make_custom_serializer(model: Type[BaseModel]):
    def custom_class_to_dict(obj):
@@ -252,6 +257,10 @@ def _pydantic_models_from_type_annotation(annotation) -> Iterator[type[BaseModel
        key_type, value_type = args
        yield from _pydantic_models_from_type_annotation(key_type)
        yield from _pydantic_models_from_type_annotation(value_type)
+    elif origin in (Awaitable, Coroutine):
+        # For coroutines and awaitables, check the return type
+        return_type = args[-1]
+        yield from _pydantic_models_from_type_annotation(return_type)
    else:
        annotype = annotation if origin is None else origin

@@ -3,7 +3,7 @@ import os
from enum import Enum
from typing import Any, Dict, Generic, List, Set, Tuple, Type, TypeVar

-from pydantic import BaseModel, Field, PrivateAttr, field_validator
+from pydantic import BaseModel, Field, PrivateAttr, ValidationInfo, field_validator
from pydantic_settings import (
    BaseSettings,
    JsonConfigSettingsSource,
@@ -136,12 +136,32 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        description="The port for agent server API to run on",
    )

+    platform_base_url: str = Field(
+        default="",
+        description="Must be set so the application knows where it's hosted at. "
+        "This is necessary to make sure webhooks find their way.",
+    )
+
    frontend_base_url: str = Field(
-        default="http://localhost:3000",
+        default="",
        description="Can be used to explicitly set the base URL for the frontend. "
        "This value is then used to generate redirect URLs for OAuth flows.",
    )

+    @field_validator("platform_base_url", "frontend_base_url")
+    @classmethod
+    def validate_platform_base_url(cls, v: str, info: ValidationInfo) -> str:
+        if not v:
+            return v
+        if not v.startswith(("http://", "https://")):
+            raise ValueError(
+                f"{info.field_name} must be a full URL "
+                "including a http:// or https:// schema"
+            )
+        if v.endswith("/"):
+            return v.rstrip("/")  # Remove trailing slash
+        return v
+
    app_env: AppEnvironment = Field(
        default=AppEnvironment.LOCAL,
        description="The name of the app environment: local or dev or prod",

@@ -1,9 +1,10 @@
import logging
import time
+from typing import Sequence

from backend.data import db
from backend.data.block import Block, initialize_blocks
-from backend.data.execution import ExecutionStatus
+from backend.data.execution import ExecutionResult, ExecutionStatus
from backend.data.model import CREDENTIALS_FIELD_NAME
from backend.data.user import create_default_user
from backend.executor import DatabaseManager, ExecutionManager, ExecutionScheduler
@@ -57,7 +58,7 @@ async def wait_execution(
    graph_id: str,
    graph_exec_id: str,
    timeout: int = 20,
-) -> list:
+) -> Sequence[ExecutionResult]:
    async def is_execution_completed():
        status = await AgentServer().test_get_graph_run_status(
            graph_id, graph_exec_id, user_id
        )

@@ -0,0 +1,26 @@
-- AlterTable
ALTER TABLE "AgentNode" ADD COLUMN "webhookId" TEXT;

-- CreateTable
CREATE TABLE "IntegrationWebhook" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3),
    "userId" TEXT NOT NULL,
    "provider" TEXT NOT NULL,
    "credentialsId" TEXT NOT NULL,
    "webhookType" TEXT NOT NULL,
    "resource" TEXT NOT NULL,
    "events" TEXT[],
    "config" JSONB NOT NULL,
    "secret" TEXT NOT NULL,
    "providerWebhookId" TEXT NOT NULL,

    CONSTRAINT "IntegrationWebhook_pkey" PRIMARY KEY ("id")
);

-- AddForeignKey
ALTER TABLE "AgentNode" ADD CONSTRAINT "AgentNode_webhookId_fkey" FOREIGN KEY ("webhookId") REFERENCES "IntegrationWebhook"("id") ON DELETE SET NULL ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "IntegrationWebhook" ADD CONSTRAINT "IntegrationWebhook_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

@@ -23,6 +23,7 @@ model User {
  // Relations
  AgentGraphs          AgentGraph[]
  AgentGraphExecutions AgentGraphExecution[]
+  IntegrationWebhooks  IntegrationWebhook[]
  AnalyticsDetails     AnalyticsDetails[]
  AnalyticsMetrics     AnalyticsMetrics[]
  UserBlockCredit      UserBlockCredit[]
@@ -74,6 +75,10 @@ model AgentNode {
  // JSON serialized dict[str, str] containing predefined input values.
  constantInput String @default("{}")

+  // For webhook-triggered blocks: reference to the webhook that triggers the node
+  webhookId String?
+  Webhook   IntegrationWebhook? @relation(fields: [webhookId], references: [id])
+
  // JSON serialized dict[str, str] containing the node metadata.
  metadata String @default("{}")

@@ -186,6 +191,28 @@ model AgentNodeExecutionInputOutput {
  @@unique([referencedByInputExecId, referencedByOutputExecId, name])
}

+// Webhook that is registered with a provider and propagates to one or more nodes
+model IntegrationWebhook {
+  id        String    @id @default(uuid())
+  createdAt DateTime  @default(now())
+  updatedAt DateTime? @updatedAt
+
+  userId String
+  user   User   @relation(fields: [userId], references: [id], onDelete: Restrict) // Webhooks must be deregistered before deleting
+
+  provider      String // e.g. 'github'
+  credentialsId String // relation to the credentials that the webhook was created with
+  webhookType   String // e.g. 'repo'
+  resource      String // e.g. 'Significant-Gravitas/AutoGPT'
+  events        String[] // e.g. ['created', 'updated']
+  config        Json
+  secret        String // crypto string, used to verify payload authenticity
+
+  providerWebhookId String // Webhook ID assigned by the provider
+
+  AgentNodes AgentNode[]
+}

model AnalyticsDetails {
  // PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma
  // typical uuid() inserts are handled by prisma

@@ -26,6 +26,7 @@
    "@faker-js/faker": "^9.2.0",
    "@hookform/resolvers": "^3.9.1",
    "@next/third-parties": "^15.0.3",
+   "@radix-ui/react-alert-dialog": "^1.1.2",
    "@radix-ui/react-avatar": "^1.1.1",
    "@radix-ui/react-checkbox": "^1.1.2",
    "@radix-ui/react-collapsible": "^1.1.1",

@@ -74,7 +74,7 @@
}

.agpt-border-input {
-  @apply border-input focus-visible:border-gray-400 focus-visible:outline-none;
+  @apply border border-input focus-visible:border-gray-400 focus-visible:outline-none;
}

.agpt-shadow-input {

@@ -4,7 +4,7 @@ import { useSupabase } from "@/components/SupabaseProvider";
import { Button } from "@/components/ui/button";
import useUser from "@/hooks/useUser";
import { useRouter } from "next/navigation";
-import { useCallback, useContext, useMemo } from "react";
+import { useCallback, useContext, useMemo, useState } from "react";
import { FaSpinner } from "react-icons/fa";
import { Separator } from "@/components/ui/separator";
import { useToast } from "@/components/ui/use-toast";
@@ -21,6 +21,16 @@ import {
  TableRow,
} from "@/components/ui/table";
import { CredentialsProviderName } from "@/lib/autogpt-server-api";
+import {
+  AlertDialog,
+  AlertDialogAction,
+  AlertDialogCancel,
+  AlertDialogContent,
+  AlertDialogDescription,
+  AlertDialogFooter,
+  AlertDialogHeader,
+  AlertDialogTitle,
+} from "@/components/ui/alert-dialog";

export default function PrivatePage() {
  const { user, isLoading, error } = useUser();
@@ -29,15 +39,40 @@ export default function PrivatePage() {
  const providers = useContext(CredentialsProvidersContext);
  const { toast } = useToast();

+  const [confirmationDialogState, setConfirmationDialogState] = useState<
+    | {
+        open: true;
+        message: string;
+        onConfirm: () => void;
+        onReject: () => void;
+      }
+    | { open: false }
+  >({ open: false });
+
  const removeCredentials = useCallback(
-    async (provider: CredentialsProviderName, id: string) => {
+    async (
+      provider: CredentialsProviderName,
+      id: string,
+      force: boolean = false,
+    ) => {
      if (!providers || !providers[provider]) {
        return;
      }

+      let result;
      try {
-        const { revoked } = await providers[provider].deleteCredentials(id);
-        if (revoked !== false) {
+        result = await providers[provider].deleteCredentials(id, force);
+      } catch (error: any) {
+        toast({
+          title: "Something went wrong when deleting credentials: " + error,
+          variant: "destructive",
+          duration: 2000,
+        });
+        setConfirmationDialogState({ open: false });
+        return;
+      }
+      if (result.deleted) {
+        if (result.revoked) {
          toast({
            title: "Credentials deleted",
            duration: 2000,
@@ -49,11 +84,13 @@ export default function PrivatePage() {
            duration: 3000,
          });
        }
-      } catch (error: any) {
-        toast({
-          title: "Something went wrong when deleting credentials: " + error,
-          variant: "destructive",
-          duration: 2000,
+        setConfirmationDialogState({ open: false });
+      } else if (result.need_confirmation) {
+        setConfirmationDialogState({
+          open: true,
+          message: result.message,
+          onConfirm: () => removeCredentials(provider, id, true),
+          onReject: () => setConfirmationDialogState({ open: false }),
        });
      }
    },
@@ -158,6 +195,36 @@ export default function PrivatePage() {
        ))}
      </TableBody>
    </Table>

+    <AlertDialog open={confirmationDialogState.open}>
+      <AlertDialogContent>
+        <AlertDialogHeader>
+          <AlertDialogTitle>Are you sure?</AlertDialogTitle>
+          <AlertDialogDescription>
+            {confirmationDialogState.open && confirmationDialogState.message}
+          </AlertDialogDescription>
+        </AlertDialogHeader>
+        <AlertDialogFooter>
+          <AlertDialogCancel
+            onClick={() =>
+              confirmationDialogState.open &&
+              confirmationDialogState.onReject()
+            }
+          >
+            Cancel
+          </AlertDialogCancel>
+          <AlertDialogAction
+            variant="destructive"
+            onClick={() =>
+              confirmationDialogState.open &&
+              confirmationDialogState.onConfirm()
+            }
+          >
+            Continue
+          </AlertDialogAction>
+        </AlertDialogFooter>
+      </AlertDialogContent>
+    </AlertDialog>
    </div>
  );
}

@@ -38,6 +38,7 @@ import { getPrimaryCategoryColor } from "@/lib/utils";
import { FlowContext } from "./Flow";
import { Badge } from "./ui/badge";
import NodeOutputs from "./NodeOutputs";
+import SchemaTooltip from "./SchemaTooltip";
import { IconCoin } from "./ui/icons";
import * as Separator from "@radix-ui/react-separator";
import * as ContextMenu from "@radix-ui/react-context-menu";
@@ -166,7 +167,7 @@ export function CustomNode({
          <div key={key}>
            <NodeHandle
              keyName={key}
-             isConnected={isHandleConnected(key)}
+             isConnected={isOutputHandleConnected(key)}
              schema={schema.properties[key]}
              side="right"
            />
@@ -205,16 +206,18 @@ export function CustomNode({

    return keys.map(([propKey, propSchema]) => {
      const isRequired = data.inputSchema.required?.includes(propKey);
-     const isConnected = isHandleConnected(propKey);
      const isAdvanced = propSchema.advanced;
+     const isHidden = propSchema.hidden;
      const isConnectable =
+       // No input connection handles on INPUT and WEBHOOK blocks
+       ![BlockUIType.INPUT, BlockUIType.WEBHOOK].includes(nodeType) &&
        // No input connection handles for credentials
        propKey !== "credentials" &&
-       // No input connection handles on INPUT blocks
-       nodeType !== BlockUIType.INPUT &&
        // For OUTPUT blocks, only show the 'value' (hides 'name') input connection handle
        !(nodeType == BlockUIType.OUTPUT && propKey == "name");
+     const isConnected = isInputHandleConnected(propKey);
      return (
+       !isHidden &&
        (isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
          <div key={propKey} data-id={`input-handle-${propKey}`}>
            {isConnectable ? (
@@ -227,15 +230,15 @@ export function CustomNode({
              />
            ) : (
              propKey != "credentials" && (
-               <span
-                 className="text-m green mb-0 text-gray-900"
-                 title={propSchema.description}
-               >
-                 {propSchema.title || beautifyString(propKey)}
-               </span>
+               <div className="flex gap-1">
+                 <span className="text-m green mb-0 text-gray-900">
+                   {propSchema.title || beautifyString(propKey)}
+                 </span>
+                 <SchemaTooltip description={propSchema.description} />
+               </div>
              )
            )}
-           {!isConnected && (
+           {isConnected || (
              <NodeGenericInputField
                nodeId={id}
                propKey={getInputPropKey(propKey)}
@@ -298,21 +301,28 @@ export function CustomNode({
    setErrors({ ...errors });
  };

- const isHandleConnected = (key: string) => {
+ const isInputHandleConnected = (key: string) => {
    return (
      data.connections &&
      data.connections.some((conn: any) => {
        if (typeof conn === "string") {
-         const [source, target] = conn.split(" -> ");
-         return (
-           (target.includes(key) && target.includes(data.title)) ||
-           (source.includes(key) && source.includes(data.title))
-         );
+         const [_source, target] = conn.split(" -> ");
+         return target.includes(key) && target.includes(data.title);
        }
-       return (
-         (conn.target === id && conn.targetHandle === key) ||
-         (conn.source === id && conn.sourceHandle === key)
-       );
+       return conn.target === id && conn.targetHandle === key;
      })
    );
  };

+ const isOutputHandleConnected = (key: string) => {
+   return (
+     data.connections &&
+     data.connections.some((conn: any) => {
+       if (typeof conn === "string") {
+         const [source, _target] = conn.split(" -> ");
+         return source.includes(key) && source.includes(data.title);
+       }
+       return conn.source === id && conn.sourceHandle === key;
+     })
+   );
+ };

@@ -11,22 +11,6 @@ code {
    monospace;
}

-input,
-textarea {
-  background-color: #ffffff;
-  color: #000000;
-  border: 1px solid #555;
-  padding: 8px;
-  border-radius: 4px;
-  width: calc(100% - 18px);
-  box-sizing: border-box;
-}
-
-input::placeholder,
-textarea::placeholder {
-  color: #aaa;
-}
-
.modal {
  position: absolute;
  top: 50%;

@@ -3,6 +3,7 @@ import { cn } from "@/lib/utils";
import { useForm } from "react-hook-form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
+import SchemaTooltip from "@/components/SchemaTooltip";
import useCredentials from "@/hooks/useCredentials";
import { zodResolver } from "@hookform/resolvers/zod";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
@@ -235,12 +236,10 @@ export const CredentialsInput: FC<{
  if (savedApiKeys.length === 0 && savedOAuthCredentials.length === 0) {
    return (
      <>
-       <span
-         className="text-m green mb-0 text-gray-900"
-         title={schema.description}
-       >
-         Credentials
-       </span>
+       <div className="mb-2 flex gap-1">
+         <span className="text-m green text-gray-900">Credentials</span>
+         <SchemaTooltip description={schema.description} />
+       </div>
        <div className={cn("flex flex-row space-x-2", className)}>
          {supportsOAuth2 && (
            <Button onClick={handleOAuthLogin}>

@@ -1,5 +1,6 @@
import AutoGPTServerAPI, {
  APIKeyCredentials,
+ CredentialsDeleteNeedConfirmationResponse,
  CredentialsDeleteResponse,
  CredentialsMetaResponse,
  CredentialsProviderName,
@@ -59,7 +60,12 @@ export type CredentialsProviderData = {
  createAPIKeyCredentials: (
    credentials: APIKeyCredentialsCreatable,
  ) => Promise<CredentialsMetaResponse>;
- deleteCredentials: (id: string) => Promise<CredentialsDeleteResponse>;
+ deleteCredentials: (
+   id: string,
+   force?: boolean,
+ ) => Promise<
+   CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
+ >;
};

export type CredentialsProvidersContextType = {
@@ -144,8 +150,14 @@ export default function CredentialsProvider({
    async (
      provider: CredentialsProviderName,
      id: string,
-   ): Promise<CredentialsDeleteResponse> => {
-     const result = await api.deleteCredentials(provider, id);
+     force: boolean = false,
+   ): Promise<
+     CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
+   > => {
+     const result = await api.deleteCredentials(provider, id, force);
+     if (!result.deleted) {
+       return result;
+     }
      setProviders((prev) => {
        if (!prev || !prev[provider]) return prev;

@@ -202,8 +214,8 @@ export default function CredentialsProvider({
          createAPIKeyCredentials: (
            credentials: APIKeyCredentialsCreatable,
          ) => createAPIKeyCredentials(provider, credentials),
-         deleteCredentials: (id: string) =>
-           deleteCredentials(provider, id),
+         deleteCredentials: (id: string, force: boolean = false) =>
+           deleteCredentials(provider, id, force),
        },
      }));
    });

@@ -20,6 +20,14 @@ import {
  SelectTrigger,
  SelectValue,
} from "./ui/select";
+import {
+  MultiSelector,
+  MultiSelectorContent,
+  MultiSelectorInput,
+  MultiSelectorItem,
+  MultiSelectorList,
+  MultiSelectorTrigger,
+} from "./ui/multiselect";
import { LocalValuedInput } from "./ui/input";
import NodeHandle from "./NodeHandle";
import { ConnectionData } from "./CustomNode";
@@ -133,6 +141,37 @@ export const NodeGenericInputField: FC<{
  }

  if ("properties" in propSchema) {
+   // Render a multi-select for all-boolean sub-schemas with more than 3 properties
+   if (
+     Object.values(propSchema.properties).every(
+       (subSchema) => "type" in subSchema && subSchema.type == "boolean",
+     ) &&
+     Object.keys(propSchema.properties).length >= 3
+   ) {
+     const options = Object.keys(propSchema.properties);
+     const selectedKeys = Object.entries(currentValue || {})
+       .filter(([_, v]) => v)
+       .map(([k, _]) => k);
+     return (
+       <NodeMultiSelectInput
+         selfKey={propKey}
+         schema={propSchema}
+         selection={selectedKeys}
+         error={errors[propKey]}
+         className={className}
+         displayName={displayName}
+         handleInputChange={(key, selection) => {
+           handleInputChange(
+             key,
+             Object.fromEntries(
+               options.map((option) => [option, selection.includes(option)]),
+             ),
+           );
+         }}
+       />
+     );
+   }
+
    return (
      <NodeObjectInputTree
        nodeId={nodeId}
@@ -595,6 +634,56 @@ const NodeArrayInput: FC<{
  );
};

+const NodeMultiSelectInput: FC<{
+  selfKey: string;
+  schema: BlockIOObjectSubSchema; // TODO: Support BlockIOArraySubSchema
+  selection?: string[];
+  error?: string;
+  className?: string;
+  displayName?: string;
+  handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
+}> = ({
+  selfKey,
+  schema,
+  selection = [],
+  error,
+  className,
+  displayName,
+  handleInputChange,
+}) => {
+  const options = Object.keys(schema.properties);
+
+  return (
+    <div className={cn("flex flex-col", className)}>
+      <MultiSelector
+        className="nodrag"
+        values={selection}
+        onValuesChange={(v) => handleInputChange(selfKey, v)}
+      >
+        <MultiSelectorTrigger>
+          <MultiSelectorInput
+            placeholder={
+              schema.placeholder ?? `Select ${displayName || schema.title}...`
+            }
+          />
+        </MultiSelectorTrigger>
+        <MultiSelectorContent className="nowheel">
+          <MultiSelectorList>
+            {options
+              .map((key) => ({ ...schema.properties[key], key }))
+              .map(({ key, title, description }) => (
+                <MultiSelectorItem key={key} value={key} title={description}>
+                  {title ?? key}
+                </MultiSelectorItem>
+              ))}
+          </MultiSelectorList>
+        </MultiSelectorContent>
+      </MultiSelector>
+      {error && <span className="error-message">{error}</span>}
+    </div>
+  );
+};
+
const NodeStringInput: FC<{
  selfKey: string;
  schema: BlockIOStringSubSchema;
@@ -783,7 +872,7 @@ const NodeBooleanInput: FC<{
          defaultChecked={value}
          onCheckedChange={(v) => handleInputChange(selfKey, v)}
        />
-       <span className="ml-3">{displayName}</span>
+       {displayName && <span className="ml-3">{displayName}</span>}
      </div>
      {error && <span className="error-message">{error}</span>}
    </div>

autogpt_platform/frontend/src/components/ui/alert-dialog.tsx (new file, +143)
@@ -0,0 +1,143 @@
"use client";

import * as React from "react";
import * as AlertDialogPrimitive from "@radix-ui/react-alert-dialog";

import { cn } from "@/lib/utils";
import { buttonVariants } from "@/components/ui/button";
import { VariantProps } from "class-variance-authority";

const AlertDialog = AlertDialogPrimitive.Root;

const AlertDialogTrigger = AlertDialogPrimitive.Trigger;

const AlertDialogPortal = AlertDialogPrimitive.Portal;

const AlertDialogOverlay = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Overlay>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Overlay>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Overlay
    className={cn(
      "fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
      className,
    )}
    {...props}
    ref={ref}
  />
));
AlertDialogOverlay.displayName = AlertDialogPrimitive.Overlay.displayName;

const AlertDialogContent = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Content>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Content>
>(({ className, ...props }, ref) => (
  <AlertDialogPortal>
    <AlertDialogOverlay />
    <AlertDialogPrimitive.Content
      ref={ref}
      className={cn(
        "fixed left-[50%] top-[50%] z-50 grid w-full max-w-lg translate-x-[-50%] translate-y-[-50%] gap-4 border border-neutral-200 bg-white p-6 shadow-lg duration-200 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[state=closed]:slide-out-to-left-1/2 data-[state=closed]:slide-out-to-top-[48%] data-[state=open]:slide-in-from-left-1/2 data-[state=open]:slide-in-from-top-[48%] dark:border-neutral-800 dark:bg-neutral-950 sm:rounded-lg",
        className,
      )}
      {...props}
    />
  </AlertDialogPortal>
));
AlertDialogContent.displayName = AlertDialogPrimitive.Content.displayName;

const AlertDialogHeader = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col space-y-2 text-center sm:text-left",
      className,
    )}
    {...props}
  />
);
AlertDialogHeader.displayName = "AlertDialogHeader";

const AlertDialogFooter = ({
  className,
  ...props
}: React.HTMLAttributes<HTMLDivElement>) => (
  <div
    className={cn(
      "flex flex-col-reverse sm:flex-row sm:justify-end sm:space-x-2",
      className,
    )}
    {...props}
  />
);
AlertDialogFooter.displayName = "AlertDialogFooter";

const AlertDialogTitle = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Title>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Title>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Title
    ref={ref}
    className={cn("text-lg font-semibold", className)}
    {...props}
  />
));
AlertDialogTitle.displayName = AlertDialogPrimitive.Title.displayName;

const AlertDialogDescription = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Description>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Description>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Description
    ref={ref}
    className={cn("text-sm text-neutral-500 dark:text-neutral-400", className)}
    {...props}
  />
));
AlertDialogDescription.displayName =
  AlertDialogPrimitive.Description.displayName;

const AlertDialogAction = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Action>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Action> &
    VariantProps<typeof buttonVariants>
>(({ className, variant, ...props }, ref) => (
  <AlertDialogPrimitive.Action
    ref={ref}
    className={cn(buttonVariants({ variant: variant }), className)}
    {...props}
  />
));
AlertDialogAction.displayName = AlertDialogPrimitive.Action.displayName;

const AlertDialogCancel = React.forwardRef<
  React.ElementRef<typeof AlertDialogPrimitive.Cancel>,
  React.ComponentPropsWithoutRef<typeof AlertDialogPrimitive.Cancel>
>(({ className, ...props }, ref) => (
  <AlertDialogPrimitive.Cancel
    ref={ref}
    className={cn(
      buttonVariants({ variant: "outline" }),
      "mt-2 sm:mt-0",
      className,
    )}
    {...props}
  />
));
AlertDialogCancel.displayName = AlertDialogPrimitive.Cancel.displayName;

export {
  AlertDialog,
  AlertDialogPortal,
  AlertDialogOverlay,
  AlertDialogTrigger,
  AlertDialogContent,
  AlertDialogHeader,
  AlertDialogFooter,
  AlertDialogTitle,
  AlertDialogDescription,
  AlertDialogAction,
  AlertDialogCancel,
};

@@ -144,7 +144,7 @@ const MultiSelector = forwardRef<HTMLDivElement, MultiSelectorProps>(
      ref={ref}
      onKeyDown={handleKeyDown}
      className={cn(
-       "flex flex-col space-y-2 overflow-visible bg-transparent",
+       "flex flex-col overflow-visible bg-transparent",
        className,
      )}
      dir={dir}
@@ -174,7 +174,7 @@ const MultiSelectorTrigger = forwardRef<
    <div
      ref={ref}
      className={cn(
-       "flex flex-wrap gap-1 rounded-lg border border-muted bg-background p-1 py-2",
+       "agpt-border-input agpt-shadow-input flex flex-wrap gap-1 rounded-lg bg-background px-3 py-2 pl-1 text-sm",
        className,
      )}
      {...props}
@@ -183,7 +183,7 @@ const MultiSelectorTrigger = forwardRef<
      <Badge
        key={item}
        className={cn(
-         "flex items-center gap-1 rounded-xl px-1",
+         "flex items-center gap-1 rounded-xl px-1 pl-2",
          activeIndex === index && "ring-2 ring-muted-foreground",
        )}
        variant={"secondary"}
@@ -237,10 +237,10 @@ MultiSelectorInput.displayName = "MultiSelectorInput";
const MultiSelectorContent = forwardRef<
  HTMLDivElement,
  React.HTMLAttributes<HTMLDivElement>
->(({ children }, ref) => {
+>(({ children, className }, ref) => {
  const { open } = useMultiSelect();
  return (
-   <div ref={ref} className="relative">
+   <div ref={ref} className={cn("relative mt-2", className)}>
      {open && children}
    </div>
  );

@@ -1,24 +1,24 @@
import { SupabaseClient } from "@supabase/supabase-js";
import {
  AnalyticsMetrics,
  AnalyticsDetails,
  AnalyticsMetrics,
  APIKeyCredentials,
  Block,
  CredentialsDeleteNeedConfirmationResponse,
  CredentialsDeleteResponse,
  CredentialsMetaResponse,
  ExecutionMeta,
  Graph,
  GraphCreatable,
  GraphUpdateable,
  GraphExecuteResponse,
  GraphMeta,
  GraphMetaWithRuns,
  GraphExecuteResponse,
  ExecutionMeta,
  GraphUpdateable,
  NodeExecutionResult,
  OAuth2Credentials,
  User,
  ScheduleCreatable,
  ScheduleUpdateable,
  Schedule,
  ScheduleCreatable,
  User,
} from "./types";

export default class BaseAutoGPTServerAPI {
@@ -226,10 +226,14 @@ export default class BaseAutoGPTServerAPI {
  deleteCredentials(
    provider: string,
    id: string,
- ): Promise<CredentialsDeleteResponse> {
+   force: boolean = true,
+ ): Promise<
+   CredentialsDeleteResponse | CredentialsDeleteNeedConfirmationResponse
+ > {
    return this._request(
      "DELETE",
      `/integrations/${provider}/credentials/${id}`,
+     force ? { force: true } : undefined,
    );
  }

@@ -271,13 +275,14 @@ export default class BaseAutoGPTServerAPI {
      ?.access_token || "";

    let url = this.baseUrl + path;
-   if (method === "GET" && payload) {
+   const payloadAsQuery = ["GET", "DELETE"].includes(method);
+   if (payloadAsQuery && payload) {
      // For GET requests, use payload as query
      const queryParams = new URLSearchParams(payload);
      url += `?${queryParams.toString()}`;
    }

-   const hasRequestBody = method !== "GET" && payload !== undefined;
+   const hasRequestBody = !payloadAsQuery && payload !== undefined;
    const response = await fetch(url, {
      method,
      headers: {

@@ -56,6 +56,7 @@ export type BlockIOSubSchemaMeta = {
  description?: string;
  placeholder?: string;
  advanced?: boolean;
+ hidden?: boolean;
};

export type BlockIOObjectSubSchema = BlockIOSubSchemaMeta & {
@@ -271,6 +272,13 @@ export type CredentialsDeleteResponse = {
  revoked: boolean | null;
};

+/* Mirror of backend/server/integrations/router.py:CredentialsDeletionNeedsConfirmationResponse */
+export type CredentialsDeleteNeedConfirmationResponse = {
+  deleted: false;
+  need_confirmation: true;
+  message: string;
+};
+
/* Mirror of backend/data/model.py:CredentialsMetaInput */
export type CredentialsMetaInput = {
  id: string;
@@ -317,6 +325,7 @@ export enum BlockUIType {
  INPUT = "Input",
  OUTPUT = "Output",
  NOTE = "Note",
+ WEBHOOK = "Webhook",
  AGENT = "Agent",
}

@@ -2100,6 +2100,18 @@
  resolved "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz"
  integrity sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==

+"@radix-ui/react-alert-dialog@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@radix-ui/react-alert-dialog/-/react-alert-dialog-1.1.2.tgz#ac3bb7f71f5cbb595d3d0949bb12b598c2a99981"
+  integrity sha512-eGSlLzPhKO+TErxkiGcCZGuvbVMnLA1MTnyBksGOeGRGkxHiiJUujsjmNTdWTm4iHVSRaUao9/4Ur671auMghQ==
+  dependencies:
+    "@radix-ui/primitive" "1.1.0"
+    "@radix-ui/react-compose-refs" "1.1.0"
+    "@radix-ui/react-context" "1.1.1"
+    "@radix-ui/react-dialog" "1.1.2"
+    "@radix-ui/react-primitive" "2.0.0"
+    "@radix-ui/react-slot" "1.1.0"
+
"@radix-ui/react-arrow@1.1.0":
  version "1.1.0"
  resolved "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz"
@@ -2182,7 +2194,7 @@
  resolved "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz"
  integrity sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==

-"@radix-ui/react-dialog@^1.1.2":
+"@radix-ui/react-dialog@1.1.2", "@radix-ui/react-dialog@^1.1.2":
  version "1.1.2"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-dialog/-/react-dialog-1.1.2.tgz#d9345575211d6f2d13e209e84aec9a8584b54d6c"
  integrity sha512-Yj4dZtqa2o+kG61fzB0H2qUvmwBA2oyQroGLyNtBj1beo1khoQ3q1a2AO8rrQYjd8256CO9+N8L9tvsS+bnIyA==
@@ -2428,13 +2440,20 @@
  dependencies:
    "@radix-ui/react-primitive" "2.0.0"

-"@radix-ui/react-slot@1.1.0", "@radix-ui/react-slot@^1.1.0":
+"@radix-ui/react-slot@1.1.0":
  version "1.1.0"
  resolved "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.0.tgz"
  integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==
  dependencies:
    "@radix-ui/react-compose-refs" "1.1.0"

+"@radix-ui/react-slot@^1.1.0":
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/@radix-ui/react-slot/-/react-slot-1.1.0.tgz#7c5e48c36ef5496d97b08f1357bb26ed7c714b84"
+  integrity sha512-FUCf5XMfmW4dtYl69pdS4DbxKy8nj4M7SafBgPllysxmdachynNflAdp/gCsnYWNDnge6tI9onzMp5ARYc1KNw==
+  dependencies:
+    "@radix-ui/react-compose-refs" "1.1.0"
+
"@radix-ui/react-switch@^1.1.1":
  version "1.1.1"
  resolved "https://registry.yarnpkg.com/@radix-ui/react-switch/-/react-switch-1.1.1.tgz#1401658c24d66a18610f18793afbaa7fedf5429a"

@@ -83,7 +83,7 @@ Follow these steps to create and test a new block:

    In this case, we're mocking the `get_request` method to always return a dictionary with an 'extract' key, simulating a successful API response. This allows us to test the block's logic without making actual network requests, which could be slow, unreliable, or rate-limited.
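
    As a rough illustration, such a stub is typically supplied through the block's `test_mock` field; the exact shape below is an assumption for illustration, not copied from the source:

    ```python
    # Hypothetical sketch: stand in for the real network call so tests never
    # hit the network. Whatever arguments the block passes through, the mock
    # returns a dict with the 'extract' key the block's logic expects.
    test_mock = {
        "get_request": lambda *args, **kwargs: {"extract": "Lots of text."}
    }
    ```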

-5. **Implement the `run` method with error handling:**, this should contain the main logic of the block:
+5. **Implement the `run` method with error handling.** This should contain the main logic of the block:

    ```python
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
@@ -234,7 +234,7 @@ All our existing handlers and the base class can be found [here][OAuth2 handlers

Every handler must implement the following parts of the [`BaseOAuthHandler`] interface:

-```python title="autogpt_platform/backend/backend/integrations/oauth/base.py"
+```python title="backend/integrations/oauth/base.py"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler1"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler2"
--8<-- "autogpt_platform/backend/backend/integrations/oauth/base.py:BaseOAuthHandler3"
@@ -249,13 +249,13 @@ Aside from implementing the `OAuthHandler` itself, adding a handler into the sys

- Adding the handler class to `HANDLERS_BY_NAME` under [`integrations/oauth/__init__.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/__init__.py)

-    ```python title="autogpt_platform/backend/backend/integrations/oauth/__init__.py"
+    ```python title="backend/integrations/oauth/__init__.py"
    --8<-- "autogpt_platform/backend/backend/integrations/oauth/__init__.py:HANDLERS_BY_NAMEExample"
    ```

- Adding `{provider}_client_id` and `{provider}_client_secret` to the application's `Secrets` under [`util/settings.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/util/settings.py)

-    ```python title="autogpt_platform/backend/backend/util/settings.py"
+    ```python title="backend/util/settings.py"
    --8<-- "autogpt_platform/backend/backend/util/settings.py:OAuthServerCredentialsExample"
    ```

@@ -286,13 +286,13 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro

- GitHub blocks with API key + OAuth2 support: [`blocks/github`](https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt_platform/backend/backend/blocks/github/)

-    ```python title="blocks/github/issues.py"
+    ```python title="backend/blocks/github/issues.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/issues.py:GithubCommentBlockExample"
    ```

- GitHub OAuth2 handler: [`integrations/oauth/github.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/github.py)

-    ```python title="blocks/github/github.py"
+    ```python title="backend/integrations/oauth/github.py"
    --8<-- "autogpt_platform/backend/backend/integrations/oauth/github.py:GithubOAuthHandlerExample"
    ```

@@ -300,18 +300,148 @@ Finally you will need to add the provider to the `CredentialsType` enum in [`fro

- Google OAuth2 handler: [`integrations/oauth/google.py`](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/oauth/google.py)

-    ```python title="integrations/oauth/google.py"
+    ```python title="backend/integrations/oauth/google.py"
    --8<-- "autogpt_platform/backend/backend/integrations/oauth/google.py:GoogleOAuthHandlerExample"
    ```

You can see that Google defines a `DEFAULT_SCOPES` variable; it sets the scopes that are always requested, no matter what the user asks for.
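
For illustration, such a constant might look like the sketch below; the scope values here are assumptions, and the authoritative list lives in `backend/integrations/oauth/google.py`:

```python
# Assumed example values; see the Google OAuth handler for the real list.
DEFAULT_SCOPES = [
    "openid",
    "https://www.googleapis.com/auth/userinfo.email",
    "https://www.googleapis.com/auth/userinfo.profile",
]
```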

-```python title="blocks/google/_auth.py"
+```python title="backend/blocks/google/_auth.py"
--8<-- "autogpt_platform/backend/backend/blocks/google/_auth.py:GoogleOAuthIsConfigured"
```

You can also see that `GOOGLE_OAUTH_IS_CONFIGURED` is used to disable blocks that require OAuth when OAuth is not configured. This happens in each block's `__init__` method: because there is no API key fallback for the Google blocks, we need to make sure OAuth is configured before letting the user use them.
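
A minimal sketch of that pattern, assuming the `disabled` keyword argument on `Block.__init__` (the block name and other arguments here are hypothetical):

```python
from backend.blocks.google._auth import GOOGLE_OAUTH_IS_CONFIGURED
from backend.data.block import Block


class ExampleGoogleBlock(Block):  # hypothetical block, for illustration only
    def __init__(self):
        super().__init__(
            id="00000000-0000-0000-0000-000000000000",  # placeholder ID
            # No API key fallback exists for Google, so the block stays
            # disabled until OAuth is configured:
            disabled=not GOOGLE_OAUTH_IS_CONFIGURED,
            # ...input_schema, output_schema, and other arguments omitted...
        )
```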
|
||||
|
||||
### Webhook-triggered Blocks
|
||||
|
||||
Webhook-triggered blocks allow your agent to respond to external events in real-time.
|
||||
These blocks are triggered by incoming webhooks from third-party services
|
||||
rather than being executed manually.
|
||||
|
||||
Creating and running a webhook-triggered block involves three main components:
|
||||
|
||||
- The block itself, which specifies:
|
||||
- Inputs for the user to select a resource and events to subscribe to
|
||||
- A `credentials` input with the scopes needed to manage webhooks
|
||||
- Logic to turn the webhook payload into outputs for the webhook block
|
||||
- The `WebhooksManager` for the corresponding webhook service provider, which handles:
|
||||
- (De)registering webhooks with the provider
|
||||
- Parsing and validating incoming webhook payloads
|
||||
- The credentials system for the corresponding service provider, which may include an `OAuthHandler`
|
||||
|
||||
There is more going on under the hood, e.g. to store and retrieve webhooks and their
|
||||
links to nodes, but to add a webhook-triggered block you shouldn't need to make changes
|
||||
to those parts of the system.
|
||||
|
||||
#### Creating a Webhook-triggered Block
|
||||
|
||||
To create a webhook-triggered block, follow these additional steps on top of the basic block creation process:
|
||||
|
||||
1. **Define `webhook_config`** in your block's `__init__` method; an illustrative sketch follows the definitions below.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-webhook_config"
    ```
    </details>

    <details>
    <summary><code>BlockWebhookConfig</code> definition</summary>

    ```python title="backend/data/block.py"
    --8<-- "autogpt_platform/backend/backend/data/block.py:BlockWebhookConfig"
    ```
    </details>
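
    For illustration, a `webhook_config` might be instantiated roughly as follows.
    This is a hedged sketch based on the `BlockWebhookConfig` definition above;
    the field values are placeholders, not prescriptive.

    ```python
    from backend.data.block import BlockWebhookConfig

    # Hedged sketch: values mirror the GitHub example above and are illustrative.
    example_webhook_config = BlockWebhookConfig(
        provider="github",                    # the webhook service provider
        webhook_type="repo",                  # provider-specific webhook type
        resource_format="{repo}",             # template for the resource to watch
        event_filter_input="events",          # Input field holding the event filter (step 2)
        event_format="pull_request.{event}",  # maps filter fields to provider event names
    )
    ```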

2. **Define event filter input** in your block's Input schema.
    This allows the user to select which specific types of events will trigger the block in their agent.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-event-filter"
    ```
    </details>

    - The name of the input field (`events` in this case) must match `webhook_config.event_filter_input`.
    - The event filter itself must be a Pydantic model with only boolean fields; a minimal sketch follows below.
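
    Assuming the Pydantic conventions shown above, a minimal event filter model
    might look like this sketch; the class and event names are illustrative.

    ```python
    from pydantic import BaseModel

    # Hedged sketch: each boolean field represents one event type the user can
    # subscribe to; only boolean fields are allowed in the filter model.
    class PullRequestEventsFilter(BaseModel):
        opened: bool = False
        closed: bool = False
        reopened: bool = False
    ```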

3. **Include payload field** in your block's Input schema.

    <details>
    <summary>Example: <code>GitHubTriggerBase</code></summary>

    ```python title="backend/blocks/github/triggers.py"
    --8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:example-payload-field"
    ```
    </details>
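
    As a sketch, the payload input can be hidden from the user and default to an
    empty dict; the import paths below are assumptions based on the examples in
    this guide.

    ```python
    from backend.data.block import BlockSchema
    from backend.data.model import SchemaField

    class Input(BlockSchema):
        # Receives the raw webhook event; hidden because the user never sets it.
        payload: dict = SchemaField(hidden=True, default={})
    ```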

4. **Define `credentials` input** in your block's Input schema.

    - Its scopes must be sufficient to manage a user's webhooks through the provider's API.
    - See [Blocks with authentication](#blocks-with-authentication) for further details.
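
    For the GitHub triggers, this looks roughly like the sketch below; the
    helper names and import path are assumptions based on the GitHub block
    examples, and `"repo"` is the scope needed to manage repository webhooks.

    ```python
    from backend.blocks.github._auth import (
        GithubCredentialsField,
        GithubCredentialsInput,
    )
    from backend.data.block import BlockSchema

    class Input(BlockSchema):
        # Hedged sketch: credentials with a scope sufficient to manage webhooks.
        credentials: GithubCredentialsInput = GithubCredentialsField("repo")
    ```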

5. **Process webhook payload** and output relevant parts of it in your block's `run` method.

    <details>
    <summary>Example: <code>GitHubPullRequestTriggerBlock</code></summary>

    ```python
    def run(self, input_data: Input, **kwargs) -> BlockOutput:
        # Pass the full payload through, then surface its most useful parts
        # as separate outputs.
        yield "payload", input_data.payload
        yield "sender", input_data.payload["sender"]
        yield "event", input_data.payload["action"]
        yield "number", input_data.payload["number"]
        yield "pull_request", input_data.payload["pull_request"]
    ```

    Note that the `credentials` parameter can be omitted if the credentials
    aren't used at block runtime, as in the example.
    </details>

#### Adding a Webhooks Manager

To add support for a new webhook provider, you'll need to create a `WebhooksManager` that implements the `BaseWebhooksManager` interface:

```python title="backend/integrations/webhooks/base.py"
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager1"
|
||||
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager2"
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager3"
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager4"
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/base.py:BaseWebhooksManager5"
|
||||
```
|
||||
|
||||
And add a reference to your `WebhooksManager` class in `WEBHOOK_MANAGERS_BY_NAME`:

```python title="backend/integrations/webhooks/__init__.py"
|
||||
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/__init__.py:WEBHOOK_MANAGERS_BY_NAME"
|
||||
```
|
||||
|
||||
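
For illustration, adding a hypothetical provider could look like the sketch
below; `"my_provider"` and `MyWebhooksManager` are placeholder names, and the
mapping's exact shape is shown in the snippet above.

```python
from backend.integrations.webhooks.base import BaseWebhooksManager
from backend.integrations.webhooks.github import GithubWebhooksManager

class MyWebhooksManager(BaseWebhooksManager):  # hypothetical placeholder
    ...

# Hedged sketch: register your manager under its provider name so the platform
# can route webhook ingress/ping requests to it.
WEBHOOK_MANAGERS_BY_NAME: dict[str, type[BaseWebhooksManager]] = {
    "github": GithubWebhooksManager,
    "my_provider": MyWebhooksManager,
}
```
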
#### Example: GitHub Webhook Integration

<details>
<summary>
GitHub Webhook triggers: <a href="https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/blocks/github/triggers.py"><code>blocks/github/triggers.py</code></a>
</summary>

```python title="backend/blocks/github/triggers.py"
--8<-- "autogpt_platform/backend/backend/blocks/github/triggers.py:GithubTriggerExample"
```
</details>

<details>
<summary>
GitHub Webhooks Manager: <a href="https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/backend/backend/integrations/webhooks/github.py"><code>integrations/webhooks/github.py</code></a>
</summary>

```python title="backend/integrations/webhooks/github.py"
--8<-- "autogpt_platform/backend/backend/integrations/webhooks/github.py:GithubWebhooksManager"
```
</details>

## Key Points to Remember

- **Unique ID**: Give your block a unique ID in the `__init__` method.