Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-01-12 00:28:31 -05:00)

Compare commits: clarify-li...fix-path (44 commits)
| SHA1 |
|---|
| 6301f8c34e |
| f711c057da |
| 0c6b08d882 |
| 5b08913ab9 |
| 815cf8b4ac |
| 3abe821533 |
| 6ae6c711b7 |
| c8f55bc518 |
| df2126c1a8 |
| dfcfd003df |
| 4e33399d31 |
| 369b1d9023 |
| 241f21ab5f |
| 7551782cd1 |
| 430835e539 |
| f5040fa3ab |
| 6ced85d203 |
| 5e1a3d5717 |
| d35b91cde4 |
| aeab5aac67 |
| 31cd6dc652 |
| 13b82c86f5 |
| ff11d00f74 |
| 9d7dfb0a6d |
| f1bf7f269b |
| 46cc8ae3ea |
| 43bf6f2349 |
| 2582eb1ee8 |
| 10cefc149f |
| d62fe001b8 |
| f583a15fd0 |
| 2cad2093eb |
| 4e569f4562 |
| 7f514c10cf |
| d7aba4f6c0 |
| ba30aa2fce |
| efeba4400e |
| ba206e3bec |
| be16fd90d4 |
| d10167ceab |
| d593f76437 |
| bda938422e |
| 8397b78ec2 |
| 0d7342826b |
@@ -1,18 +0,0 @@
|
||||
version = 1
|
||||
|
||||
test_patterns = ["**/*.spec.ts","**/*_test.py","**/*_tests.py","**/test_*.py"]
|
||||
|
||||
exclude_patterns = ["classic/**"]
|
||||
|
||||
[[analyzers]]
|
||||
name = "javascript"
|
||||
|
||||
[analyzers.meta]
|
||||
plugins = ["react"]
|
||||
environment = ["nodejs"]
|
||||
|
||||
[[analyzers]]
|
||||
name = "python"
|
||||
|
||||
[analyzers.meta]
|
||||
runtime_version = "3.x.x"
|
||||
@@ -1,61 +0,0 @@
# Ignore everything by default, selectively add things to context
*

# Platform - Libs
!autogpt_platform/autogpt_libs/autogpt_libs/
!autogpt_platform/autogpt_libs/pyproject.toml
!autogpt_platform/autogpt_libs/poetry.lock
!autogpt_platform/autogpt_libs/README.md

# Platform - Backend
!autogpt_platform/backend/backend/
!autogpt_platform/backend/migrations/
!autogpt_platform/backend/schema.prisma
!autogpt_platform/backend/pyproject.toml
!autogpt_platform/backend/poetry.lock
!autogpt_platform/backend/README.md

# Platform - Market
!autogpt_platform/market/market/
!autogpt_platform/market/scripts.py
!autogpt_platform/market/schema.prisma
!autogpt_platform/market/pyproject.toml
!autogpt_platform/market/poetry.lock
!autogpt_platform/market/README.md

# Platform - Frontend
!autogpt_platform/frontend/src/
!autogpt_platform/frontend/public/
!autogpt_platform/frontend/package.json
!autogpt_platform/frontend/yarn.lock
!autogpt_platform/frontend/tsconfig.json
!autogpt_platform/frontend/README.md
## config
!autogpt_platform/frontend/*.config.*
!autogpt_platform/frontend/.env.*

# Classic - AutoGPT
!classic/original_autogpt/autogpt/
!classic/original_autogpt/pyproject.toml
!classic/original_autogpt/poetry.lock
!classic/original_autogpt/README.md
!classic/original_autogpt/tests/

# Classic - Benchmark
!classic/benchmark/agbenchmark/
!classic/benchmark/pyproject.toml
!classic/benchmark/poetry.lock
!classic/benchmark/README.md

# Classic - Forge
!classic/forge/
!classic/forge/pyproject.toml
!classic/forge/poetry.lock
!classic/forge/README.md

# Classic - Frontend
!classic/frontend/build/web/

# Explicitly re-ignore some folders
.*
**/__pycache__
43 .github/PULL_REQUEST_TEMPLATE.md (vendored)
@@ -1,38 +1,23 @@
### Background

<!-- Clearly explain the need for these changes: -->

### Changes 🏗️

<!-- Concisely describe all of the changes made in this pull request: -->

### Checklist 📋

#### For code changes:
- [ ] I have clearly listed my changes in the PR description
- [ ] I have made a test plan
- [ ] I have tested my changes according to the test plan:
<!-- Put your test plan here: -->
- [ ] ...
### Testing 🔍
> [!NOTE]
Only for the new autogpt platform, currently in autogpt_platform/

<details>
<summary>Example test plan</summary>

- [ ] Create from scratch and execute an agent with at least 3 blocks
- [ ] Import an agent from file upload, and confirm it executes correctly
- [ ] Upload agent to marketplace
- [ ] Import an agent from marketplace and confirm it executes correctly
- [ ] Edit an agent from monitor, and confirm it executes correctly
</details>
<!--
Please make sure your changes have been tested and are in good working condition.
Here is a list of our critical paths, if you need some inspiration on what and how to test:
-->

#### For configuration changes:
- [ ] `.env.example` is updated or already compatible with my changes
- [ ] `docker-compose.yml` is updated or already compatible with my changes
- [ ] I have included a list of my configuration changes in the PR description (under **Changes**)

<details>
<summary>Examples of configuration changes</summary>

- Changing ports
- Adding new services that need to communicate with each other
- Secrets or environment variable changes
- New or infrastructure changes such as databases
</details>
- Create from scratch and execute an agent with at least 3 blocks
- Import an agent from file upload, and confirm it executes correctly
- Upload agent to marketplace
- Import an agent from marketplace and confirm it executes correctly
- Edit an agent from monitor, and confirm it executes correctly
151 .github/dependabot.yml (vendored)
@@ -1,151 +0,0 @@
version: 2
updates:
  # autogpt_libs (Poetry project)
  - package-ecosystem: "pip"
    directory: "autogpt_platform/autogpt_libs"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(libs/deps)"
      prefix-development: "chore(libs/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"

  # backend (Poetry project)
  - package-ecosystem: "pip"
    directory: "autogpt_platform/backend"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(backend/deps)"
      prefix-development: "chore(backend/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"

  # frontend (Next.js project)
  - package-ecosystem: "npm"
    directory: "autogpt_platform/frontend"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    target-branch: "dev"
    commit-message:
      prefix: "chore(frontend/deps)"
      prefix-development: "chore(frontend/deps-dev)"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"

  # infra (Terraform)
  - package-ecosystem: "terraform"
    directory: "autogpt_platform/infra"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    target-branch: "dev"
    commit-message:
      prefix: "chore(infra/deps)"
      prefix-development: "chore(infra/deps-dev)"

    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"


  # GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    target-branch: "dev"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"


  # Docker
  - package-ecosystem: "docker"
    directory: "autogpt_platform/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 5
    target-branch: "dev"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"

  # Docs
  - package-ecosystem: 'pip'
    directory: "docs/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 1
    target-branch: "dev"
    commit-message:
      prefix: "chore(docs/deps)"
    groups:
      production-dependencies:
        dependency-type: "production"
        update-types:
          - "minor"
          - "patch"
      development-dependencies:
        dependency-type: "development"
        update-types:
          - "minor"
          - "patch"
31 .github/labeler.yml (vendored)
@@ -1,32 +1,27 @@
Classic AutoGPT Agent:
AutoGPT Agent:
- changed-files:
  - any-glob-to-any-file: classic/original_autogpt/**

Classic Benchmark:
- changed-files:
  - any-glob-to-any-file: classic/benchmark/**

Classic Frontend:
- changed-files:
  - any-glob-to-any-file: classic/frontend/**

Forge:
- changed-files:
  - any-glob-to-any-file: classic/forge/**

Benchmark:
- changed-files:
  - any-glob-to-any-file: classic/benchmark/**

Frontend:
- changed-files:
  - any-glob-to-any-file: classic/frontend/**

documentation:
- changed-files:
  - any-glob-to-any-file: docs/**

platform/frontend:
Builder:
- changed-files:
  - any-glob-to-any-file: autogpt_platform/frontend/**
  - any-glob-to-any-file: autogpt_platform/autogpt_builder/**

platform/backend:
Server:
- changed-files:
  - any-glob-to-any-file: autogpt_platform/backend/**
  - all-globs-to-all-files: '!autogpt_platform/backend/backend/blocks/**'

platform/blocks:
- changed-files:
  - any-glob-to-any-file: autogpt_platform/backend/backend/blocks/**
  - any-glob-to-any-file: autogpt_platform/autogpt_server/**
13 .github/workflows/classic-autogpt-ci.yml (vendored)
@@ -2,12 +2,12 @@ name: Classic - AutoGPT CI

on:
  push:
    branches: [ master, dev, ci-test* ]
    branches: [ master, development, ci-test* ]
    paths:
      - '.github/workflows/classic-autogpt-ci.yml'
      - 'classic/original_autogpt/**'
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - '.github/workflows/classic-autogpt-ci.yml'
      - 'classic/original_autogpt/**'

@@ -115,7 +115,6 @@ jobs:
        poetry run pytest -vv \
          --cov=autogpt --cov-branch --cov-report term-missing --cov-report xml \
          --numprocesses=logical --durations=10 \
          --junitxml=junit.xml -o junit_family=legacy \
          tests/unit tests/integration
      env:
        CI: true

@@ -125,14 +124,8 @@ jobs:
        AWS_ACCESS_KEY_ID: minioadmin
        AWS_SECRET_ACCESS_KEY: minioadmin

    - name: Upload test results to Codecov
      if: ${{ !cancelled() }} # Run even if tests fail
      uses: codecov/test-results-action@v1
      with:
        token: ${{ secrets.CODECOV_TOKEN }}

    - name: Upload coverage reports to Codecov
      uses: codecov/codecov-action@v5
      uses: codecov/codecov-action@v4
      with:
        token: ${{ secrets.CODECOV_TOKEN }}
        flags: autogpt-agent,${{ runner.os }}
@@ -5,7 +5,7 @@ on:
    - cron: 20 4 * * 1,4

env:
  BASE_BRANCH: dev
  BASE_BRANCH: development
  IMAGE_NAME: auto-gpt

jobs:

@@ -15,46 +15,46 @@ jobs:
      matrix:
        build-type: [release, dev]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - id: build
        name: Build image
        uses: docker/build-push-action@v6
        with:
          context: classic/
          file: classic/Dockerfile.autogpt
          build-args: BUILD_TYPE=${{ matrix.build-type }}
          load: true # save to docker images
          # use GHA cache as read-only
          cache-to: type=gha,scope=autogpt-docker-${{ matrix.build-type }},mode=max
      - id: build
        name: Build image
        uses: docker/build-push-action@v5
        with:
          context: classic/
          file: classic/Dockerfile.autogpt
          build-args: BUILD_TYPE=${{ matrix.build-type }}
          load: true # save to docker images
          # use GHA cache as read-only
          cache-to: type=gha,scope=autogpt-docker-${{ matrix.build-type }},mode=max

      - name: Generate build report
        env:
          event_name: ${{ github.event_name }}
          event_ref: ${{ github.event.schedule }}

          build_type: ${{ matrix.build-type }}

          prod_branch: master
          dev_branch: dev
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'dev' && 'dev' || 'master' }}
          prod_branch: master
          dev_branch: development
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'development' && 'development' || 'master' }}

          current_ref: ${{ github.ref_name }}
          commit_hash: ${{ github.sha }}
          source_url: ${{ format('{0}/tree/{1}', github.event.repository.url, github.sha) }}
          push_forced_label:

          new_commits_json: ${{ null }}
          compare_url_template: ${{ format('/{0}/compare/{{base}}...{{head}}', github.repository) }}

          github_context_json: ${{ toJSON(github) }}
          job_env_json: ${{ toJSON(env) }}
          vars_json: ${{ toJSON(vars) }}

        run: .github/workflows/scripts/docker-ci-summary.sh >> $GITHUB_STEP_SUMMARY
        continue-on-error: true
94 .github/workflows/classic-autogpt-docker-ci.yml (vendored)
@@ -2,13 +2,13 @@ name: Classic - AutoGPT Docker CI

on:
  push:
    branches: [master, dev]
    branches: [ master, development ]
    paths:
      - '.github/workflows/classic-autogpt-docker-ci.yml'
      - 'classic/original_autogpt/**'
      - 'classic/forge/**'
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - '.github/workflows/classic-autogpt-docker-ci.yml'
      - 'classic/original_autogpt/**'

@@ -34,58 +34,58 @@ jobs:
      matrix:
        build-type: [release, dev]
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - if: runner.debug
        run: |
          ls -al
          du -hs *

      - id: build
        name: Build image
        uses: docker/build-push-action@v6
        with:
          context: classic/
          file: classic/Dockerfile.autogpt
          build-args: BUILD_TYPE=${{ matrix.build-type }}
          tags: ${{ env.IMAGE_NAME }}
          labels: GIT_REVISION=${{ github.sha }}
          load: true # save to docker images
          # cache layers in GitHub Actions cache to speed up builds
          cache-from: type=gha,scope=autogpt-docker-${{ matrix.build-type }}
          cache-to: type=gha,scope=autogpt-docker-${{ matrix.build-type }},mode=max
      - id: build
        name: Build image
        uses: docker/build-push-action@v5
        with:
          context: classic/
          file: classic/Dockerfile.autogpt
          build-args: BUILD_TYPE=${{ matrix.build-type }}
          tags: ${{ env.IMAGE_NAME }}
          labels: GIT_REVISION=${{ github.sha }}
          load: true # save to docker images
          # cache layers in GitHub Actions cache to speed up builds
          cache-from: type=gha,scope=autogpt-docker-${{ matrix.build-type }}
          cache-to: type=gha,scope=autogpt-docker-${{ matrix.build-type }},mode=max

      - name: Generate build report
        env:
          event_name: ${{ github.event_name }}
          event_ref: ${{ github.event.ref }}
          event_ref_type: ${{ github.event.ref}}

          build_type: ${{ matrix.build-type }}

          prod_branch: master
          dev_branch: dev
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'dev' && 'dev' || 'master' }}
          prod_branch: master
          dev_branch: development
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'development' && 'development' || 'master' }}

          current_ref: ${{ github.ref_name }}
          commit_hash: ${{ github.event.after }}
          source_url: ${{ format('{0}/tree/{1}', github.event.repository.url, github.event.release && github.event.release.tag_name || github.sha) }}
          push_forced_label: ${{ github.event.forced && '☢️ forced' || '' }}

          new_commits_json: ${{ toJSON(github.event.commits) }}
          compare_url_template: ${{ format('/{0}/compare/{{base}}...{{head}}', github.repository) }}

          github_context_json: ${{ toJSON(github) }}
          job_env_json: ${{ toJSON(env) }}
          vars_json: ${{ toJSON(vars) }}

        run: .github/workflows/scripts/docker-ci-summary.sh >> $GITHUB_STEP_SUMMARY
        continue-on-error: true

  test:
    runs-on: ubuntu-latest

@@ -117,16 +117,16 @@ jobs:

      - id: build
        name: Build image
        uses: docker/build-push-action@v6
        uses: docker/build-push-action@v5
        with:
          context: classic/
          file: classic/Dockerfile.autogpt
          build-args: BUILD_TYPE=dev # include pytest
          tags: >
            ${{ env.IMAGE_NAME }},
            ${{ env.DEPLOY_IMAGE_NAME }}:${{ env.DEV_IMAGE_TAG }}
          labels: GIT_REVISION=${{ github.sha }}
          load: true # save to docker images
          # cache layers in GitHub Actions cache to speed up builds
          cache-from: type=gha,scope=autogpt-docker-dev
          cache-to: type=gha,scope=autogpt-docker-dev,mode=max
112 .github/workflows/classic-autogpt-docker-release.yml (vendored)
@@ -2,7 +2,7 @@ name: Classic - AutoGPT Docker Release

on:
  release:
    types: [published, edited]
    types: [ published, edited ]

  workflow_dispatch:
    inputs:

@@ -19,69 +19,69 @@ jobs:
    if: startsWith(github.ref, 'refs/tags/autogpt-')
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Log in to Docker hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USER }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      # slashes are not allowed in image tags, but can appear in git branch or tag names
      - id: sanitize_tag
        name: Sanitize image tag
        run: |
          tag=${raw_tag//\//-}
          echo tag=${tag#autogpt-} >> $GITHUB_OUTPUT
        env:
          raw_tag: ${{ github.ref_name }}

      - id: build
        name: Build image
        uses: docker/build-push-action@v6
        with:
          context: classic/
          file: Dockerfile.autogpt
          build-args: BUILD_TYPE=release
          load: true # save to docker images
          # push: true # TODO: uncomment when this issue is fixed: https://github.com/moby/buildkit/issues/1555
          tags: >
            ${{ env.IMAGE_NAME }},
            ${{ env.DEPLOY_IMAGE_NAME }}:latest,
            ${{ env.DEPLOY_IMAGE_NAME }}:${{ steps.sanitize_tag.outputs.tag }}
          labels: GIT_REVISION=${{ github.sha }}
      - id: build
        name: Build image
        uses: docker/build-push-action@v5
        with:
          context: classic/
          file: Dockerfile.autogpt
          build-args: BUILD_TYPE=release
          load: true # save to docker images
          # push: true # TODO: uncomment when this issue is fixed: https://github.com/moby/buildkit/issues/1555
          tags: >
            ${{ env.IMAGE_NAME }},
            ${{ env.DEPLOY_IMAGE_NAME }}:latest,
            ${{ env.DEPLOY_IMAGE_NAME }}:${{ steps.sanitize_tag.outputs.tag }}
          labels: GIT_REVISION=${{ github.sha }}

          # cache layers in GitHub Actions cache to speed up builds
          cache-from: ${{ !inputs.no_cache && 'type=gha' || '' }},scope=autogpt-docker-release
          cache-to: type=gha,scope=autogpt-docker-release,mode=max

      - name: Push image to Docker Hub
        run: docker push --all-tags ${{ env.DEPLOY_IMAGE_NAME }}

      - name: Generate build report
        env:
          event_name: ${{ github.event_name }}
          event_ref: ${{ github.event.ref }}
          event_ref_type: ${{ github.event.ref}}
          inputs_no_cache: ${{ inputs.no_cache }}

          prod_branch: master
          dev_branch: dev
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'dev' && 'dev' || 'master' }}
          prod_branch: master
          dev_branch: development
          repository: ${{ github.repository }}
          base_branch: ${{ github.ref_name != 'master' && github.ref_name != 'development' && 'development' || 'master' }}

          ref_type: ${{ github.ref_type }}
          current_ref: ${{ github.ref_name }}
          commit_hash: ${{ github.sha }}
          source_url: ${{ format('{0}/tree/{1}', github.event.repository.url, github.event.release && github.event.release.tag_name || github.sha) }}

          github_context_json: ${{ toJSON(github) }}
          job_env_json: ${{ toJSON(env) }}
          vars_json: ${{ toJSON(vars) }}

        run: .github/workflows/scripts/docker-release-summary.sh >> $GITHUB_STEP_SUMMARY
        continue-on-error: true
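The `Sanitize image tag` step above relies on two bash parameter expansions: `${raw_tag//\//-}` replaces every `/` with `-` (slashes are not allowed in Docker image tags), and `${tag#autogpt-}` strips the `autogpt-` prefix used on release tags. A minimal standalone sketch, using an illustrative tag value rather than the workflow's `${{ github.ref_name }}`:

```bash
#!/usr/bin/env bash
# Illustrative input; in the workflow this value comes from ${{ github.ref_name }}.
raw_tag="autogpt-v0.5.0/rc1"

# Replace all "/" characters with "-" so the result is a valid image tag.
tag=${raw_tag//\//-}          # -> "autogpt-v0.5.0-rc1"

# Strip the leading "autogpt-" release-tag prefix.
echo "tag=${tag#autogpt-}"    # -> "tag=v0.5.0-rc1"
```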
4 .github/workflows/classic-autogpts-ci.yml (vendored)
@@ -5,7 +5,7 @@ on:
  schedule:
    - cron: '0 8 * * *'
  push:
    branches: [ master, dev, ci-test* ]
    branches: [ master, development, ci-test* ]
    paths:
      - '.github/workflows/classic-autogpts-ci.yml'
      - 'classic/original_autogpt/**'

@@ -16,7 +16,7 @@ on:
      - 'classic/setup.py'
      - '!**/*.md'
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - '.github/workflows/classic-autogpts-ci.yml'
      - 'classic/original_autogpt/**'
37 .github/workflows/classic-benchmark-ci.yml (vendored)
@@ -2,13 +2,13 @@ name: Classic - AGBenchmark CI

on:
  push:
    branches: [ master, dev, ci-test* ]
    branches: [ master, development, ci-test* ]
    paths:
      - 'classic/benchmark/**'
      - '!classic/benchmark/reports/**'
      - .github/workflows/classic-benchmark-ci.yml
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - 'classic/benchmark/**'
      - '!classic/benchmark/reports/**'

@@ -87,20 +87,13 @@ jobs:
        poetry run pytest -vv \
          --cov=agbenchmark --cov-branch --cov-report term-missing --cov-report xml \
          --durations=10 \
          --junitxml=junit.xml -o junit_family=legacy \
          tests
      env:
        CI: true
        OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

    - name: Upload test results to Codecov
      if: ${{ !cancelled() }} # Run even if tests fail
      uses: codecov/test-results-action@v1
      with:
        token: ${{ secrets.CODECOV_TOKEN }}

    - name: Upload coverage reports to Codecov
      uses: codecov/codecov-action@v5
      uses: codecov/codecov-action@v4
      with:
        token: ${{ secrets.CODECOV_TOKEN }}
        flags: agbenchmark,${{ runner.os }}

@@ -109,7 +102,7 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        agent-name: [forge]
        agent-name: [ forge ]
      fail-fast: false
    timeout-minutes: 20
    steps:

@@ -153,23 +146,23 @@ jobs:
        echo "Running the following command: poetry run agbenchmark --mock --category=coding"
        poetry run agbenchmark --mock --category=coding

        # echo "Running the following command: poetry run agbenchmark --test=WriteFile"
        # poetry run agbenchmark --test=WriteFile
        echo "Running the following command: poetry run agbenchmark --test=WriteFile"
        poetry run agbenchmark --test=WriteFile
        cd ../benchmark
        poetry install
        echo "Adding the BUILD_SKILL_TREE environment variable. This will attempt to add new elements in the skill tree. If new elements are added, the CI fails because they should have been pushed"
        export BUILD_SKILL_TREE=true

        # poetry run agbenchmark --mock
        poetry run agbenchmark --mock

        # CHANGED=$(git diff --name-only | grep -E '(agbenchmark/challenges)|(../classic/frontend/assets)') || echo "No diffs"
        # if [ ! -z "$CHANGED" ]; then
        # echo "There are unstaged changes please run agbenchmark and commit those changes since they are needed."
        # echo "$CHANGED"
        # exit 1
        # else
        # echo "No unstaged changes."
        # fi
        CHANGED=$(git diff --name-only | grep -E '(agbenchmark/challenges)|(../classic/frontend/assets)') || echo "No diffs"
        if [ ! -z "$CHANGED" ]; then
          echo "There are unstaged changes please run agbenchmark and commit those changes since they are needed."
          echo "$CHANGED"
          exit 1
        else
          echo "No unstaged changes."
        fi
      env:
        OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
        TELEMETRY_ENVIRONMENT: autogpt-benchmark-ci
13 .github/workflows/classic-forge-ci.yml (vendored)
@@ -2,13 +2,13 @@ name: Classic - Forge CI

on:
  push:
    branches: [ master, dev, ci-test* ]
    branches: [ master, development, ci-test* ]
    paths:
      - '.github/workflows/classic-forge-ci.yml'
      - 'classic/forge/**'
      - '!classic/forge/tests/vcr_cassettes'
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - '.github/workflows/classic-forge-ci.yml'
      - 'classic/forge/**'

@@ -139,7 +139,6 @@ jobs:
        poetry run pytest -vv \
          --cov=forge --cov-branch --cov-report term-missing --cov-report xml \
          --durations=10 \
          --junitxml=junit.xml -o junit_family=legacy \
          forge
      env:
        CI: true

@@ -149,14 +148,8 @@ jobs:
        AWS_ACCESS_KEY_ID: minioadmin
        AWS_SECRET_ACCESS_KEY: minioadmin

    - name: Upload test results to Codecov
      if: ${{ !cancelled() }} # Run even if tests fail
      uses: codecov/test-results-action@v1
      with:
        token: ${{ secrets.CODECOV_TOKEN }}

    - name: Upload coverage reports to Codecov
      uses: codecov/codecov-action@v5
      uses: codecov/codecov-action@v4
      with:
        token: ${{ secrets.CODECOV_TOKEN }}
        flags: forge,${{ runner.os }}
68 .github/workflows/classic-frontend-ci.yml (vendored)
@@ -4,15 +4,15 @@ on:
  push:
    branches:
      - master
      - dev
      - development
      - 'ci-test*' # This will match any branch that starts with "ci-test"
    paths:
      - 'classic/frontend/**'
      - '.github/workflows/classic-frontend-ci.yml'
      - '.github/workflows/frontend-ci.yml'
  pull_request:
    paths:
      - 'classic/frontend/**'
      - '.github/workflows/classic-frontend-ci.yml'
      - '.github/workflows/frontend-ci.yml'

jobs:
  build:

@@ -21,40 +21,40 @@ jobs:
      pull-requests: write
    runs-on: ubuntu-latest
    env:
      BUILD_BRANCH: ${{ format('classic-frontend-build/{0}', github.ref_name) }}
      BUILD_BRANCH: ${{ format('frontend-build/{0}', github.ref_name) }}

    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4

      - name: Setup Flutter
        uses: subosito/flutter-action@v2
        with:
          flutter-version: '3.13.2'

      - name: Build Flutter to Web
        run: |
          cd classic/frontend
          flutter build web --base-href /app/

      # - name: Commit and Push to ${{ env.BUILD_BRANCH }}
      #   if: github.event_name == 'push'
      #   run: |
      #     git config --local user.email "action@github.com"
      #     git config --local user.name "GitHub Action"
      #     git add classic/frontend/build/web
      #     git checkout -B ${{ env.BUILD_BRANCH }}
      #     git commit -m "Update frontend build to ${GITHUB_SHA:0:7}" -a
      #     git push -f origin ${{ env.BUILD_BRANCH }}

      - name: Create PR ${{ env.BUILD_BRANCH }} -> ${{ github.ref_name }}
        if: github.event_name == 'push'
        uses: peter-evans/create-pull-request@v7
        with:
          add-paths: classic/frontend/build/web
          base: ${{ github.ref_name }}
          branch: ${{ env.BUILD_BRANCH }}
          delete-branch: true
          title: "Update frontend build in `${{ github.ref_name }}`"
          body: "This PR updates the frontend build based on commit ${{ github.sha }}."
          commit-message: "Update frontend build based on commit ${{ github.sha }}"
      - name: Create PR ${{ env.BUILD_BRANCH }} -> ${{ github.ref_name }}
        if: github.event_name == 'push'
        uses: peter-evans/create-pull-request@v6
        with:
          add-paths: classic/frontend/build/web
          base: ${{ github.ref_name }}
          branch: ${{ env.BUILD_BRANCH }}
          delete-branch: true
          title: "Update frontend build in `${{ github.ref_name }}`"
          body: "This PR updates the frontend build based on commit ${{ github.sha }}."
          commit-message: "Update frontend build based on commit ${{ github.sha }}"
10 .github/workflows/classic-python-checks.yml (vendored)
@@ -2,18 +2,18 @@ name: Classic - Python checks

on:
  push:
    branches: [ master, dev, ci-test* ]
    branches: [ master, development, ci-test* ]
    paths:
      - '.github/workflows/classic-python-checks-ci.yml'
      - '.github/workflows/lint-ci.yml'
      - 'classic/original_autogpt/**'
      - 'classic/forge/**'
      - 'classic/benchmark/**'
      - '**.py'
      - '!classic/forge/tests/vcr_cassettes'
  pull_request:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths:
      - '.github/workflows/classic-python-checks-ci.yml'
      - '.github/workflows/lint-ci.yml'
      - 'classic/original_autogpt/**'
      - 'classic/forge/**'
      - 'classic/benchmark/**'

@@ -21,7 +21,7 @@ on:
      - '!classic/forge/tests/vcr_cassettes'

concurrency:
  group: ${{ format('classic-python-checks-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
  group: ${{ format('lint-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
  cancel-in-progress: ${{ startsWith(github.event_name, 'pull_request') }}

defaults:
98 .github/workflows/codeql.yml (vendored)
@@ -1,98 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "master", "release-*", "dev" ]
  pull_request:
    branches: [ "master", "release-*", "dev" ]
  merge_group:
  schedule:
    - cron: '15 4 * * 0'

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    # - https://gh.io/recommended-hardware-resources-for-running-codeql
    # - https://gh.io/supported-runners-and-hardware-resources
    # - https://gh.io/using-larger-runners (GitHub.com only)
    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    permissions:
      # required for all workflows
      security-events: write

      # required to fetch internal or private CodeQL packs
      packages: read

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        include:
          - language: typescript
            build-mode: none
          - language: python
            build-mode: none
        # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
        # Use `c-cpp` to analyze code written in C, C++ or both
        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          config: |
            paths-ignore:
              - classic/frontend/build/**

          # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
          # queries: security-extended,security-and-quality

      # If the analyze step fails for one of the languages you are analyzing with
      # "We were unable to automatically build your code", modify the matrix above
      # to set the build mode to "manual" for that language. Then modify this step
      # to build your code.
      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
      - if: matrix.build-mode == 'manual'
        shell: bash
        run: |
          echo 'If you are using a "manual" build mode for one or more of the' \
            'languages you are analyzing, replace this with the commands to build' \
            'your code, for example:'
          echo '  make bootstrap'
          echo '  make release'
          exit 1

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
@@ -1,49 +0,0 @@
name: AutoGPT Platform - Deploy Prod Environment

on:
  release:
    types: [published]

permissions:
  contents: 'read'
  id-token: 'write'

jobs:
  migrate:
    environment: production
    name: Run migrations for AutoGPT Platform
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install prisma

      - name: Run Backend Migrations
        working-directory: ./autogpt_platform/backend
        run: |
          python -m prisma migrate deploy
        env:
          DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}


  trigger:
    needs: migrate
    runs-on: ubuntu-latest
    steps:
      - name: Trigger deploy workflow
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DEPLOY_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure
          event-type: build_deploy_prod
          client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}", "repository": "${{ github.repository }}"}'
41 .github/workflows/platform-autogpt-builder-ci.yml (vendored, new file)
@@ -0,0 +1,41 @@
name: Platform - AutoGPT Builder CI

on:
  push:
    branches: [ master ]
    paths:
      - '.github/workflows/autogpt-builder-ci.yml'
      - 'autogpt_platform/autogpt_builder/**'
  pull_request:
    paths:
      - '.github/workflows/autogpt-builder-ci.yml'
      - 'autogpt_platform/autogpt_builder/**'

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/autogpt_builder

jobs:

  lint:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: '21'

      - name: Install dependencies
        run: |
          npm install

      - name: Check formatting with Prettier
        run: |
          npx prettier --check .

      - name: Run lint
        run: |
          npm run lint
@@ -1,50 +0,0 @@
name: AutoGPT Platform - Deploy Dev Environment

on:
  push:
    branches: [ dev ]
    paths:
      - 'autogpt_platform/**'

permissions:
  contents: 'read'
  id-token: 'write'

jobs:
  migrate:
    environment: develop
    name: Run migrations for AutoGPT Platform
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install prisma

      - name: Run Backend Migrations
        working-directory: ./autogpt_platform/backend
        run: |
          python -m prisma migrate deploy
        env:
          DATABASE_URL: ${{ secrets.BACKEND_DATABASE_URL }}

  trigger:
    needs: migrate
    runs-on: ubuntu-latest
    steps:
      - name: Trigger deploy workflow
        uses: peter-evans/repository-dispatch@v3
        with:
          token: ${{ secrets.DEPLOY_TOKEN }}
          repository: Significant-Gravitas/AutoGPT_cloud_infrastructure
          event-type: build_deploy_dev
          client-payload: '{"ref": "${{ github.ref }}", "sha": "${{ github.sha }}", "repository": "${{ github.repository }}"}'
56 .github/workflows/platform-autogpt-infra-ci.yml (vendored, new file)
@@ -0,0 +1,56 @@
name: Platform - AutoGPT Builder Infra

on:
  push:
    branches: [ master ]
    paths:
      - '.github/workflows/autogpt-infra-ci.yml'
      - 'autogpt_platform/infra/**'
  pull_request:
    paths:
      - '.github/workflows/autogpt-infra-ci.yml'
      - 'autogpt_platform/infra/**'

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/infra

jobs:
  lint:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - name: TFLint
        uses: pauloconnor/tflint-action@v0.0.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          tflint_path: terraform/
          tflint_recurse: true
          tflint_changed_only: false

      - name: Set up Helm
        uses: azure/setup-helm@v4.2.0
        with:
          version: v3.14.4

      - name: Set up chart-testing
        uses: helm/chart-testing-action@v2.6.0

      - name: Run chart-testing (list-changed)
        id: list-changed
        run: |
          changed=$(ct list-changed --target-branch ${{ github.event.repository.default_branch }})
          if [[ -n "$changed" ]]; then
            echo "changed=true" >> "$GITHUB_OUTPUT"
          fi

      - name: Run chart-testing (lint)
        if: steps.list-changed.outputs.changed == 'true'
        run: ct lint --target-branch ${{ github.event.repository.default_branch }}
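The two chart-testing steps above use a "detect, then gate" pattern: the first step writes `changed=true` to `$GITHUB_OUTPUT` only when `ct list-changed` prints something, and the lint step runs only if that output was set. A rough local approximation of the shell half, assuming `ct` is installed, with `master` standing in for the repository's default branch and a temp file simulating `GITHUB_OUTPUT`:

```bash
#!/usr/bin/env bash
# Simulate the step's output file for local experimentation (illustrative only).
GITHUB_OUTPUT=$(mktemp)

# List charts that differ from the target branch; empty output means nothing to lint.
changed=$(ct list-changed --target-branch master)
if [[ -n "$changed" ]]; then
  echo "changed=true" >> "$GITHUB_OUTPUT"
fi

# In the workflow, later steps read this via steps.list-changed.outputs.changed.
cat "$GITHUB_OUTPUT"
```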
155 .github/workflows/platform-autogpt-server-ci.yml (vendored, new file)
@@ -0,0 +1,155 @@
name: Platform - AutoGPT Server CI

on:
  push:
    branches: [master, development, ci-test*]
    paths:
      - ".github/workflows/autogpt-server-ci.yml"
      - "autogpt_platform/autogpt_server/**"
  pull_request:
    branches: [master, development, release-*]
    paths:
      - ".github/workflows/autogpt-server-ci.yml"
      - "autogpt_platform/autogpt_server/**"

concurrency:
  group: ${{ format('autogpt-server-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
  cancel-in-progress: ${{ startsWith(github.event_name, 'pull_request') }}

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/autogpt_server

jobs:
  test:
    permissions:
      contents: read
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
        platform-os: [ubuntu, macos, macos-arm64, windows]
    runs-on: ${{ matrix.platform-os != 'macos-arm64' && format('{0}-latest', matrix.platform-os) || 'macos-14' }}

    steps:
      - name: Setup PostgreSQL
        uses: ikalnytskyi/action-setup-postgres@v6
        with:
          username: ${{ secrets.DB_USER || 'postgres' }}
          password: ${{ secrets.DB_PASS || 'postgres' }}
          database: postgres
          port: 5432
        id: postgres

      # Quite slow on macOS (2~4 minutes to set up Docker)
      # - name: Set up Docker (macOS)
      #   if: runner.os == 'macOS'
      #   uses: crazy-max/ghaction-setup-docker@v3

      - name: Start MinIO service (Linux)
        if: runner.os == 'Linux'
        working-directory: "."
        run: |
          docker pull minio/minio:edge-cicd
          docker run -d -p 9000:9000 minio/minio:edge-cicd

      - name: Start MinIO service (macOS)
        if: runner.os == 'macOS'
        working-directory: ${{ runner.temp }}
        run: |
          brew install minio/stable/minio
          mkdir data
          minio server ./data &

      # No MinIO on Windows:
      # - Windows doesn't support running Linux Docker containers
      # - It doesn't seem possible to start background processes on Windows. They are
      #   killed after the step returns.
      #   See: https://github.com/actions/runner/issues/598#issuecomment-2011890429

      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: true

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - id: get_date
        name: Get date
        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT

      - name: Set up Python dependency cache
        # On Windows, unpacking cached dependencies takes longer than just installing them
        if: runner.os != 'Windows'
        uses: actions/cache@v4
        with:
          path: ${{ runner.os == 'macOS' && '~/Library/Caches/pypoetry' || '~/.cache/pypoetry' }}
          key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/autogpt_server/poetry.lock') }}

      - name: Install Poetry (Unix)
        if: runner.os != 'Windows'
        run: |
          curl -sSL https://install.python-poetry.org | python3 -

          if [ "${{ runner.os }}" = "macOS" ]; then
            PATH="$HOME/.local/bin:$PATH"
            echo "$HOME/.local/bin" >> $GITHUB_PATH
          fi

      - name: Install Poetry (Windows)
        if: runner.os == 'Windows'
        shell: pwsh
        run: |
          (Invoke-WebRequest -Uri https://install.python-poetry.org -UseBasicParsing).Content | python -

          $env:PATH += ";$env:APPDATA\Python\Scripts"
          echo "$env:APPDATA\Python\Scripts" >> $env:GITHUB_PATH

      - name: Install Python dependencies
        run: poetry install

      - name: Generate Prisma Client
        run: poetry run prisma generate

      - name: Run Database Migrations
        run: poetry run prisma migrate dev --name updates
        env:
          CONNECTION_STR: ${{ steps.postgres.outputs.connection-uri }}

      - id: lint
        name: Run Linter
        run: poetry run lint

      - name: Run pytest with coverage
        run: |
          if [[ "${{ runner.debug }}" == "1" ]]; then
            poetry run pytest -vv -o log_cli=true -o log_cli_level=DEBUG test
          else
            poetry run pytest -vv test
          fi
        if: success() || (failure() && steps.lint.outcome == 'failure')
        env:
          LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
    env:
      CI: true
      PLAIN_OUTPUT: True
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      DB_USER: ${{ secrets.DB_USER || 'postgres' }}
      DB_PASS: ${{ secrets.DB_PASS || 'postgres' }}
      DB_NAME: postgres
      DB_PORT: 5432
      RUN_ENV: local
      PORT: 8080
      DATABASE_URL: postgresql://${{ secrets.DB_USER || 'postgres' }}:${{ secrets.DB_PASS || 'postgres' }}@localhost:5432/${{ secrets.DB_NAME || 'postgres'}}

    # - name: Upload coverage reports to Codecov
    #   uses: codecov/codecov-action@v4
    #   with:
    #     token: ${{ secrets.CODECOV_TOKEN }}
    #     flags: autogpt-server,${{ runner.os }}
162
.github/workflows/platform-backend-ci.yml
vendored
162
.github/workflows/platform-backend-ci.yml
vendored
@@ -1,162 +0,0 @@
name: AutoGPT Platform - Backend CI

on:
  push:
    branches: [master, dev, ci-test*]
    paths:
      - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
      - "autogpt_platform/autogpt_libs/**"
  pull_request:
    branches: [master, dev, release-*]
    paths:
      - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
      - "autogpt_platform/autogpt_libs/**"
  merge_group:

concurrency:
  group: ${{ format('backend-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
  cancel-in-progress: ${{ startsWith(github.event_name, 'pull_request') }}

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/backend

jobs:
  test:
    permissions:
      contents: read
    timeout-minutes: 30
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
    runs-on: ubuntu-latest

    services:
      redis:
        image: bitnami/redis:6.2
        env:
          REDIS_PASSWORD: testpassword
        ports:
          - 6379:6379
      rabbitmq:
        image: rabbitmq:3.12-management
        ports:
          - 5672:5672
          - 15672:15672
        env:
          RABBITMQ_DEFAULT_USER: ${{ env.RABBITMQ_DEFAULT_USER }}
          RABBITMQ_DEFAULT_PASS: ${{ env.RABBITMQ_DEFAULT_PASS }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: true

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      - name: Setup Supabase
        uses: supabase/setup-cli@v1
        with:
          version: 1.178.1

      - id: get_date
        name: Get date
        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT

      - name: Set up Python dependency cache
        uses: actions/cache@v4
        with:
          path: ~/.cache/pypoetry
          key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }}

      - name: Install Poetry (Unix)
        run: |
          curl -sSL https://install.python-poetry.org | python3 -

          if [ "${{ runner.os }}" = "macOS" ]; then
            PATH="$HOME/.local/bin:$PATH"
            echo "$HOME/.local/bin" >> $GITHUB_PATH
          fi

      - name: Check poetry.lock
        run: |
          poetry lock

          if ! git diff --quiet poetry.lock; then
            echo "Error: poetry.lock not up to date."
            echo
            git diff poetry.lock
            exit 1
          fi

      - name: Install Python dependencies
        run: poetry install

      - name: Generate Prisma Client
        run: poetry run prisma generate

      - id: supabase
        name: Start Supabase
        working-directory: .
        run: |
          supabase init
          supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
          supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
          # outputs:
          # DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET

      - name: Run Database Migrations
        run: poetry run prisma migrate dev --name updates
        env:
          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}

      - id: lint
        name: Run Linter
        run: poetry run lint

      - name: Run pytest with coverage
        run: |
          if [[ "${{ runner.debug }}" == "1" ]]; then
            poetry run pytest -s -vv -o log_cli=true -o log_cli_level=DEBUG test
          else
            poetry run pytest -s -vv test
          fi
        if: success() || (failure() && steps.lint.outcome == 'failure')
        env:
          LOG_LEVEL: ${{ runner.debug && 'DEBUG' || 'INFO' }}
          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
          SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
          SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
          SUPABASE_JWT_SECRET: ${{ steps.supabase.outputs.JWT_SECRET }}
          REDIS_HOST: 'localhost'
          REDIS_PORT: '6379'
          REDIS_PASSWORD: 'testpassword'

    env:
      CI: true
      PLAIN_OUTPUT: True
      RUN_ENV: local
      PORT: 8080
      OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
      # We know these are here, don't report this as a security vulnerability
      # This is used as the default credential for the entire system's RabbitMQ instance
      # If you want to replace this, you can do so by making our entire system generate
      # new credentials for each local user and update the environment variables in
      # the backend service, docker composes, and examples
      RABBITMQ_DEFAULT_USER: 'rabbitmq_user_default'
      RABBITMQ_DEFAULT_PASS: 'k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7'

      # - name: Upload coverage reports to Codecov
      #   uses: codecov/codecov-action@v4
      #   with:
      #     token: ${{ secrets.CODECOV_TOKEN }}
      #     flags: backend,${{ runner.os }}
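A detail worth calling out in the `Start Supabase` step above: `supabase status -o env` prints `KEY="value"` pairs, and the `sed` expression strips the surrounding quotes so the lines become valid `KEY=value` entries for `$GITHUB_OUTPUT`. A minimal sketch of the same idea, assuming the Supabase CLI is installed and a local project is running (the stdout fallback is only there so the sketch can be run outside Actions):

```
# Convert `supabase status -o env` output (KEY="value") into KEY=value lines,
# the format GitHub Actions expects in $GITHUB_OUTPUT.
supabase status -o env \
  | sed 's/="/=/; s/"$//' \
  >> "${GITHUB_OUTPUT:-/dev/stdout}"
```

Later steps then read these values as `${{ steps.supabase.outputs.DB_URL }}` and so on, which is exactly how the migration and pytest steps above consume them.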
.github/workflows/platform-frontend-ci.yml (vendored, 121 changes)
@@ -1,121 +0,0 @@
name: AutoGPT Platform - Frontend CI

on:
  push:
    branches: [master, dev]
    paths:
      - ".github/workflows/platform-frontend-ci.yml"
      - "autogpt_platform/frontend/**"
  pull_request:
    paths:
      - ".github/workflows/platform-frontend-ci.yml"
      - "autogpt_platform/frontend/**"
  merge_group:

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/frontend

jobs:
  lint:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Install dependencies
        run: |
          yarn install --frozen-lockfile

      - name: Run lint
        run: |
          yarn lint

  type-check:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Install dependencies
        run: |
          yarn install --frozen-lockfile

      - name: Run tsc check
        run: |
          yarn type-check

  test:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        browser: [chromium, webkit]

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          submodules: recursive

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Free Disk Space (Ubuntu)
        uses: jlumbroso/free-disk-space@main
        with:
          large-packages: false # slow
          docker-images: false # limited benefit

      - name: Copy default supabase .env
        run: |
          cp ../.env.example ../.env

      - name: Copy backend .env
        run: |
          cp ../backend/.env.example ../backend/.env

      - name: Run docker compose
        run: |
          docker compose -f ../docker-compose.yml up -d

      - name: Install dependencies
        run: |
          yarn install --frozen-lockfile

      - name: Setup Builder .env
        run: |
          cp .env.example .env

      - name: Install Browser '${{ matrix.browser }}'
        run: yarn playwright install --with-deps ${{ matrix.browser }}

      - name: Run tests
        timeout-minutes: 20
        run: |
          yarn test --project=${{ matrix.browser }}

      - name: Print Final Docker Compose logs
        if: always()
        run: |
          docker compose -f ../docker-compose.yml logs

      - uses: actions/upload-artifact@v4
        if: ${{ !cancelled() }}
        with:
          name: playwright-report-${{ matrix.browser }}
          path: playwright-report/
          retention-days: 30
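The test job above can be reproduced on a workstation by running its commands directly; a minimal sketch, assuming Docker, Node.js, and Yarn are installed and you start from the repository root:

```
# Run the frontend E2E suite locally against one browser from the CI matrix.
cd autogpt_platform/frontend
cp ../.env.example ../.env                     # supabase stack config
cp ../backend/.env.example ../backend/.env     # backend config
cp .env.example .env                           # frontend ("Builder") config
docker compose -f ../docker-compose.yml up -d  # backend + supabase services
yarn install --frozen-lockfile
yarn playwright install --with-deps chromium   # or: webkit
yarn test --project=chromium
```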
@@ -25,7 +25,7 @@ jobs:
          close-issue-message: >
            This issue was closed automatically because it has been stale for 10 days
            with no activity.
          days-before-stale: 100
          days-before-stale: 50
          days-before-close: 10
          # Do not touch meta issues:
          exempt-issue-labels: meta,fridge,project management
@@ -1,21 +0,0 @@
name: Repo - Enforce dev as base branch
on:
  pull_request_target:
    branches: [ master ]
    types: [ opened ]

jobs:
  check_pr_target:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - name: Check if PR is from dev or hotfix
        if: ${{ !(startsWith(github.event.pull_request.head.ref, 'hotfix/') || github.event.pull_request.head.ref == 'dev') }}
        run: |
          gh pr comment ${{ github.event.number }} --repo "$REPO" \
            --body $'This PR targets the `master` branch but does not come from `dev` or a `hotfix/*` branch.\n\nAutomatically setting the base branch to `dev`.'
          gh pr edit ${{ github.event.number }} --base dev --repo "$REPO"
        env:
          GITHUB_TOKEN: ${{ github.token }}
          REPO: ${{ github.repository }}
.github/workflows/repo-pr-label.yml (vendored, 2 changes)
@@ -3,7 +3,7 @@ name: Repo - Pull Request auto-label
on:
  # So that PRs touching the same files as the push are updated
  push:
    branches: [ master, dev, release-* ]
    branches: [ master, development, release-* ]
    paths-ignore:
      - 'classic/forge/tests/vcr_cassettes'
      - 'classic/benchmark/reports/**'
.github/workflows/repo-workflow-checker.yml (vendored, 1 change)
@@ -2,7 +2,6 @@ name: Repo - PR Status Checker
on:
  pull_request:
    types: [opened, synchronize, reopened]
  merge_group:

jobs:
  status-check:

@@ -5,8 +5,6 @@ import sys
import time
from typing import Dict, List, Tuple

CHECK_INTERVAL = 30


def get_environment_variables() -> Tuple[str, str, str, str, str]:
    """Retrieve and return necessary environment variables."""
@@ -14,11 +12,7 @@ def get_environment_variables() -> Tuple[str, str, str, str, str]:
    with open(os.environ["GITHUB_EVENT_PATH"]) as f:
        event = json.load(f)

    # Handle both PR and merge group events
    if "pull_request" in event:
        sha = event["pull_request"]["head"]["sha"]
    else:
        sha = os.environ["GITHUB_SHA"]
    sha = event["pull_request"]["head"]["sha"]

    return (
        os.environ["GITHUB_API_URL"],
@@ -99,10 +93,9 @@ def main():
            break

        print(
            "Some check runs are still in progress. "
            f"Waiting {CHECK_INTERVAL} seconds before checking again..."
            "Some check runs are still in progress. Waiting 3 minutes before checking again..."
        )
        time.sleep(CHECK_INTERVAL)
        time.sleep(180)

    if all_others_passed:
        print("All other completed check runs have passed. This check passes.")
.gitignore (vendored, 7 changes)
@@ -170,9 +170,4 @@ pri*
ig*
.github_access_token
LICENSE.rtf
autogpt_platform/backend/settings.py
/.auth
/autogpt_platform/frontend/.auth

*.ign.*
.test-contents
autogpt_platform/autogpt_server/settings.py
.gitmodules (vendored, 3 changes)
@@ -1,3 +1,6 @@
[submodule "classic/forge/tests/vcr_cassettes"]
    path = classic/forge/tests/vcr_cassettes
    url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes
[submodule "autogpt_platform/supabase"]
    path = autogpt_platform/supabase
    url = https://github.com/supabase/supabase.git
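Because this change adds a new `autogpt_platform/supabase` submodule, a fresh clone needs the submodule initialized before the Supabase-based compose stack can start; a minimal sketch:

```
# Fetch the newly added Supabase submodule after cloning the repository.
git submodule update --init --recursive autogpt_platform/supabase
```

The platform README further down in this compare uses the same command without a path argument, which initializes every submodule at once.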
@@ -10,142 +10,39 @@ repos:
|
||||
- id: check-symlinks
|
||||
- id: debug-statements
|
||||
|
||||
- repo: https://github.com/Yelp/detect-secrets
|
||||
rev: v1.5.0
|
||||
hooks:
|
||||
- id: detect-secrets
|
||||
name: Detect secrets
|
||||
description: Detects high entropy strings that are likely to be passwords.
|
||||
files: ^autogpt_platform/
|
||||
stages: [push]
|
||||
|
||||
- repo: local
|
||||
# For proper type checking, all dependencies need to be up-to-date.
|
||||
# It's also a good idea to check that poetry.lock is consistent with pyproject.toml.
|
||||
hooks:
|
||||
- id: poetry-install
|
||||
name: Check & Install dependencies - AutoGPT Platform - Backend
|
||||
alias: poetry-install-platform-backend
|
||||
entry: poetry -C autogpt_platform/backend install
|
||||
# include autogpt_libs source (since it's a path dependency)
|
||||
files: ^autogpt_platform/(backend|autogpt_libs)/poetry\.lock$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-install
|
||||
name: Check & Install dependencies - AutoGPT Platform - Libs
|
||||
alias: poetry-install-platform-libs
|
||||
entry: poetry -C autogpt_platform/autogpt_libs install
|
||||
files: ^autogpt_platform/autogpt_libs/poetry\.lock$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-install
|
||||
name: Check & Install dependencies - Classic - AutoGPT
|
||||
alias: poetry-install-classic-autogpt
|
||||
entry: poetry -C classic/original_autogpt install
|
||||
# include forge source (since it's a path dependency)
|
||||
files: ^classic/(original_autogpt|forge)/poetry\.lock$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-install
|
||||
name: Check & Install dependencies - Classic - Forge
|
||||
alias: poetry-install-classic-forge
|
||||
entry: poetry -C classic/forge install
|
||||
files: ^classic/forge/poetry\.lock$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: poetry-install
|
||||
name: Check & Install dependencies - Classic - Benchmark
|
||||
alias: poetry-install-classic-benchmark
|
||||
entry: poetry -C classic/benchmark install
|
||||
files: ^classic/benchmark/poetry\.lock$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- repo: local
|
||||
# For proper type checking, Prisma client must be up-to-date.
|
||||
hooks:
|
||||
- id: prisma-generate
|
||||
name: Prisma Generate - AutoGPT Platform - Backend
|
||||
alias: prisma-generate-platform-backend
|
||||
entry: bash -c 'cd autogpt_platform/backend && poetry run prisma generate'
|
||||
# include everything that triggers poetry install + the prisma schema
|
||||
files: ^autogpt_platform/((backend|autogpt_libs)/poetry\.lock|backend/schema.prisma)$
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.7.2
|
||||
hooks:
|
||||
- id: ruff
|
||||
name: Lint (Ruff) - AutoGPT Platform - Backend
|
||||
alias: ruff-lint-platform-backend
|
||||
files: ^autogpt_platform/backend/
|
||||
args: [--fix]
|
||||
|
||||
- id: ruff
|
||||
name: Lint (Ruff) - AutoGPT Platform - Libs
|
||||
alias: ruff-lint-platform-libs
|
||||
files: ^autogpt_platform/autogpt_libs/
|
||||
args: [--fix]
|
||||
|
||||
- id: ruff-format
|
||||
name: Format (Ruff) - AutoGPT Platform - Libs
|
||||
alias: ruff-lint-platform-libs
|
||||
files: ^autogpt_platform/autogpt_libs/
|
||||
|
||||
- repo: local
|
||||
# isort needs the context of which packages are installed to function, so we
|
||||
# can't use a vendored isort pre-commit hook (which runs in its own isolated venv).
|
||||
hooks:
|
||||
- id: isort
|
||||
name: Lint (isort) - AutoGPT Platform - Backend
|
||||
alias: isort-platform-backend
|
||||
entry: poetry -P autogpt_platform/backend run isort -p backend
|
||||
files: ^autogpt_platform/backend/
|
||||
types: [file, python]
|
||||
language: system
|
||||
|
||||
- id: isort
|
||||
name: Lint (isort) - Classic - AutoGPT
|
||||
alias: isort-classic-autogpt
|
||||
entry: poetry -P classic/original_autogpt run isort -p autogpt
|
||||
- id: isort-autogpt
|
||||
name: Lint (isort) - AutoGPT
|
||||
entry: poetry -C classic/original_autogpt run isort
|
||||
files: ^classic/original_autogpt/
|
||||
types: [file, python]
|
||||
language: system
|
||||
|
||||
- id: isort
|
||||
name: Lint (isort) - Classic - Forge
|
||||
alias: isort-classic-forge
|
||||
entry: poetry -P classic/forge run isort -p forge
|
||||
- id: isort-forge
|
||||
name: Lint (isort) - Forge
|
||||
entry: poetry -C classic/forge run isort
|
||||
files: ^classic/forge/
|
||||
types: [file, python]
|
||||
language: system
|
||||
|
||||
- id: isort
|
||||
name: Lint (isort) - Classic - Benchmark
|
||||
alias: isort-classic-benchmark
|
||||
entry: poetry -P classic/benchmark run isort -p agbenchmark
|
||||
- id: isort-benchmark
|
||||
name: Lint (isort) - Benchmark
|
||||
entry: poetry -C classic/benchmark run isort
|
||||
files: ^classic/benchmark/
|
||||
types: [file, python]
|
||||
language: system
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 24.10.0
|
||||
rev: 23.12.1
|
||||
# Black has sensible defaults, doesn't need package context, and ignores
|
||||
# everything in .gitignore, so it works fine without any config or arguments.
|
||||
hooks:
|
||||
- id: black
|
||||
name: Format (Black)
|
||||
name: Lint (Black)
|
||||
language_version: python3.10
|
||||
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.0.0
|
||||
@@ -153,79 +50,53 @@ repos:
|
||||
# them separately.
|
||||
hooks:
|
||||
- id: flake8
|
||||
name: Lint (Flake8) - Classic - AutoGPT
|
||||
alias: flake8-classic-autogpt
|
||||
name: Lint (Flake8) - AutoGPT
|
||||
alias: flake8-autogpt
|
||||
files: ^classic/original_autogpt/(autogpt|scripts|tests)/
|
||||
args: [--config=classic/original_autogpt/.flake8]
|
||||
|
||||
- id: flake8
|
||||
name: Lint (Flake8) - Classic - Forge
|
||||
alias: flake8-classic-forge
|
||||
name: Lint (Flake8) - Forge
|
||||
alias: flake8-forge
|
||||
files: ^classic/forge/(forge|tests)/
|
||||
args: [--config=classic/forge/.flake8]
|
||||
|
||||
- id: flake8
|
||||
name: Lint (Flake8) - Classic - Benchmark
|
||||
alias: flake8-classic-benchmark
|
||||
name: Lint (Flake8) - Benchmark
|
||||
alias: flake8-benchmark
|
||||
files: ^classic/benchmark/(agbenchmark|tests)/((?!reports).)*[/.]
|
||||
args: [--config=classic/benchmark/.flake8]
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: prettier
|
||||
name: Format (Prettier) - AutoGPT Platform - Frontend
|
||||
alias: format-platform-frontend
|
||||
entry: bash -c 'cd autogpt_platform/frontend && npx prettier --write $(echo "$@" | sed "s|autogpt_platform/frontend/||g")' --
|
||||
files: ^autogpt_platform/frontend/
|
||||
types: [file]
|
||||
language: system
|
||||
|
||||
- repo: local
|
||||
# To have watertight type checking, we check *all* the files in an affected
|
||||
# project. To trigger on poetry.lock we also reset the file `types` filter.
|
||||
hooks:
|
||||
- id: pyright
|
||||
name: Typecheck - AutoGPT Platform - Backend
|
||||
alias: pyright-platform-backend
|
||||
entry: poetry -C autogpt_platform/backend run pyright
|
||||
# include forge source (since it's a path dependency) but exclude *_test.py files:
|
||||
files: ^autogpt_platform/(backend/((backend|test)/|(\w+\.py|poetry\.lock)$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pyright
|
||||
name: Typecheck - AutoGPT Platform - Libs
|
||||
alias: pyright-platform-libs
|
||||
entry: poetry -C autogpt_platform/autogpt_libs run pyright
|
||||
files: ^autogpt_platform/autogpt_libs/(autogpt_libs/|poetry\.lock$)
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pyright
|
||||
name: Typecheck - Classic - AutoGPT
|
||||
alias: pyright-classic-autogpt
|
||||
name: Typecheck - AutoGPT
|
||||
alias: pyright-autogpt
|
||||
entry: poetry -C classic/original_autogpt run pyright
|
||||
args: [-p, autogpt, autogpt]
|
||||
# include forge source (since it's a path dependency) but exclude *_test.py files:
|
||||
files: ^(classic/original_autogpt/((autogpt|scripts|tests)/|poetry\.lock$)|classic/forge/(forge/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
files: ^(classic/original_autogpt/((autogpt|scripts|tests)/|poetry\.lock$)|classic/forge/(classic/forge/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pyright
|
||||
name: Typecheck - Classic - Forge
|
||||
alias: pyright-classic-forge
|
||||
name: Typecheck - Forge
|
||||
alias: pyright-forge
|
||||
entry: poetry -C classic/forge run pyright
|
||||
files: ^classic/forge/(forge/|poetry\.lock$)
|
||||
args: [-p, forge, forge]
|
||||
files: ^classic/forge/(classic/forge/|poetry\.lock$)
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pyright
|
||||
name: Typecheck - Classic - Benchmark
|
||||
alias: pyright-classic-benchmark
|
||||
name: Typecheck - Benchmark
|
||||
alias: pyright-benchmark
|
||||
entry: poetry -C classic/benchmark run pyright
|
||||
args: [-p, benchmark, benchmark]
|
||||
files: ^classic/benchmark/(agbenchmark/|tests/|poetry\.lock$)
|
||||
types: [file]
|
||||
language: system
|
||||
@@ -233,45 +104,23 @@ repos:
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: tsc
|
||||
name: Typecheck - AutoGPT Platform - Frontend
|
||||
entry: bash -c 'cd autogpt_platform/frontend && npm run type-check'
|
||||
files: ^autogpt_platform/frontend/
|
||||
types: [file]
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pytest
|
||||
name: Run tests - AutoGPT Platform - Backend
|
||||
alias: pytest-platform-backend
|
||||
entry: bash -c 'cd autogpt_platform/backend && poetry run pytest'
|
||||
# include autogpt_libs source (since it's a path dependency) but exclude *_test.py files:
|
||||
files: ^autogpt_platform/(backend/((backend|test)/|poetry\.lock$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pytest
|
||||
name: Run tests - Classic - AutoGPT (excl. slow tests)
|
||||
alias: pytest-classic-autogpt
|
||||
- id: pytest-autogpt
|
||||
name: Run tests - AutoGPT (excl. slow tests)
|
||||
entry: bash -c 'cd classic/original_autogpt && poetry run pytest --cov=autogpt -m "not slow" tests/unit tests/integration'
|
||||
# include forge source (since it's a path dependency) but exclude *_test.py files:
|
||||
files: ^(classic/original_autogpt/((autogpt|tests)/|poetry\.lock$)|classic/forge/(forge/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
files: ^(classic/original_autogpt/((autogpt|tests)/|poetry\.lock$)|classic/forge/(classic/forge/.*(?<!_test)\.py|poetry\.lock)$)
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pytest
|
||||
name: Run tests - Classic - Forge (excl. slow tests)
|
||||
alias: pytest-classic-forge
|
||||
- id: pytest-forge
|
||||
name: Run tests - Forge (excl. slow tests)
|
||||
entry: bash -c 'cd classic/forge && poetry run pytest --cov=forge -m "not slow"'
|
||||
files: ^classic/forge/(forge/|tests/|poetry\.lock$)
|
||||
files: ^classic/forge/(classic/forge/|tests/|poetry\.lock$)
|
||||
language: system
|
||||
pass_filenames: false
|
||||
|
||||
- id: pytest
|
||||
name: Run tests - Classic - Benchmark
|
||||
alias: pytest-classic-benchmark
|
||||
- id: pytest-benchmark
|
||||
name: Run tests - Benchmark
|
||||
entry: bash -c 'cd classic/benchmark && poetry run pytest --cov=benchmark'
|
||||
files: ^classic/benchmark/(agbenchmark/|tests/|poetry\.lock$)
|
||||
language: system
|
||||
|
||||
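The pre-commit changes above mostly rename hooks and give each project-specific hook an alias. If you want to exercise a single hook in isolation, pre-commit can run it by id or alias; a minimal sketch using names taken from the config shown above:

```
# Run one of the aliased hooks from the config above against the whole tree.
pre-commit run ruff-lint-platform-backend --all-files

# Or run the hooks staged for push (e.g. the detect-secrets hook) on demand.
pre-commit run --hook-stage push --all-files
```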
17
.vscode/all-projects.code-workspace
vendored
17
.vscode/all-projects.code-workspace
vendored
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"folders": [
|
||||
{
|
||||
"name": "frontend",
|
||||
"path": "../autogpt_platform/frontend"
|
||||
"name": "autogpt_server",
|
||||
"path": "../autogpt_platform/autogpt_server"
|
||||
},
|
||||
{
|
||||
"name": "backend",
|
||||
"path": "../autogpt_platform/backend"
|
||||
"name": "autogpt_builder",
|
||||
"path": "../autogpt_platform/autogpt_builder"
|
||||
},
|
||||
{
|
||||
"name": "market",
|
||||
@@ -24,7 +24,10 @@
|
||||
"name": "docs",
|
||||
"path": "../docs"
|
||||
},
|
||||
|
||||
{
|
||||
"name": "[root]",
|
||||
"path": ".."
|
||||
},
|
||||
{
|
||||
"name": "classic - autogpt",
|
||||
"path": "../classic/original_autogpt"
|
||||
@@ -41,10 +44,6 @@
|
||||
"name": "classic - frontend",
|
||||
"path": "../classic/frontend"
|
||||
},
|
||||
{
|
||||
"name": "[root]",
|
||||
"path": ".."
|
||||
}
|
||||
],
|
||||
"settings": {
|
||||
"python.analysis.typeCheckingMode": "basic"
|
||||
|
||||
67
.vscode/launch.json
vendored
67
.vscode/launch.json
vendored
@@ -1,67 +0,0 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "Frontend: Server Side",
|
||||
"type": "node-terminal",
|
||||
"request": "launch",
|
||||
"cwd": "${workspaceFolder}/autogpt_platform/frontend",
|
||||
"command": "yarn dev"
|
||||
},
|
||||
{
|
||||
"name": "Frontend: Client Side",
|
||||
"type": "msedge",
|
||||
"request": "launch",
|
||||
"url": "http://localhost:3000"
|
||||
},
|
||||
{
|
||||
"name": "Frontend: Full Stack",
|
||||
"type": "node-terminal",
|
||||
|
||||
"request": "launch",
|
||||
"command": "yarn dev",
|
||||
"cwd": "${workspaceFolder}/autogpt_platform/frontend",
|
||||
"serverReadyAction": {
|
||||
"pattern": "- Local:.+(https?://.+)",
|
||||
"uriFormat": "%s",
|
||||
"action": "debugWithEdge"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Backend",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "backend.app",
|
||||
// "env": {
|
||||
// "ENV": "dev"
|
||||
// },
|
||||
"envFile": "${workspaceFolder}/backend/.env",
|
||||
"justMyCode": false,
|
||||
"cwd": "${workspaceFolder}/autogpt_platform/backend"
|
||||
},
|
||||
{
|
||||
"name": "Marketplace",
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "autogpt_platform.market.main",
|
||||
"env": {
|
||||
"ENV": "dev"
|
||||
},
|
||||
"envFile": "${workspaceFolder}/market/.env",
|
||||
"justMyCode": false,
|
||||
"cwd": "${workspaceFolder}/market"
|
||||
}
|
||||
],
|
||||
"compounds": [
|
||||
{
|
||||
"name": "Everything",
|
||||
"configurations": ["Backend", "Frontend: Full Stack"],
|
||||
// "preLaunchTask": "${defaultBuildTask}",
|
||||
"stopAll": true,
|
||||
"presentation": {
|
||||
"hidden": false,
|
||||
"order": 0
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -2,14 +2,14 @@
|
||||
If you are reading this, you are probably looking for the full **[contribution guide]**,
|
||||
which is part of our [wiki].
|
||||
|
||||
Also check out our [🚀 Roadmap][roadmap] for information about our priorities and associated tasks.
|
||||
<!-- You can find our immediate priorities and their progress on our public [kanban board]. -->
|
||||
|
||||
[contribution guide]: https://github.com/Significant-Gravitas/AutoGPT/wiki/Contributing
|
||||
[wiki]: https://github.com/Significant-Gravitas/AutoGPT/wiki
|
||||
[roadmap]: https://github.com/Significant-Gravitas/AutoGPT/discussions/6971
|
||||
[kanban board]: https://github.com/orgs/Significant-Gravitas/projects/1
|
||||
|
||||
## Contributing to the AutoGPT Platform Folder
|
||||
All contributions to [the autogpt_platform folder](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform) will be under our [Contribution License Agreement](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/Contributor%20License%20Agreement%20(CLA).md). By making a pull request contributing to this folder, you agree to the terms of our CLA for your contribution. All contributions to other folders will be under the MIT license.
|
||||
|
||||
## In short
|
||||
1. Avoid duplicate work, issues, PRs etc.
|
||||
2. We encourage you to collaborate with fellow community members on some of our bigger
|
||||
|
||||
LICENSE (177 changes)
@@ -1,16 +1,7 @@
All portions of this repository are under one of two licenses.

All files outside of the autogpt_platform folder are under the MIT License below.

The autogpt_platform folder is under the Polyform Shield License below.


MIT License


Copyright (c) 2023 Toran Bruce Richards


Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
@@ -18,11 +9,9 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
@@ -30,169 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
|
||||
|
||||
# PolyForm Shield License 1.0.0
|
||||
|
||||
<https://polyformproject.org/licenses/shield/1.0.0>
|
||||
|
||||
## Acceptance
|
||||
|
||||
In order to get any license under these terms, you must agree
|
||||
to them as both strict obligations and conditions to all
|
||||
your licenses.
|
||||
|
||||
## Copyright License
|
||||
|
||||
The licensor grants you a copyright license for the
|
||||
software to do everything you might do with the software
|
||||
that would otherwise infringe the licensor's copyright
|
||||
in it for any permitted purpose. However, you may
|
||||
only distribute the software according to [Distribution
|
||||
License](#distribution-license) and make changes or new works
|
||||
based on the software according to [Changes and New Works
|
||||
License](#changes-and-new-works-license).
|
||||
|
||||
## Distribution License
|
||||
|
||||
The licensor grants you an additional copyright license
|
||||
to distribute copies of the software. Your license
|
||||
to distribute covers distributing the software with
|
||||
changes and new works permitted by [Changes and New Works
|
||||
License](#changes-and-new-works-license).
|
||||
|
||||
## Notices
|
||||
|
||||
You must ensure that anyone who gets a copy of any part of
|
||||
the software from you also gets a copy of these terms or the
|
||||
URL for them above, as well as copies of any plain-text lines
|
||||
beginning with `Required Notice:` that the licensor provided
|
||||
with the software. For example:
|
||||
|
||||
> Required Notice: Copyright Yoyodyne, Inc. (http://example.com)
|
||||
|
||||
## Changes and New Works License
|
||||
|
||||
The licensor grants you an additional copyright license to
|
||||
make changes and new works based on the software for any
|
||||
permitted purpose.
|
||||
|
||||
## Patent License
|
||||
|
||||
The licensor grants you a patent license for the software that
|
||||
covers patent claims the licensor can license, or becomes able
|
||||
to license, that you would infringe by using the software.
|
||||
|
||||
## Noncompete
|
||||
|
||||
Any purpose is a permitted purpose, except for providing any
|
||||
product that competes with the software or any product the
|
||||
licensor or any of its affiliates provides using the software.
|
||||
|
||||
## Competition
|
||||
|
||||
Goods and services compete even when they provide functionality
|
||||
through different kinds of interfaces or for different technical
|
||||
platforms. Applications can compete with services, libraries
|
||||
with plugins, frameworks with development tools, and so on,
|
||||
even if they're written in different programming languages
|
||||
or for different computer architectures. Goods and services
|
||||
compete even when provided free of charge. If you market a
|
||||
product as a practical substitute for the software or another
|
||||
product, it definitely competes.
|
||||
|
||||
## New Products
|
||||
|
||||
If you are using the software to provide a product that does
|
||||
not compete, but the licensor or any of its affiliates brings
|
||||
your product into competition by providing a new version of
|
||||
the software or another product using the software, you may
|
||||
continue using versions of the software available under these
|
||||
terms beforehand to provide your competing product, but not
|
||||
any later versions.
|
||||
|
||||
## Discontinued Products
|
||||
|
||||
You may begin using the software to compete with a product
|
||||
or service that the licensor or any of its affiliates has
|
||||
stopped providing, unless the licensor includes a plain-text
|
||||
line beginning with `Licensor Line of Business:` with the
|
||||
software that mentions that line of business. For example:
|
||||
|
||||
> Licensor Line of Business: YoyodyneCMS Content Management
|
||||
System (http://example.com/cms)
|
||||
|
||||
## Sales of Business
|
||||
|
||||
If the licensor or any of its affiliates sells a line of
|
||||
business developing the software or using the software
|
||||
to provide a product, the buyer can also enforce
|
||||
[Noncompete](#noncompete) for that product.
|
||||
|
||||
## Fair Use
|
||||
|
||||
You may have "fair use" rights for the software under the
|
||||
law. These terms do not limit them.
|
||||
|
||||
## No Other Rights
|
||||
|
||||
These terms do not allow you to sublicense or transfer any of
|
||||
your licenses to anyone else, or prevent the licensor from
|
||||
granting licenses to anyone else. These terms do not imply
|
||||
any other licenses.
|
||||
|
||||
## Patent Defense
|
||||
|
||||
If you make any written claim that the software infringes or
|
||||
contributes to infringement of any patent, your patent license
|
||||
for the software granted under these terms ends immediately. If
|
||||
your company makes such a claim, your patent license ends
|
||||
immediately for work on behalf of your company.
|
||||
|
||||
## Violations
|
||||
|
||||
The first time you are notified in writing that you have
|
||||
violated any of these terms, or done anything with the software
|
||||
not covered by your licenses, your licenses can nonetheless
|
||||
continue if you come into full compliance with these terms,
|
||||
and take practical steps to correct past violations, within
|
||||
32 days of receiving notice. Otherwise, all your licenses
|
||||
end immediately.
|
||||
|
||||
## No Liability
|
||||
|
||||
***As far as the law allows, the software comes as is, without
|
||||
any warranty or condition, and the licensor will not be liable
|
||||
to you for any damages arising out of these terms or the use
|
||||
or nature of the software, under any kind of legal claim.***
|
||||
|
||||
## Definitions
|
||||
|
||||
The **licensor** is the individual or entity offering these
|
||||
terms, and the **software** is the software the licensor makes
|
||||
available under these terms.
|
||||
|
||||
A **product** can be a good or service, or a combination
|
||||
of them.
|
||||
|
||||
**You** refers to the individual or entity agreeing to these
|
||||
terms.
|
||||
|
||||
**Your company** is any legal entity, sole proprietorship,
|
||||
or other kind of organization that you work for, plus all
|
||||
its affiliates.
|
||||
|
||||
**Affiliates** means the other organizations than an
|
||||
organization has control over, is under the control of, or is
|
||||
under common control with.
|
||||
|
||||
**Control** means ownership of substantially all the assets of
|
||||
an entity, or the power to direct its management and policies
|
||||
by vote, contract, or otherwise. Control can be direct or
|
||||
indirect.
|
||||
|
||||
**Your licenses** are all the licenses granted to you for the
|
||||
software under these terms.
|
||||
|
||||
**Use** means anything you do with the software requiring one
|
||||
of your licenses.
|
||||
79
README.md
79
README.md
@@ -1,70 +1,43 @@
|
||||
# AutoGPT: Build, Deploy, and Run AI Agents
|
||||
# AutoGPT: Build & Use AI Agents
|
||||
|
||||
[](https://discord.gg/autogpt)  
|
||||
[](https://twitter.com/Auto_GPT)  
|
||||
[](https://opensource.org/licenses/MIT)
|
||||
|
||||
**AutoGPT** is a powerful platform that allows you to create, deploy, and manage continuous AI agents that automate complex workflows.
|
||||
**AutoGPT** is a powerful tool that lets you create and run intelligent agents. These agents can perform various tasks automatically, making your life easier.
|
||||
|
||||
## Hosting Options
|
||||
- Download to self-host
|
||||
- [Join the Waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta
|
||||
## How to Get Started
|
||||
|
||||
## How to Set Up for Self-Hosting
|
||||
> [!NOTE]
|
||||
> Setting up and hosting the AutoGPT Platform yourself is a technical process.
|
||||
> If you'd rather something that just works, we recommend [joining the waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta.
|
||||
https://github.com/user-attachments/assets/8508f4dc-b362-4cab-900f-644964a96cdf
|
||||
|
||||
https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603
|
||||
### 🧱 AutoGPT Builder
|
||||
|
||||
This tutorial assumes you have Docker, VSCode, git and npm installed.
|
||||
The AutoGPT Builder is the frontend. It allows you to design agents using an easy flowchart style. You build your agent by connecting blocks, where each block performs a single action. It's simple and intuitive!
|
||||
|
||||
### 🧱 AutoGPT Frontend
|
||||
|
||||
The AutoGPT frontend is where users interact with our powerful AI automation platform. It offers multiple ways to engage with and leverage our AI agents. This is the interface where you'll bring your AI automation ideas to life:
|
||||
|
||||
**Agent Builder:** For those who want to customize, our intuitive, low-code interface allows you to design and configure your own AI agents.
|
||||
|
||||
**Workflow Management:** Build, modify, and optimize your automation workflows with ease. You build your agent by connecting blocks, where each block performs a single action.
|
||||
|
||||
**Deployment Controls:** Manage the lifecycle of your agents, from testing to production.
|
||||
|
||||
**Ready-to-Use Agents:** Don't want to build? Simply select from our library of pre-configured agents and put them to work immediately.
|
||||
|
||||
**Agent Interaction:** Whether you've built your own or are using pre-configured agents, easily run and interact with them through our user-friendly interface.
|
||||
|
||||
**Monitoring and Analytics:** Keep track of your agents' performance and gain insights to continually improve your automation processes.
|
||||
|
||||
[Read this guide](https://docs.agpt.co/platform/new_blocks/) to learn how to build your own custom blocks.
|
||||
[Read this guide](https://docs.agpt.co/server/new_blocks/) to learn how to build your own custom blocks.
|
||||
|
||||
### 💽 AutoGPT Server
|
||||
|
||||
The AutoGPT Server is the powerhouse of our platform. This is where your agents run. Once deployed, agents can be triggered by external sources and can operate continuously. It contains all the essential components that make AutoGPT run smoothly.
|
||||
|
||||
**Source Code:** The core logic that drives our agents and automation processes.
|
||||
|
||||
**Infrastructure:** Robust systems that ensure reliable and scalable performance.
|
||||
|
||||
**Marketplace:** A comprehensive marketplace where you can find and deploy a wide range of pre-built agents.
|
||||
The AutoGPT Server is the backend. This is where your agents run. Once deployed, agents can be triggered by external sources and can operate continuously.
|
||||
|
||||
### 🐙 Example Agents
|
||||
|
||||
Here are two examples of what you can do with AutoGPT:
|
||||
|
||||
1. **Generate Viral Videos from Trending Topics**
|
||||
- This agent reads topics on Reddit.
|
||||
- It identifies trending topics.
|
||||
- It then automatically creates a short-form video based on the content.
|
||||
1. **Reddit Marketing Agent**
|
||||
- This agent reads comments on Reddit.
|
||||
- It looks for people asking about your product.
|
||||
- It then automatically responds to them.
|
||||
|
||||
2. **Identify Top Quotes from Videos for Social Media**
|
||||
2. **YouTube Content Repurposing Agent**
|
||||
- This agent subscribes to your YouTube channel.
|
||||
- When you post a new video, it transcribes it.
|
||||
- It uses AI to identify the most impactful quotes to generate a summary.
|
||||
- Then, it writes a post to automatically publish to your social media.
|
||||
- It uses AI to write a search engine optimized blog post.
|
||||
- Then, it publishes this blog post to your Medium account.
|
||||
|
||||
These examples show just a glimpse of what you can achieve with AutoGPT! You can create customized workflows to build agents for any use case.
|
||||
These examples show just a glimpse of what you can achieve with AutoGPT!
|
||||
|
||||
---
|
||||
### Mission and Licensing
|
||||
Our mission is to provide the tools, so that you can focus on what matters:
|
||||
|
||||
- 🏗️ **Building** - Lay the foundation for something amazing.
|
||||
@@ -77,13 +50,6 @@ Be part of the revolution! **AutoGPT** is here to stay, at the forefront of AI i
|
||||
 | 
|
||||
**🚀 [Contributing](CONTRIBUTING.md)**
|
||||
|
||||
**Licensing:**
|
||||
|
||||
MIT License: All files outside of autogpt_platform folder are under the MIT License.
|
||||
|
||||
Polyform Shield License: This license applies to the autogpt_platform folder.
|
||||
|
||||
For more information, see https://agpt.co/blog/introducing-the-autogpt-platform
|
||||
|
||||
---
|
||||
## 🤖 AutoGPT Classic
|
||||
@@ -108,7 +74,7 @@ This guide will walk you through the process of creating your own agent and usin
|
||||
|
||||
📦 [`agbenchmark`](https://pypi.org/project/agbenchmark/) on Pypi
|
||||
 | 
|
||||
📘 [Learn More](https://github.com/Significant-Gravitas/AutoGPT/tree/master/classic/benchmark) about the Benchmark
|
||||
📘 [Learn More](https://github.com/Significant-Gravitas/AutoGPT/blob/master/benchmark) about the Benchmark
|
||||
|
||||
### 💻 UI
|
||||
|
||||
@@ -157,8 +123,6 @@ To maintain a uniform standard and ensure seamless compatibility with many curre
|
||||
|
||||
---
|
||||
|
||||
## Stars stats
|
||||
|
||||
<p align="center">
|
||||
<a href="https://star-history.com/#Significant-Gravitas/AutoGPT">
|
||||
<picture>
|
||||
@@ -168,10 +132,3 @@ To maintain a uniform standard and ensure seamless compatibility with many curre
|
||||
</picture>
|
||||
</a>
|
||||
</p>
|
||||
|
||||
|
||||
## ⚡ Contributors
|
||||
|
||||
<a href="https://github.com/Significant-Gravitas/AutoGPT/graphs/contributors" alt="View Contributors">
|
||||
<img src="https://contrib.rocks/image?repo=Significant-Gravitas/AutoGPT&max=1000&columns=10" alt="Contributors" />
|
||||
</a>
|
||||
|
||||
48
SECURITY.md
48
SECURITY.md
@@ -1,48 +0,0 @@
|
||||
# Security Policy
|
||||
|
||||
## Reporting Security Issues
|
||||
|
||||
We take the security of our project seriously. If you believe you have found a security vulnerability, please report it to us privately. **Please do not report security vulnerabilities through public GitHub issues, discussions, or pull requests.**
|
||||
|
||||
> **Important Note**: Any code within the `classic/` folder is considered legacy, unsupported, and out of scope for security reports. We will not address security vulnerabilities in this deprecated code.
|
||||
|
||||
Instead, please report them via:
|
||||
- [GitHub Security Advisory](https://github.com/Significant-Gravitas/AutoGPT/security/advisories/new)
|
||||
<!--- [Huntr.dev](https://huntr.com/repos/significant-gravitas/autogpt) - where you may be eligible for a bounty-->
|
||||
|
||||
### Reporting Process
|
||||
1. **Submit Report**: Use one of the above channels to submit your report
|
||||
2. **Response Time**: Our team will acknowledge receipt of your report within 14 business days.
|
||||
3. **Collaboration**: We will collaborate with you to understand and validate the issue
|
||||
4. **Resolution**: We will work on a fix and coordinate the release process
|
||||
|
||||
### Disclosure Policy
|
||||
- Please provide detailed reports with reproducible steps
|
||||
- Include the version/commit hash where you discovered the vulnerability
|
||||
- Allow us a 90-day security fix window before any public disclosure
|
||||
- After patch is released, allow 30 days for users to update before public disclosure (for a total of 120 days max between update time and fix time)
|
||||
- Share any potential mitigations or workarounds if known
|
||||
|
||||
## Supported Versions
|
||||
Only the following versions are eligible for security updates:
|
||||
|
||||
| Version | Supported |
|
||||
|---------|-----------|
|
||||
| Latest release on master branch | ✅ |
|
||||
| Development commits (pre-master) | ✅ |
|
||||
| Classic folder (deprecated) | ❌ |
|
||||
| All other versions | ❌ |
|
||||
|
||||
## Security Best Practices
|
||||
When using this project:
|
||||
1. Always use the latest stable version
|
||||
2. Review security advisories before updating
|
||||
3. Follow our security documentation and guidelines
|
||||
4. Keep your dependencies up to date
|
||||
5. Do not use code from the `classic/` folder as it is deprecated and unsupported
|
||||
|
||||
## Past Security Advisories
|
||||
For a list of past security advisories, please visit our [Security Advisory Page](https://github.com/Significant-Gravitas/AutoGPT/security/advisories) and [Huntr Disclosures Page](https://huntr.com/repos/significant-gravitas/autogpt).
|
||||
|
||||
---
|
||||
Last updated: November 2024
|
||||
@@ -1,123 +0,0 @@
|
||||
############
|
||||
# Secrets
|
||||
# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION
|
||||
############
|
||||
|
||||
POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
|
||||
JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
|
||||
ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
|
||||
SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
|
||||
DASHBOARD_USERNAME=supabase
|
||||
DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
|
||||
SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
|
||||
VAULT_ENC_KEY=your-encryption-key-32-chars-min
|
||||
|
||||
|
||||
############
|
||||
# Database - You can change these to any PostgreSQL database that has logical replication enabled.
|
||||
############
|
||||
|
||||
POSTGRES_HOST=db
|
||||
POSTGRES_DB=postgres
|
||||
POSTGRES_PORT=5432
|
||||
# default user is postgres
|
||||
|
||||
|
||||
############
|
||||
# Supavisor -- Database pooler
|
||||
############
|
||||
POOLER_PROXY_PORT_TRANSACTION=6543
|
||||
POOLER_DEFAULT_POOL_SIZE=20
|
||||
POOLER_MAX_CLIENT_CONN=100
|
||||
POOLER_TENANT_ID=your-tenant-id
|
||||
|
||||
|
||||
############
|
||||
# API Proxy - Configuration for the Kong Reverse proxy.
|
||||
############
|
||||
|
||||
KONG_HTTP_PORT=8000
|
||||
KONG_HTTPS_PORT=8443
|
||||
|
||||
|
||||
############
|
||||
# API - Configuration for PostgREST.
|
||||
############
|
||||
|
||||
PGRST_DB_SCHEMAS=public,storage,graphql_public
|
||||
|
||||
|
||||
############
|
||||
# Auth - Configuration for the GoTrue authentication server.
|
||||
############
|
||||
|
||||
## General
|
||||
SITE_URL=http://localhost:3000
|
||||
ADDITIONAL_REDIRECT_URLS=
|
||||
JWT_EXPIRY=3600
|
||||
DISABLE_SIGNUP=false
|
||||
API_EXTERNAL_URL=http://localhost:8000
|
||||
|
||||
## Mailer Config
|
||||
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
|
||||
MAILER_URLPATHS_INVITE="/auth/v1/verify"
|
||||
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
|
||||
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"
|
||||
|
||||
## Email auth
|
||||
ENABLE_EMAIL_SIGNUP=true
|
||||
ENABLE_EMAIL_AUTOCONFIRM=false
|
||||
SMTP_ADMIN_EMAIL=admin@example.com
|
||||
SMTP_HOST=supabase-mail
|
||||
SMTP_PORT=2500
|
||||
SMTP_USER=fake_mail_user
|
||||
SMTP_PASS=fake_mail_password
|
||||
SMTP_SENDER_NAME=fake_sender
|
||||
ENABLE_ANONYMOUS_USERS=false
|
||||
|
||||
## Phone auth
|
||||
ENABLE_PHONE_SIGNUP=true
|
||||
ENABLE_PHONE_AUTOCONFIRM=true
|
||||
|
||||
|
||||
############
|
||||
# Studio - Configuration for the Dashboard
|
||||
############
|
||||
|
||||
STUDIO_DEFAULT_ORGANIZATION=Default Organization
|
||||
STUDIO_DEFAULT_PROJECT=Default Project
|
||||
|
||||
STUDIO_PORT=3000
|
||||
# replace if you intend to use Studio outside of localhost
|
||||
SUPABASE_PUBLIC_URL=http://localhost:8000
|
||||
|
||||
# Enable webp support
|
||||
IMGPROXY_ENABLE_WEBP_DETECTION=true
|
||||
|
||||
# Add your OpenAI API key to enable SQL Editor Assistant
|
||||
OPENAI_API_KEY=
|
||||
|
||||
|
||||
############
|
||||
# Functions - Configuration for Functions
|
||||
############
|
||||
# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
|
||||
FUNCTIONS_VERIFY_JWT=false
|
||||
|
||||
|
||||
############
|
||||
# Logs - Configuration for Logflare
|
||||
# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
|
||||
############
|
||||
|
||||
LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key
|
||||
|
||||
# Change vector.toml sinks to reflect this change
|
||||
LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key
|
||||
|
||||
# Docker socket location - this value will differ depending on your OS
|
||||
DOCKER_SOCKET_LOCATION=/var/run/docker.sock
|
||||
|
||||
# Google Cloud Project details
|
||||
GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
|
||||
GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER
|
||||
autogpt_platform/.gitignore (vendored, 2 changes)
@@ -1,2 +0,0 @@
*.ignore.*
*.ign.*
@@ -1,21 +0,0 @@
|
||||
**Determinist Ltd**
|
||||
|
||||
**Contributor License Agreement (“Agreement”)**
|
||||
|
||||
Thank you for your interest in the AutoGPT project at [https://github.com/Significant-Gravitas/AutoGPT](https://github.com/Significant-Gravitas/AutoGPT) stewarded by Determinist Ltd (“**Determinist**”), with offices at 3rd Floor 1 Ashley Road, Altrincham, Cheshire, WA14 2DT, United Kingdom. The form of license below is a document that clarifies the terms under which You, the person listed below, may contribute software code described below (the “**Contribution**”) to the project. We appreciate your participation in our project, and your help in improving our products, so we want you to understand what will be done with the Contributions. This license is for your protection as well as the protection of Determinist and its licensees; it does not change your rights to use your own Contributions for any other purpose.
|
||||
|
||||
By submitting a Pull Request which modifies the content of the “autogpt\_platform” folder at [https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt\_platform](https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt_platform), You hereby agree:
|
||||
|
||||
1\. **You grant us the ability to use the Contributions in any way**. You hereby grant to Determinist a non-exclusive, irrevocable, worldwide, royalty-free, sublicenseable, transferable license under all of Your relevant intellectual property rights (including copyright, patent, and any other rights), to use, copy, prepare derivative works of, distribute and publicly perform and display the Contributions on any licensing terms, including without limitation: (a) open source licenses like the GNU General Public License (GPL), the GNU Lesser General Public License (LGPL), the Common Public License, or the Berkeley Science Division license (BSD); and (b) binary, proprietary, or commercial licenses.
|
||||
|
||||
2\. **Grant of Patent License**. You hereby grant to Determinist a worldwide, non-exclusive, royalty-free, irrevocable, license, under any rights you may have, now or in the future, in any patents or patent applications, to make, have made, use, offer to sell, sell, and import products containing the Contribution or portions of the Contribution. This license extends to patent claims that are infringed by the Contribution alone or by combination of the Contribution with other inventions.
|
||||
|
||||
4\. **Limitations on Licenses**. The licenses granted in this Agreement will continue for the duration of the applicable patent or intellectual property right under which such license is granted. The licenses granted in this Agreement will include the right to grant and authorize sublicenses, so long as the sublicenses are within the scope of the licenses granted in this Agreement. Except for the licenses granted herein, You reserve all right, title, and interest in and to the Contribution.
|
||||
|
||||
5\. **You are able to grant us these rights**. You represent that You are legally entitled to grant the above license. If Your employer has rights to intellectual property that You create, You represent that You are authorized to make the Contributions on behalf of that employer, or that Your employer has waived such rights for the Contributions.
|
||||
|
||||
3\. **The Contributions are your original work**. You represent that the Contributions are Your original works of authorship, and to Your knowledge, no other person claims, or has the right to claim, any right in any invention or patent related to the Contributions. You also represent that You are not legally obligated, whether by entering into an agreement or otherwise, in any way that conflicts with the terms of this license. For example, if you have signed an agreement requiring you to assign the intellectual property rights in the Contributions to an employer or customer, that would conflict with the terms of this license.
|
||||
|
||||
6\. **We determine the code that is in our products**. You understand that the decision to include the Contribution in any product or source repository is entirely that of Determinist, and this agreement does not guarantee that the Contributions will be included in any product.
|
||||
|
||||
7\. **No Implied Warranties.** Determinist acknowledges that, except as explicitly described in this Agreement, the Contribution is provided on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
|
||||
@@ -1,164 +0,0 @@
|
||||
# PolyForm Shield License 1.0.0
|
||||
|
||||
<https://polyformproject.org/licenses/shield/1.0.0>
|
||||
|
||||
## Acceptance
|
||||
|
||||
In order to get any license under these terms, you must agree
|
||||
to them as both strict obligations and conditions to all
|
||||
your licenses.
|
||||
|
||||
## Copyright License
|
||||
|
||||
The licensor grants you a copyright license for the
|
||||
software to do everything you might do with the software
|
||||
that would otherwise infringe the licensor's copyright
|
||||
in it for any permitted purpose. However, you may
|
||||
only distribute the software according to [Distribution
|
||||
License](#distribution-license) and make changes or new works
|
||||
based on the software according to [Changes and New Works
|
||||
License](#changes-and-new-works-license).
|
||||
|
||||
## Distribution License
|
||||
|
||||
The licensor grants you an additional copyright license to distribute copies of the software. Your license to distribute covers distributing the software with changes and new works permitted by [Changes and New Works License](#changes-and-new-works-license).

## Notices

You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these terms or the URL for them above, as well as copies of any plain-text lines beginning with `Required Notice:` that the licensor provided with the software. For example:

> Required Notice: Copyright Yoyodyne, Inc. (http://example.com)

## Changes and New Works License

The licensor grants you an additional copyright license to make changes and new works based on the software for any permitted purpose.

## Patent License

The licensor grants you a patent license for the software that covers patent claims the licensor can license, or becomes able to license, that you would infringe by using the software.

## Noncompete

Any purpose is a permitted purpose, except for providing any product that competes with the software or any product the licensor or any of its affiliates provides using the software.

## Competition

Goods and services compete even when they provide functionality through different kinds of interfaces or for different technical platforms. Applications can compete with services, libraries with plugins, frameworks with development tools, and so on, even if they're written in different programming languages or for different computer architectures. Goods and services compete even when provided free of charge. If you market a product as a practical substitute for the software or another product, it definitely competes.

## New Products

If you are using the software to provide a product that does not compete, but the licensor or any of its affiliates brings your product into competition by providing a new version of the software or another product using the software, you may continue using versions of the software available under these terms beforehand to provide your competing product, but not any later versions.

## Discontinued Products

You may begin using the software to compete with a product or service that the licensor or any of its affiliates has stopped providing, unless the licensor includes a plain-text line beginning with `Licensor Line of Business:` with the software that mentions that line of business. For example:

> Licensor Line of Business: YoyodyneCMS Content Management System (http://example.com/cms)

## Sales of Business

If the licensor or any of its affiliates sells a line of business developing the software or using the software to provide a product, the buyer can also enforce [Noncompete](#noncompete) for that product.

## Fair Use

You may have "fair use" rights for the software under the law. These terms do not limit them.

## No Other Rights

These terms do not allow you to sublicense or transfer any of your licenses to anyone else, or prevent the licensor from granting licenses to anyone else. These terms do not imply any other licenses.

## Patent Defense

If you make any written claim that the software infringes or contributes to infringement of any patent, your patent license for the software granted under these terms ends immediately. If your company makes such a claim, your patent license ends immediately for work on behalf of your company.

## Violations

The first time you are notified in writing that you have violated any of these terms, or done anything with the software not covered by your licenses, your licenses can nonetheless continue if you come into full compliance with these terms, and take practical steps to correct past violations, within 32 days of receiving notice. Otherwise, all your licenses end immediately.

## No Liability

***As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will not be liable to you for any damages arising out of these terms or the use or nature of the software, under any kind of legal claim.***

## Definitions

The **licensor** is the individual or entity offering these terms, and the **software** is the software the licensor makes available under these terms.

A **product** can be a good or service, or a combination of them.

**You** refers to the individual or entity agreeing to these terms.

**Your company** is any legal entity, sole proprietorship, or other kind of organization that you work for, plus all its affiliates.

**Affiliates** means the other organizations than an organization has control over, is under the control of, or is under common control with.

**Control** means ownership of substantially all the assets of an entity, or the power to direct its management and policies by vote, contract, or otherwise. Control can be direct or indirect.

**Your licenses** are all the licenses granted to you for the software under these terms.

**Use** means anything you do with the software requiring one of your licenses.
@@ -8,61 +8,46 @@ Welcome to the AutoGPT Platform - a powerful system for creating and running AI

- Docker
- Docker Compose V2 (comes with Docker Desktop, or can be installed separately)
- Node.js & NPM (for running the frontend application)

### Running the System

To run the AutoGPT Platform, follow these steps:

1. Clone this repository to your local machine and navigate to the `autogpt_platform` directory within the repository:
   ```
   git clone <https://github.com/Significant-Gravitas/AutoGPT.git | git@github.com:Significant-Gravitas/AutoGPT.git>
   cd AutoGPT/autogpt_platform
   ```

2. Run the following command:
   ```
   cp .env.example .env
   ```
   This command will copy the `.env.example` file to `.env`. You can modify the `.env` file to add your own environment variables.

1. Clone this repository to your local machine.
2. Navigate to autogpt_platform/supabase
3. Run the following command:
   ```
   docker compose up -d
   git submodule update --init --recursive
   ```
   This command will start all the necessary backend services defined in the `docker-compose.yml` file in detached mode.

4. Navigate to `frontend` within the `autogpt_platform` directory:
4. Navigate back to rnd (cd ..)
5. Run the following command:
   ```
   cd frontend
   cp supabase/docker/.env.example .env
   ```
   You will need to run your frontend application separately on your local machine.

5. Run the following command:
   ```
   cp .env.example .env.local
   ```
   This command will copy the `.env.example` file to `.env.local` in the `frontend` directory. You can modify the `.env.local` within this folder to add your own environment variables for the frontend application.

6. Run the following command:
   ```
   npm install
   npm run dev
   ```
   This command will install the necessary dependencies and start the frontend application in development mode.
   If you are using Yarn, you can run the following commands instead:
   ```
   yarn install && yarn dev
   docker compose -f docker-compose.combined.yml up -d
   ```

7. Open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend.
   This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode.
7. Navigate to autogpt_platform/autogpt_builder.
8. Run the following command:
   ```
   cp .env.example .env.local
   ```
9. Run the following command:
   ```
   yarn dev
   ```

### Docker Compose Commands

Here are some useful Docker Compose commands for managing your AutoGPT Platform:

- `docker compose up -d`: Start the services in detached mode.
- `docker compose stop`: Stop the running services without removing them.
- `docker compose -f docker-compose.combined.yml up -d`: Start the services in detached mode.
- `docker compose -f docker-compose.combined.yml stop`: Stop the running services without removing them.
- `docker compose rm`: Remove stopped service containers.
- `docker compose build`: Build or rebuild services.
- `docker compose down`: Stop and remove containers, networks, and volumes.

@@ -143,3 +128,6 @@ To persist data for PostgreSQL and Redis, you can modify the `docker-compose.yml

3. Save the file and run `docker compose up -d` to apply the changes.

This configuration will create named volumes for PostgreSQL and Redis, ensuring that your data persists across container restarts.
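For reference, a minimal sketch of what such named volumes might look like in the compose file; the service names (`postgres`, `redis`) and volume names here are assumptions and may differ from the actual `docker-compose.yml`:

```yaml
services:
  postgres:
    # ...existing service definition...
    volumes:
      - postgres_data:/var/lib/postgresql/data  # default Postgres data directory
  redis:
    # ...existing service definition...
    volumes:
      - redis_data:/data  # default Redis data directory

volumes:
  postgres_data:
  redis_data:
```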
@@ -2,14 +2,6 @@ NEXT_PUBLIC_AUTH_CALLBACK_URL=http://localhost:8006/auth/callback
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=
NEXT_PUBLIC_APP_ENV=dev

## Locale settings

NEXT_PUBLIC_DEFAULT_LOCALE=en
NEXT_PUBLIC_LOCALES=en,es

## Supabase credentials

@@ -21,6 +13,3 @@ NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAic
## Only used if you're using Supabase and OAuth
AUTH_CALLBACK_URL=http://localhost:3000/auth/callback
GA_MEASUREMENT_ID=G-FH2XK2W4GN

# When running locally, set NEXT_PUBLIC_BEHAVE_AS=CLOUD to use a locally hosted marketplace (as is typical in development, and the cloud deployment); otherwise set it to LOCAL to have the marketplace open in a new tab
NEXT_PUBLIC_BEHAVE_AS=LOCAL
3  autogpt_platform/autogpt_builder/.eslintrc.json  Normal file
@@ -0,0 +1,3 @@
{
  "extends": "next/core-web-vitals"
}
@@ -37,14 +37,3 @@ next-env.d.ts

# Sentry Config File
.env.sentry-build-plugin
node_modules/
/test-results/
/playwright-report/
/blob-report/
/playwright/.cache/

*storybook.log
storybook-static
*.ignore.*
*.ign.*
.cursorrules
32  autogpt_platform/autogpt_builder/Dockerfile  Normal file
@@ -0,0 +1,32 @@
# Base stage for both dev and prod
FROM node:21-alpine AS base
WORKDIR /app
COPY autogpt_platform/autogpt_builder/package.json autogpt_platform/autogpt_builder/yarn.lock ./
RUN yarn install --frozen-lockfile

# Dev stage
FROM base AS dev
ENV NODE_ENV=development
COPY autogpt_platform/autogpt_builder/ .
EXPOSE 3000
CMD ["yarn", "run", "dev"]

# Build stage for prod
FROM base AS build
COPY autogpt_platform/autogpt_builder/ .
RUN npm run build

# Prod stage
FROM node:21-alpine AS prod
ENV NODE_ENV=production
WORKDIR /app

COPY --from=build /app/package.json /app/yarn.lock ./
RUN yarn install --frozen-lockfile

COPY --from=build /app/.next ./.next
COPY --from=build /app/public ./public
COPY --from=build /app/next.config.mjs ./next.config.mjs

EXPOSE 3000
CMD ["npm", "start"]
41  autogpt_platform/autogpt_builder/README.md  Normal file
@@ -0,0 +1,41 @@
This is the frontend for AutoGPT's next generation

## Getting Started

Run the following installation once.

```bash
npm install
# or
yarn install
# or
pnpm install
# or
bun install
```

Next, run the development server:

```bash
npm run dev
# or
yarn dev
# or
pnpm dev
# or
bun dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

For subsequent runs, you do not have to `npm install` again. Simply do `npm run dev`.

If the project is updated via git, you will need to `npm install` after each update.

This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.

## Deploy

TODO
@@ -1,26 +1,32 @@
import { withSentryConfig } from "@sentry/nextjs";
import dotenv from "dotenv";

// Load environment variables
dotenv.config();

/** @type {import('next').NextConfig} */
const nextConfig = {
  images: {
    domains: [
      "images.unsplash.com",
      "ddz4ak4pa3d19.cloudfront.net",
      "upload.wikimedia.org",
      "storage.googleapis.com",

      "ideogram.ai", // for generated images
      "picsum.photos", // for placeholder images
      "dummyimage.com", // for placeholder images
      "placekitten.com", // for placeholder images
    ],
  env: {
    NEXT_PUBLIC_AGPT_SERVER_URL: process.env.NEXT_PUBLIC_AGPT_SERVER_URL,
    NEXT_PUBLIC_AGPT_MARKETPLACE_URL:
      process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL,
  },
  images: {
    domains: ["images.unsplash.com"],
  },
  async redirects() {
    return [
      {
        source: "/monitor", // FIXME: Remove after 2024-09-01
        destination: "/",
        permanent: false,
      },
    ];
  },
  output: "standalone",
  // TODO: Re-enable TypeScript checks once current issues are resolved
  typescript: {
    ignoreBuildErrors: true,
  },
  transpilePackages: ["geist"],
};

export default withSentryConfig(nextConfig, {
@@ -48,7 +54,7 @@ export default withSentryConfig(nextConfig, {
  // This can increase your server load as well as your hosting bill.
  // Note: Check that the configured route will not match with your Next.js middleware, otherwise reporting of client-
  // side errors will fail.
  tunnelRoute: "/store",
  tunnelRoute: "/monitoring",

  // Hides source maps from generated client bundles
  hideSourceMaps: true,
72  autogpt_platform/autogpt_builder/package.json  Normal file
@@ -0,0 +1,72 @@
{
  "name": "autogpt_builder",
  "version": "0.1.0",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start",
    "lint": "next lint",
    "format": "prettier --write ."
  },
  "dependencies": {
    "@hookform/resolvers": "^3.9.0",
    "@next/third-parties": "^14.2.5",
    "@radix-ui/react-avatar": "^1.1.0",
    "@radix-ui/react-checkbox": "^1.1.1",
    "@radix-ui/react-collapsible": "^1.1.0",
    "@radix-ui/react-dialog": "^1.1.1",
    "@radix-ui/react-dropdown-menu": "^2.1.1",
    "@radix-ui/react-icons": "^1.3.0",
    "@radix-ui/react-label": "^2.1.0",
    "@radix-ui/react-popover": "^1.1.1",
    "@radix-ui/react-scroll-area": "^1.1.0",
    "@radix-ui/react-select": "^2.1.1",
    "@radix-ui/react-separator": "^1.1.0",
    "@radix-ui/react-slot": "^1.1.0",
    "@radix-ui/react-switch": "^1.1.0",
    "@radix-ui/react-toast": "^1.2.1",
    "@radix-ui/react-tooltip": "^1.1.2",
    "@sentry/nextjs": "^8",
    "@supabase/ssr": "^0.4.0",
    "@supabase/supabase-js": "^2.45.0",
    "@tanstack/react-table": "^8.20.5",
    "@xyflow/react": "^12.1.0",
    "ajv": "^8.17.1",
    "class-variance-authority": "^0.7.0",
    "clsx": "^2.1.1",
    "cmdk": "1.0.0",
    "date-fns": "^3.6.0",
    "dotenv": "^16.4.5",
    "lucide-react": "^0.407.0",
    "moment": "^2.30.1",
    "next": "14.2.4",
    "next-themes": "^0.3.0",
    "react": "^18",
    "react-day-picker": "^8.10.1",
    "react-dom": "^18",
    "react-hook-form": "^7.52.1",
    "react-icons": "^5.2.1",
    "react-markdown": "^9.0.1",
    "react-modal": "^3.16.1",
    "react-shepherd": "^6.1.1",
    "recharts": "^2.12.7",
    "tailwind-merge": "^2.3.0",
    "tailwindcss-animate": "^1.0.7",
    "uuid": "^10.0.0",
    "zod": "^3.23.8"
  },
  "devDependencies": {
    "@types/node": "^20",
    "@types/react": "^18",
    "@types/react-dom": "^18",
    "@types/react-modal": "^3.16.3",
    "eslint": "^8",
    "eslint-config-next": "14.2.4",
    "postcss": "^8",
    "prettier": "^3.3.3",
    "prettier-plugin-tailwindcss": "^0.6.6",
    "tailwindcss": "^3.4.1",
    "typescript": "^5"
  }
}
(Three binary image files changed; sizes before/after: 29 KiB → 29 KiB, 28 KiB → 28 KiB, 15 KiB → 15 KiB.)
@@ -7,8 +7,6 @@ import * as Sentry from "@sentry/nextjs";
Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  enabled: process.env.DISABLE_SENTRY !== "true",

  // Add optional integrations for additional features
  integrations: [
    Sentry.replayIntegration(),
@@ -31,6 +29,14 @@ Sentry.init({
    /^https:\/\/dev\-builder\.agpt\.co\/api/,
  ],

  beforeSend(event, hint) {
    // Check if it is an exception, and if so, show the report dialog
    if (event.exception && event.event_id) {
      Sentry.showReportDialog({ eventId: event.event_id });
    }
    return event;
  },

  // Define how likely Replay events are sampled.
  // This sets the sample rate to be 10%. You may want this to be 100% while
  // in development and sample at a lower rate in production
@@ -8,8 +8,6 @@ import * as Sentry from "@sentry/nextjs";
Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  enabled: process.env.NODE_ENV !== "development",

  // Define how likely traces are sampled. Adjust this value in production, or use tracesSampler for greater control.
  tracesSampleRate: 1,

@@ -8,8 +8,6 @@ import * as Sentry from "@sentry/nextjs";
Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  enabled: process.env.NODE_ENV !== "development",

  // Define how likely traces are sampled. Adjust this value in production, or use tracesSampler for greater control.
  tracesSampleRate: 1,
100
autogpt_platform/autogpt_builder/src/app/admin/layout.tsx
Normal file
100
autogpt_platform/autogpt_builder/src/app/admin/layout.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import Link from "next/link";
|
||||
import { BinaryIcon, XIcon } from "lucide-react";
|
||||
import { usePathname } from "next/navigation"; // Add this import
|
||||
|
||||
const tabs = [
|
||||
{ name: "Dashboard", href: "/admin/dashboard" },
|
||||
{ name: "Marketplace", href: "/admin/marketplace" },
|
||||
{ name: "Users", href: "/admin/users" },
|
||||
{ name: "Settings", href: "/admin/settings" },
|
||||
];
|
||||
|
||||
export default function AdminLayout({
|
||||
children,
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
}) {
|
||||
const pathname = usePathname(); // Get the current pathname
|
||||
const [activeTab, setActiveTab] = useState(() => {
|
||||
// Set active tab based on the current route
|
||||
return tabs.find((tab) => tab.href === pathname)?.name || tabs[0].name;
|
||||
});
|
||||
const [mobileMenuOpen, setMobileMenuOpen] = useState(false);
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-gray-100">
|
||||
<nav className="bg-white shadow-sm">
|
||||
<div className="max-w-10xl mx-auto px-4 sm:px-6 lg:px-8">
|
||||
<div className="flex h-16 items-center justify-between">
|
||||
<div className="flex items-center">
|
||||
<div className="flex-shrink-0">
|
||||
<h1 className="text-xl font-bold">Admin Panel</h1>
|
||||
</div>
|
||||
<div className="hidden sm:ml-6 sm:flex sm:space-x-8">
|
||||
{tabs.map((tab) => (
|
||||
<Link
|
||||
key={tab.name}
|
||||
href={tab.href}
|
||||
className={`${
|
||||
activeTab === tab.name
|
||||
? "border-indigo-500 text-indigo-600"
|
||||
: "border-transparent text-gray-500 hover:border-gray-300 hover:text-gray-700"
|
||||
} inline-flex items-center border-b-2 px-1 pt-1 text-sm font-medium`}
|
||||
onClick={() => setActiveTab(tab.name)}
|
||||
>
|
||||
{tab.name}
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
<div className="sm:hidden">
|
||||
<button
|
||||
type="button"
|
||||
className="inline-flex items-center justify-center rounded-md p-2 text-gray-400 hover:bg-gray-100 hover:text-gray-500 focus:outline-none focus:ring-2 focus:ring-inset focus:ring-indigo-500"
|
||||
onClick={() => setMobileMenuOpen(!mobileMenuOpen)}
|
||||
>
|
||||
<span className="sr-only">Open main menu</span>
|
||||
{mobileMenuOpen ? (
|
||||
<XIcon className="block h-6 w-6" aria-hidden="true" />
|
||||
) : (
|
||||
<BinaryIcon className="block h-6 w-6" aria-hidden="true" />
|
||||
)}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{mobileMenuOpen && (
|
||||
<div className="sm:hidden">
|
||||
<div className="space-y-1 pb-3 pt-2">
|
||||
{tabs.map((tab) => (
|
||||
<Link
|
||||
key={tab.name}
|
||||
href={tab.href}
|
||||
className={`${
|
||||
activeTab === tab.name
|
||||
? "border-indigo-500 bg-indigo-50 text-indigo-700"
|
||||
: "border-transparent text-gray-600 hover:border-gray-300 hover:bg-gray-50 hover:text-gray-800"
|
||||
} block border-l-4 py-2 pl-3 pr-4 text-base font-medium`}
|
||||
onClick={() => {
|
||||
setActiveTab(tab.name);
|
||||
setMobileMenuOpen(false);
|
||||
}}
|
||||
>
|
||||
{tab.name}
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</nav>
|
||||
|
||||
<main className="py-10">
|
||||
<div className="mx-auto max-w-7xl px-4 sm:px-6 lg:px-8">{children}</div>
|
||||
</main>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,25 @@
import { withRoleAccess } from "@/lib/withRoleAccess";

import React from "react";
import { getReviewableAgents } from "@/components/admin/marketplace/actions";
import AdminMarketplaceAgentList from "@/components/admin/marketplace/AdminMarketplaceAgentList";
import AdminFeaturedAgentsControl from "@/components/admin/marketplace/AdminFeaturedAgentsControl";
import { Separator } from "@/components/ui/separator";
async function AdminMarketplace() {
  const reviewableAgents = await getReviewableAgents();

  return (
    <>
      <AdminMarketplaceAgentList agents={reviewableAgents.agents} />
      <Separator className="my-4" />
      <AdminFeaturedAgentsControl className="mt-4" />
    </>
  );
}

export default async function AdminDashboardPage() {
  "use server";
  const withAdminAccess = await withRoleAccess(["admin"]);
  const ProtectedAdminMarketplace = await withAdminAccess(AdminMarketplace);
  return <ProtectedAdminMarketplace />;
}
@@ -1,15 +1,15 @@
import getServerSupabase from "@/lib/supabase/getServerSupabase";
import { NextResponse } from "next/server";
import { createServerClient } from "@/lib/supabase/server";

// Handle the callback to complete the user session login
export async function GET(request: Request) {
  const { searchParams, origin } = new URL(request.url);
  const code = searchParams.get("code");
  // if "next" is in param, use it as the redirect URL
  const next = searchParams.get("next") ?? "/";
  const next = searchParams.get("next") ?? "/profile";

  if (code) {
    const supabase = getServerSupabase();
    const supabase = createServerClient();

    if (!supabase) {
      return NextResponse.redirect(`${origin}/error`);
@@ -2,7 +2,7 @@ import { type EmailOtpType } from "@supabase/supabase-js";
import { type NextRequest } from "next/server";

import { redirect } from "next/navigation";
import getServerSupabase from "@/lib/supabase/getServerSupabase";
import { createServerClient } from "@/lib/supabase/server";

// Email confirmation route
export async function GET(request: NextRequest) {
@@ -12,7 +12,7 @@ export async function GET(request: NextRequest) {
  const next = searchParams.get("next") ?? "/";

  if (token_hash && type) {
    const supabase = getServerSupabase();
    const supabase = createServerClient();

    if (!supabase) {
      redirect("/error");
16  autogpt_platform/autogpt_builder/src/app/build/page.tsx  Normal file
@@ -0,0 +1,16 @@
"use client";

import { useSearchParams } from "next/navigation";
import FlowEditor from '@/components/Flow';

export default function Home() {
  const query = useSearchParams();

  return (
    <FlowEditor
      className="flow-container w-full min-h-[86vh] border border-gray-300 dark:border-gray-700 rounded-lg"
      flowID={query.get("flowID") ?? query.get("templateID") ?? undefined}
      template={!!query.get("templateID")}
    />
  );
}
@@ -2,9 +2,15 @@
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer utilities {
|
||||
.text-balance {
|
||||
text-wrap: balance;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
:root {
|
||||
--background: 0 0% 99.6%; /* #FEFEFE */
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 240 10% 3.9%;
|
||||
--card: 0 0% 100%;
|
||||
--card-foreground: 240 10% 3.9%;
|
||||
@@ -16,12 +22,12 @@
|
||||
--secondary-foreground: 240 5.9% 10%;
|
||||
--muted: 240 4.8% 95.9%;
|
||||
--muted-foreground: 240 3.8% 46.1%;
|
||||
--accent: 262 83% 58%;
|
||||
--accent-foreground: 0 0% 100%;
|
||||
--accent: 240 4.8% 95.9%;
|
||||
--accent-foreground: 240 5.9% 10%;
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 0 0% 98%;
|
||||
--border: 240 5.9% 90%;
|
||||
--input: 240 5.9% 85%;
|
||||
--input: 240 5.9% 90%;
|
||||
--ring: 240 5.9% 10%;
|
||||
--radius: 0.5rem;
|
||||
--chart-1: 12 76% 61%;
|
||||
@@ -57,90 +63,13 @@
|
||||
--chart-4: 280 65% 60%;
|
||||
--chart-5: 340 75% 55%;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground;
|
||||
}
|
||||
|
||||
.font-neue {
|
||||
font-family: "PP Neue Montreal TT", sans-serif;
|
||||
}
|
||||
}
|
||||
|
||||
/* *** AutoGPT Design Components *** */
|
||||
|
||||
@layer components {
|
||||
.agpt-border-input {
|
||||
@apply m-0.5 border border-input focus-visible:border-gray-400 focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-gray-400;
|
||||
}
|
||||
|
||||
.agpt-shadow-input {
|
||||
@apply shadow-sm focus-visible:shadow-md;
|
||||
}
|
||||
|
||||
.agpt-rounded-card {
|
||||
@apply rounded-2xl;
|
||||
}
|
||||
|
||||
.agpt-rounded-box {
|
||||
@apply rounded-3xl;
|
||||
}
|
||||
|
||||
.agpt-card {
|
||||
@apply agpt-rounded-card border border-zinc-300 bg-white p-[1px];
|
||||
}
|
||||
|
||||
.agpt-box {
|
||||
@apply agpt-card agpt-rounded-box;
|
||||
}
|
||||
|
||||
.agpt-div {
|
||||
@apply border-zinc-200 p-5;
|
||||
}
|
||||
}
|
||||
|
||||
@layer utilities {
|
||||
.agpt-card-selected {
|
||||
@apply border-2 border-accent bg-violet-50/50 p-0;
|
||||
}
|
||||
}
|
||||
|
||||
@layer utilities {
|
||||
/* TODO: 1. remove unused utility classes */
|
||||
/* TODO: 2. fix naming of numbered dimensions so that the number is 4*dimension */
|
||||
/* TODO: 3. move to tailwind.config.ts spacing config */
|
||||
.h-7\.5 {
|
||||
height: 1.1875rem;
|
||||
}
|
||||
.h-18 {
|
||||
height: 4.5rem;
|
||||
}
|
||||
.h-238 {
|
||||
height: 14.875rem;
|
||||
}
|
||||
.top-158 {
|
||||
top: 9.875rem;
|
||||
}
|
||||
.top-254 {
|
||||
top: 15.875rem;
|
||||
}
|
||||
.top-284 {
|
||||
top: 17.75rem;
|
||||
}
|
||||
.top-360 {
|
||||
top: 22.5rem;
|
||||
}
|
||||
.left-297 {
|
||||
left: 18.5625rem;
|
||||
}
|
||||
.left-34 {
|
||||
left: 2.125rem;
|
||||
}
|
||||
|
||||
.text-balance {
|
||||
text-wrap: balance;
|
||||
}
|
||||
}
|
||||
49
autogpt_platform/autogpt_builder/src/app/layout.tsx
Normal file
49
autogpt_platform/autogpt_builder/src/app/layout.tsx
Normal file
@@ -0,0 +1,49 @@
|
||||
import React from "react";
|
||||
import type { Metadata } from "next";
|
||||
import { Inter } from "next/font/google";
|
||||
import { Providers } from "@/app/providers";
|
||||
import { NavBar } from "@/components/NavBar";
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
import "./globals.css";
|
||||
import TallyPopupSimple from "@/components/TallyPopup";
|
||||
import { GoogleAnalytics } from "@next/third-parties/google";
|
||||
import { Toaster } from "@/components/ui/toaster";
|
||||
|
||||
const inter = Inter({ subsets: ["latin"] });
|
||||
|
||||
export const metadata: Metadata = {
|
||||
title: "NextGen AutoGPT",
|
||||
description: "Your one stop shop to creating AI Agents",
|
||||
};
|
||||
|
||||
export default function RootLayout({
|
||||
children,
|
||||
}: Readonly<{
|
||||
children: React.ReactNode;
|
||||
}>) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<body className={cn("antialiased transition-colors", inter.className)}>
|
||||
<Providers
|
||||
attribute="class"
|
||||
defaultTheme="light"
|
||||
// Feel free to remove this line if you want to use the system theme by default
|
||||
// enableSystem
|
||||
disableTransitionOnChange
|
||||
>
|
||||
<div className="flex min-h-screen flex-col">
|
||||
<NavBar />
|
||||
<main className="flex-1 overflow-hidden p-4">{children}</main>
|
||||
<TallyPopupSimple />
|
||||
</div>
|
||||
<Toaster />
|
||||
</Providers>
|
||||
</body>
|
||||
|
||||
<GoogleAnalytics
|
||||
gaId={process.env.GA_MEASUREMENT_ID || "G-FH2XK2W4GN"} // This is the measurement Id for the Google Analytics dev project
|
||||
/>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
64
autogpt_platform/autogpt_builder/src/app/login/actions.ts
Normal file
64
autogpt_platform/autogpt_builder/src/app/login/actions.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
"use server";
|
||||
import { revalidatePath } from "next/cache";
|
||||
import { redirect } from "next/navigation";
|
||||
import { createServerClient } from "@/lib/supabase/server";
|
||||
import { z } from "zod";
|
||||
import * as Sentry from "@sentry/nextjs";
|
||||
|
||||
const loginFormSchema = z.object({
|
||||
email: z.string().email().min(2).max(64),
|
||||
password: z.string().min(6).max(64),
|
||||
});
|
||||
|
||||
export async function login(values: z.infer<typeof loginFormSchema>) {
|
||||
return await Sentry.withServerActionInstrumentation("login", {}, async () => {
|
||||
const supabase = createServerClient();
|
||||
|
||||
if (!supabase) {
|
||||
redirect("/error");
|
||||
}
|
||||
|
||||
// We are sure that the values are of the correct type because zod validates the form
|
||||
const { data, error } = await supabase.auth.signInWithPassword(values);
|
||||
|
||||
if (error) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (data.session) {
|
||||
await supabase.auth.setSession(data.session);
|
||||
}
|
||||
|
||||
revalidatePath("/", "layout");
|
||||
redirect("/profile");
|
||||
});
|
||||
}
|
||||
|
||||
export async function signup(values: z.infer<typeof loginFormSchema>) {
|
||||
"use server";
|
||||
return await Sentry.withServerActionInstrumentation(
|
||||
"signup",
|
||||
{},
|
||||
async () => {
|
||||
const supabase = createServerClient();
|
||||
|
||||
if (!supabase) {
|
||||
redirect("/error");
|
||||
}
|
||||
|
||||
// We are sure that the values are of the correct type because zod validates the form
|
||||
const { data, error } = await supabase.auth.signUp(values);
|
||||
|
||||
if (error) {
|
||||
return error.message;
|
||||
}
|
||||
|
||||
if (data.session) {
|
||||
await supabase.auth.setSession(data.session);
|
||||
}
|
||||
|
||||
revalidatePath("/", "layout");
|
||||
redirect("/profile");
|
||||
},
|
||||
);
|
||||
}
|
||||
234
autogpt_platform/autogpt_builder/src/app/login/page.tsx
Normal file
234
autogpt_platform/autogpt_builder/src/app/login/page.tsx
Normal file
@@ -0,0 +1,234 @@
|
||||
"use client";
|
||||
import useUser from "@/hooks/useUser";
|
||||
import { login, signup } from "./actions";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Form,
|
||||
FormControl,
|
||||
FormDescription,
|
||||
FormField,
|
||||
FormItem,
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
import { useForm } from "react-hook-form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { z } from "zod";
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { PasswordInput } from "@/components/PasswordInput";
|
||||
import { FaGoogle, FaGithub, FaDiscord, FaSpinner } from "react-icons/fa";
|
||||
import { useState } from "react";
|
||||
import { useSupabase } from "@/components/SupabaseProvider";
|
||||
import { useRouter } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
|
||||
const loginFormSchema = z.object({
|
||||
email: z.string().email().min(2).max(64),
|
||||
password: z.string().min(6).max(64),
|
||||
agreeToTerms: z.boolean().refine((value) => value === true, {
|
||||
message: "You must agree to the Terms of Service and Privacy Policy",
|
||||
}),
|
||||
});
|
||||
|
||||
export default function LoginPage() {
|
||||
const { supabase, isLoading: isSupabaseLoading } = useSupabase();
|
||||
const { user, isLoading: isUserLoading } = useUser();
|
||||
const [feedback, setFeedback] = useState<string | null>(null);
|
||||
const router = useRouter();
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const form = useForm<z.infer<typeof loginFormSchema>>({
|
||||
resolver: zodResolver(loginFormSchema),
|
||||
defaultValues: {
|
||||
email: "",
|
||||
password: "",
|
||||
agreeToTerms: false,
|
||||
},
|
||||
});
|
||||
|
||||
if (user) {
|
||||
console.log("User exists, redirecting to profile");
|
||||
router.push("/profile");
|
||||
}
|
||||
|
||||
if (isUserLoading || isSupabaseLoading || user) {
|
||||
return (
|
||||
<div className="flex h-[80vh] items-center justify-center">
|
||||
<FaSpinner className="mr-2 h-16 w-16 animate-spin" />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (!supabase) {
|
||||
return (
|
||||
<div>
|
||||
User accounts are disabled because Supabase client is unavailable
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
async function handleSignInWithProvider(
|
||||
provider: "google" | "github" | "discord",
|
||||
) {
|
||||
const { data, error } = await supabase!.auth.signInWithOAuth({
|
||||
provider: provider,
|
||||
options: {
|
||||
redirectTo:
|
||||
process.env.AUTH_CALLBACK_URL ??
|
||||
`http://localhost:3000/auth/callback`,
|
||||
},
|
||||
});
|
||||
|
||||
if (!error) {
|
||||
setFeedback(null);
|
||||
return;
|
||||
}
|
||||
setFeedback(error.message);
|
||||
}
|
||||
|
||||
const onLogin = async (data: z.infer<typeof loginFormSchema>) => {
|
||||
setIsLoading(true);
|
||||
const error = await login(data);
|
||||
setIsLoading(false);
|
||||
if (error) {
|
||||
setFeedback(error);
|
||||
return;
|
||||
}
|
||||
setFeedback(null);
|
||||
};
|
||||
|
||||
const onSignup = async (data: z.infer<typeof loginFormSchema>) => {
|
||||
if (await form.trigger()) {
|
||||
setIsLoading(true);
|
||||
const error = await signup(data);
|
||||
setIsLoading(false);
|
||||
if (error) {
|
||||
setFeedback(error);
|
||||
return;
|
||||
}
|
||||
setFeedback(null);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="flex h-[80vh] items-center justify-center">
|
||||
<div className="w-full max-w-md space-y-6 rounded-lg p-8 shadow-md">
|
||||
<div className="mb-6 space-y-2">
|
||||
<Button
|
||||
className="w-full"
|
||||
onClick={() => handleSignInWithProvider("google")}
|
||||
variant="outline"
|
||||
type="button"
|
||||
disabled={isLoading}
|
||||
>
|
||||
<FaGoogle className="mr-2 h-4 w-4" />
|
||||
Sign in with Google
|
||||
</Button>
|
||||
<Button
|
||||
className="w-full"
|
||||
onClick={() => handleSignInWithProvider("github")}
|
||||
variant="outline"
|
||||
type="button"
|
||||
disabled={isLoading}
|
||||
>
|
||||
<FaGithub className="mr-2 h-4 w-4" />
|
||||
Sign in with GitHub
|
||||
</Button>
|
||||
<Button
|
||||
className="w-full"
|
||||
onClick={() => handleSignInWithProvider("discord")}
|
||||
variant="outline"
|
||||
type="button"
|
||||
disabled={isLoading}
|
||||
>
|
||||
<FaDiscord className="mr-2 h-4 w-4" />
|
||||
Sign in with Discord
|
||||
</Button>
|
||||
</div>
|
||||
<Form {...form}>
|
||||
<form onSubmit={form.handleSubmit(onLogin)}>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="email"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mb-4">
|
||||
<FormLabel>Email</FormLabel>
|
||||
<FormControl>
|
||||
<Input placeholder="user@email.com" {...field} />
|
||||
</FormControl>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="password"
|
||||
render={({ field }) => (
|
||||
<FormItem>
|
||||
<FormLabel>Password</FormLabel>
|
||||
<FormControl>
|
||||
<PasswordInput placeholder="password" {...field} />
|
||||
</FormControl>
|
||||
<FormDescription>
|
||||
Password needs to be at least 6 characters long
|
||||
</FormDescription>
|
||||
<FormMessage />
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<FormField
|
||||
control={form.control}
|
||||
name="agreeToTerms"
|
||||
render={({ field }) => (
|
||||
<FormItem className="mt-4 flex flex-row items-start space-x-3 space-y-0">
|
||||
<FormControl>
|
||||
<Checkbox
|
||||
checked={field.value}
|
||||
onCheckedChange={field.onChange}
|
||||
/>
|
||||
</FormControl>
|
||||
<div className="space-y-1 leading-none">
|
||||
<FormLabel>
|
||||
I agree to the{" "}
|
||||
<Link href="/terms-of-service" className="underline">
|
||||
Terms of Service
|
||||
</Link>{" "}
|
||||
and{" "}
|
||||
<Link
|
||||
href="https://www.notion.so/auto-gpt/Privacy-Policy-ab11c9c20dbd4de1a15dcffe84d77984"
|
||||
className="underline"
|
||||
>
|
||||
Privacy Policy
|
||||
</Link>
|
||||
</FormLabel>
|
||||
<FormMessage />
|
||||
</div>
|
||||
</FormItem>
|
||||
)}
|
||||
/>
|
||||
<div className="mb-6 mt-6 flex w-full space-x-4">
|
||||
<Button
|
||||
className="flex w-1/2 justify-center"
|
||||
type="submit"
|
||||
disabled={isLoading}
|
||||
>
|
||||
Log in
|
||||
</Button>
|
||||
<Button
|
||||
className="flex w-1/2 justify-center"
|
||||
variant="outline"
|
||||
type="button"
|
||||
onClick={form.handleSubmit(onSignup)}
|
||||
disabled={isLoading}
|
||||
>
|
||||
Sign up
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
<p className="text-sm text-red-500">{feedback}</p>
|
||||
</Form>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,41 @@
|
||||
import { Suspense } from "react";
|
||||
import { notFound } from "next/navigation";
|
||||
import MarketplaceAPI from "@/lib/marketplace-api";
|
||||
import { AgentDetailResponse } from "@/lib/marketplace-api";
|
||||
import AgentDetailContent from "@/components/marketplace/AgentDetailContent";
|
||||
|
||||
async function getAgentDetails(id: string): Promise<AgentDetailResponse> {
|
||||
const apiUrl =
|
||||
process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
|
||||
"http://localhost:8015/api/v1/market";
|
||||
const api = new MarketplaceAPI(apiUrl);
|
||||
try {
|
||||
console.log(`Fetching agent details for id: ${id}`);
|
||||
const agent = await api.getAgentDetails(id);
|
||||
console.log(`Agent details fetched:`, agent);
|
||||
return agent;
|
||||
} catch (error) {
|
||||
console.error(`Error fetching agent details:`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export default async function AgentDetailPage({
|
||||
params,
|
||||
}: {
|
||||
params: { id: string };
|
||||
}) {
|
||||
let agent: AgentDetailResponse;
|
||||
|
||||
try {
|
||||
agent = await getAgentDetails(params.id);
|
||||
} catch (error) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
return (
|
||||
<Suspense fallback={<div>Loading...</div>}>
|
||||
<AgentDetailContent agent={agent} />
|
||||
</Suspense>
|
||||
);
|
||||
}
|
||||
317
autogpt_platform/autogpt_builder/src/app/marketplace/page.tsx
Normal file
317
autogpt_platform/autogpt_builder/src/app/marketplace/page.tsx
Normal file
@@ -0,0 +1,317 @@
|
||||
"use client";
|
||||
import React, { useEffect, useMemo, useState, useCallback } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import Image from "next/image";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import MarketplaceAPI, {
|
||||
AgentResponse,
|
||||
AgentListResponse,
|
||||
AgentWithRank,
|
||||
} from "@/lib/marketplace-api";
|
||||
import {
|
||||
ChevronLeft,
|
||||
ChevronRight,
|
||||
PlusCircle,
|
||||
Search,
|
||||
Star,
|
||||
} from "lucide-react";
|
||||
|
||||
// Utility Functions
|
||||
function debounce<T extends (...args: any[]) => any>(
|
||||
func: T,
|
||||
wait: number,
|
||||
): (...args: Parameters<T>) => void {
|
||||
let timeout: NodeJS.Timeout | null = null;
|
||||
return (...args: Parameters<T>) => {
|
||||
if (timeout) clearTimeout(timeout);
|
||||
timeout = setTimeout(() => func(...args), wait);
|
||||
};
|
||||
}
|
||||
|
||||
// Types
|
||||
type Agent = AgentResponse | AgentWithRank;
|
||||
|
||||
// Components
|
||||
const HeroSection: React.FC = () => {
|
||||
const router = useRouter();
|
||||
|
||||
return (
|
||||
<div className="relative bg-indigo-600 py-6">
|
||||
<div className="absolute inset-0 z-0">
|
||||
<Image
|
||||
src="https://images.unsplash.com/photo-1562408590-e32931084e23?auto=format&fit=crop&w=2070&q=80"
|
||||
alt="Marketplace background"
|
||||
layout="fill"
|
||||
objectFit="cover"
|
||||
quality={75}
|
||||
priority
|
||||
className="opacity-20"
|
||||
/>
|
||||
<div
|
||||
className="absolute inset-0 bg-indigo-600 mix-blend-multiply"
|
||||
aria-hidden="true"
|
||||
></div>
|
||||
</div>
|
||||
<div className="relative mx-auto flex max-w-7xl items-center justify-between px-4 py-4 sm:px-6 lg:px-8">
|
||||
<div>
|
||||
<h1 className="text-2xl font-extrabold tracking-tight text-white sm:text-3xl lg:text-4xl">
|
||||
AutoGPT Marketplace
|
||||
</h1>
|
||||
<p className="mt-2 max-w-3xl text-sm text-indigo-100 sm:text-base">
|
||||
Discover and share proven AI Agents to supercharge your business.
|
||||
</p>
|
||||
</div>
|
||||
<Button
|
||||
onClick={() => router.push("/marketplace/submit")}
|
||||
className="flex items-center bg-white text-indigo-600 hover:bg-indigo-50"
|
||||
>
|
||||
<PlusCircle className="mr-2 h-4 w-4" />
|
||||
Submit Agent
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const SearchInput: React.FC<{
|
||||
value: string;
|
||||
onChange: (e: React.ChangeEvent<HTMLInputElement>) => void;
|
||||
}> = ({ value, onChange }) => (
|
||||
<div className="relative mb-8">
|
||||
<Input
|
||||
placeholder="Search agents..."
|
||||
type="text"
|
||||
className="w-full rounded-full border-gray-300 py-2 pl-10 pr-4 focus:border-indigo-500 focus:ring-indigo-500"
|
||||
value={value}
|
||||
onChange={onChange}
|
||||
/>
|
||||
<Search
|
||||
className="absolute left-3 top-1/2 -translate-y-1/2 transform text-gray-400"
|
||||
size={20}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
const AgentCard: React.FC<{ agent: Agent; featured?: boolean }> = ({
|
||||
agent,
|
||||
featured = false,
|
||||
}) => {
|
||||
const router = useRouter();
|
||||
|
||||
const handleClick = () => {
|
||||
router.push(`/marketplace/${agent.id}`);
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`flex cursor-pointer flex-col justify-between rounded-lg border p-6 transition-colors duration-200 hover:bg-gray-50 ${featured ? "border-indigo-500 shadow-md" : "border-gray-200"}`}
|
||||
onClick={handleClick}
|
||||
>
|
||||
<div>
|
||||
<div className="mb-2 flex items-center justify-between">
|
||||
<h3 className="truncate text-lg font-semibold text-gray-900">
|
||||
{agent.name}
|
||||
</h3>
|
||||
{featured && <Star className="text-indigo-500" size={20} />}
|
||||
</div>
|
||||
<p className="mb-4 line-clamp-2 text-sm text-gray-500">
|
||||
{agent.description}
|
||||
</p>
|
||||
<div className="mb-2 text-xs text-gray-400">
|
||||
Categories: {agent.categories.join(", ")}
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex items-end justify-between">
|
||||
<div className="text-xs text-gray-400">
|
||||
Updated {new Date(agent.updatedAt).toLocaleDateString()}
|
||||
</div>
|
||||
<div className="text-xs text-gray-400">Downloads {agent.downloads}</div>
|
||||
{"rank" in agent && (
|
||||
<div className="text-xs text-indigo-600">
|
||||
Rank: {agent.rank.toFixed(2)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
const AgentGrid: React.FC<{
|
||||
agents: Agent[];
|
||||
title: string;
|
||||
featured?: boolean;
|
||||
}> = ({ agents, title, featured = false }) => (
|
||||
<div className="mb-12">
|
||||
<h2 className="mb-4 text-2xl font-bold text-gray-900">{title}</h2>
|
||||
<div className="grid grid-cols-1 gap-6 md:grid-cols-2 lg:grid-cols-3">
|
||||
{agents.map((agent) => (
|
||||
<AgentCard agent={agent} key={agent.id} featured={featured} />
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
const Pagination: React.FC<{
|
||||
page: number;
|
||||
totalPages: number;
|
||||
onPrevPage: () => void;
|
||||
onNextPage: () => void;
|
||||
}> = ({ page, totalPages, onPrevPage, onNextPage }) => (
|
||||
<div className="mt-8 flex items-center justify-between">
|
||||
<Button
|
||||
onClick={onPrevPage}
|
||||
disabled={page === 1}
|
||||
className="flex items-center space-x-2 rounded-md border border-gray-300 bg-white px-4 py-2 text-sm font-medium text-gray-700 shadow-sm hover:bg-gray-50"
|
||||
>
|
||||
<ChevronLeft size={16} />
|
||||
<span>Previous</span>
|
||||
</Button>
|
||||
<span className="text-sm text-gray-700">
|
||||
Page {page} of {totalPages}
|
||||
</span>
|
||||
<Button
|
||||
onClick={onNextPage}
|
||||
disabled={page === totalPages}
|
||||
className="flex items-center space-x-2 rounded-md border border-gray-300 bg-white px-4 py-2 text-sm font-medium text-gray-700 shadow-sm hover:bg-gray-50"
|
||||
>
|
||||
<span>Next</span>
|
||||
<ChevronRight size={16} />
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
|
||||
// Main Component
|
||||
const Marketplace: React.FC = () => {
|
||||
const apiUrl =
|
||||
process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
|
||||
"http://localhost:8015/api/v1/market";
|
||||
const api = useMemo(() => new MarketplaceAPI(apiUrl), [apiUrl]);
|
||||
|
||||
const [searchValue, setSearchValue] = useState("");
|
||||
const [searchResults, setSearchResults] = useState<Agent[]>([]);
|
||||
const [featuredAgents, setFeaturedAgents] = useState<Agent[]>([]);
|
||||
const [topAgents, setTopAgents] = useState<Agent[]>([]);
|
||||
const [page, setPage] = useState(1);
|
||||
const [totalPages, setTotalPages] = useState(1);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
|
||||
const fetchTopAgents = useCallback(
|
||||
async (currentPage: number) => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const response = await api.getTopDownloadedAgents(currentPage, 9);
|
||||
setTopAgents(response.agents);
|
||||
setTotalPages(response.total_pages);
|
||||
} catch (error) {
|
||||
console.error("Error fetching top agents:", error);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
},
|
||||
[api],
|
||||
);
|
||||
|
||||
const fetchFeaturedAgents = useCallback(async () => {
|
||||
try {
|
||||
const featured = await api.getFeaturedAgents();
|
||||
setFeaturedAgents(featured.agents);
|
||||
} catch (error) {
|
||||
console.error("Error fetching featured agents:", error);
|
||||
}
|
||||
}, [api]);
|
||||
|
||||
const searchAgents = useCallback(
|
||||
async (searchTerm: string) => {
|
||||
setIsLoading(true);
|
||||
try {
|
||||
const response = await api.searchAgents(searchTerm, 1, 30);
|
||||
const filteredAgents = response.filter((agent) => agent.rank > 0);
|
||||
setSearchResults(filteredAgents);
|
||||
} catch (error) {
|
||||
console.error("Error searching agents:", error);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
},
|
||||
[api],
|
||||
);
|
||||
|
||||
const debouncedSearch = useMemo(
|
||||
() => debounce(searchAgents, 300),
|
||||
[searchAgents],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
if (searchValue) {
|
||||
debouncedSearch(searchValue);
|
||||
} else {
|
||||
fetchTopAgents(page);
|
||||
}
|
||||
}, [searchValue, page, debouncedSearch, fetchTopAgents]);
|
||||
|
||||
useEffect(() => {
|
||||
fetchFeaturedAgents();
|
||||
}, [fetchFeaturedAgents]);
|
||||
|
||||
const handleInputChange = (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
setSearchValue(e.target.value);
|
||||
setPage(1);
|
||||
};
|
||||
|
||||
const handleNextPage = () => {
|
||||
if (page < totalPages) {
|
||||
setPage(page + 1);
|
||||
}
|
||||
};
|
||||
|
||||
const handlePrevPage = () => {
|
||||
if (page > 1) {
|
||||
setPage(page - 1);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="min-h-screen bg-gray-50">
|
||||
<HeroSection />
|
||||
<div className="mx-auto max-w-7xl px-4 py-12 sm:px-6 lg:px-8">
|
||||
<SearchInput value={searchValue} onChange={handleInputChange} />
|
||||
{isLoading ? (
|
||||
<div className="py-12 text-center">
|
||||
<div className="inline-block h-8 w-8 animate-spin rounded-full border-b-2 border-gray-900"></div>
|
||||
<p className="mt-2 text-gray-600">Loading agents...</p>
|
||||
</div>
|
||||
) : searchValue ? (
|
||||
searchResults.length > 0 ? (
|
||||
<AgentGrid agents={searchResults} title="Search Results" />
|
||||
) : (
|
||||
<div className="py-12 text-center">
|
||||
<p className="text-gray-600">
|
||||
No agents found matching your search criteria.
|
||||
</p>
|
||||
</div>
|
||||
)
|
||||
) : (
|
||||
<>
|
||||
{featuredAgents.length > 0 && (
|
||||
<AgentGrid
|
||||
agents={featuredAgents}
|
||||
title="Featured Agents"
|
||||
featured={true}
|
||||
/>
|
||||
)}
|
||||
<AgentGrid agents={topAgents} title="Top Downloaded Agents" />
|
||||
<Pagination
|
||||
page={page}
|
||||
totalPages={totalPages}
|
||||
onPrevPage={handlePrevPage}
|
||||
onNextPage={handleNextPage}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default Marketplace;
|
||||
@@ -0,0 +1,408 @@
|
||||
"use client";
|
||||
|
||||
import React, { useState, useEffect, useMemo } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { useForm, Controller } from "react-hook-form";
|
||||
import MarketplaceAPI from "@/lib/marketplace-api";
|
||||
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
|
||||
import { Card } from "@/components/ui/card";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Textarea } from "@/components/ui/textarea";
|
||||
import { Alert, AlertTitle, AlertDescription } from "@/components/ui/alert";
|
||||
import { Checkbox } from "@/components/ui/checkbox";
|
||||
import {
|
||||
MultiSelector,
|
||||
MultiSelectorContent,
|
||||
MultiSelectorInput,
|
||||
MultiSelectorItem,
|
||||
MultiSelectorList,
|
||||
MultiSelectorTrigger,
|
||||
} from "@/components/ui/multiselect";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
|
||||
type FormData = {
|
||||
name: string;
|
||||
description: string;
|
||||
author: string;
|
||||
keywords: string[];
|
||||
categories: string[];
|
||||
agreeToTerms: boolean;
|
||||
selectedAgentId: string;
|
||||
};
|
||||
|
||||
const SubmitPage: React.FC = () => {
|
||||
const router = useRouter();
|
||||
const {
|
||||
control,
|
||||
handleSubmit,
|
||||
watch,
|
||||
setValue,
|
||||
formState: { errors },
|
||||
} = useForm<FormData>({
|
||||
defaultValues: {
|
||||
selectedAgentId: "", // Initialize with an empty string
|
||||
name: "",
|
||||
description: "",
|
||||
author: "",
|
||||
keywords: [],
|
||||
categories: [],
|
||||
agreeToTerms: false,
|
||||
},
|
||||
});
|
||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||
const [submitError, setSubmitError] = useState<string | null>(null);
|
||||
const [userAgents, setUserAgents] = useState<
|
||||
Array<{ id: string; name: string; version: number }>
|
||||
>([]);
|
||||
const [selectedAgentGraph, setSelectedAgentGraph] = useState<any>(null);
|
||||
|
||||
const selectedAgentId = watch("selectedAgentId");
|
||||
|
||||
useEffect(() => {
|
||||
const fetchUserAgents = async () => {
|
||||
const api = new AutoGPTServerAPI();
|
||||
const agents = await api.listGraphs();
|
||||
console.log(agents);
|
||||
setUserAgents(
|
||||
agents.map((agent) => ({
|
||||
id: agent.id,
|
||||
name: agent.name || `Agent (${agent.id})`,
|
||||
version: agent.version,
|
||||
})),
|
||||
);
|
||||
};
|
||||
|
||||
fetchUserAgents();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const fetchAgentGraph = async () => {
|
||||
if (selectedAgentId) {
|
||||
const api = new AutoGPTServerAPI();
|
||||
const graph = await api.getGraph(selectedAgentId);
|
||||
setSelectedAgentGraph(graph);
|
||||
setValue("name", graph.name);
|
||||
setValue("description", graph.description);
|
||||
}
|
||||
};
|
||||
|
||||
fetchAgentGraph();
|
||||
}, [selectedAgentId, setValue]);
|
||||
|
||||
const onSubmit = async (data: FormData) => {
|
||||
setIsSubmitting(true);
|
||||
setSubmitError(null);
|
||||
|
||||
if (!data.agreeToTerms) {
|
||||
throw new Error("You must agree to the terms of service");
|
||||
}
|
||||
|
||||
try {
|
||||
if (!selectedAgentGraph) {
|
||||
throw new Error("Please select an agent");
|
||||
}
|
||||
|
||||
const api = new MarketplaceAPI();
|
||||
await api.submitAgent(
|
||||
{
|
||||
...selectedAgentGraph,
|
||||
name: data.name,
|
||||
description: data.description,
|
||||
},
|
||||
data.author,
|
||||
data.keywords,
|
||||
data.categories,
|
||||
);
|
||||
|
||||
router.push("/marketplace?submission=success");
|
||||
} catch (error) {
|
||||
console.error("Submission error:", error);
|
||||
setSubmitError(
|
||||
error instanceof Error ? error.message : "An unknown error occurred",
|
||||
);
|
||||
} finally {
|
||||
setIsSubmitting(false);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="container mx-auto px-4 py-8">
|
||||
<h1 className="mb-6 text-3xl font-bold">Submit Your Agent</h1>
|
||||
<Card className="p-6">
|
||||
<form onSubmit={handleSubmit(onSubmit)}>
|
||||
<div className="space-y-4">
|
||||
<Controller
|
||||
name="selectedAgentId"
|
control={control}
rules={{ required: "Please select an agent" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Select Agent
</label>
<Select
onValueChange={field.onChange}
value={field.value || ""}
>
<SelectTrigger className="w-full">
<SelectValue placeholder="Select an agent" />
</SelectTrigger>
<SelectContent>
{userAgents.map((agent) => (
<SelectItem key={agent.id} value={agent.id}>
{agent.name} (v{agent.version})
</SelectItem>
))}
</SelectContent>
</Select>
{errors.selectedAgentId && (
<p className="mt-1 text-sm text-red-600">
{errors.selectedAgentId.message}
</p>
)}
</div>
)}
/>

{/* {selectedAgentGraph && (
<div className="mt-4" style={{ height: "600px" }}>
<ReactFlow
nodes={nodes}
edges={edges}
fitView
attributionPosition="bottom-left"
nodesConnectable={false}
nodesDraggable={false}
zoomOnScroll={false}
panOnScroll={false}
elementsSelectable={false}
>
<Controls showInteractive={false} />
<Background />
</ReactFlow>
</div>
)} */}

<Controller
name="name"
control={control}
rules={{ required: "Name is required" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Agent Name
</label>
<Input
id={field.name}
placeholder="Enter your agent's name"
{...field}
/>
{errors.name && (
<p className="mt-1 text-sm text-red-600">
{errors.name.message}
</p>
)}
</div>
)}
/>

<Controller
name="description"
control={control}
rules={{ required: "Description is required" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Description
</label>
<Textarea
id={field.name}
placeholder="Describe your agent"
{...field}
/>
{errors.description && (
<p className="mt-1 text-sm text-red-600">
{errors.description.message}
</p>
)}
</div>
)}
/>

<Controller
name="author"
control={control}
rules={{ required: "Author is required" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Author
</label>
<Input
id={field.name}
placeholder="Your name or username"
{...field}
/>
{errors.author && (
<p className="mt-1 text-sm text-red-600">
{errors.author.message}
</p>
)}
</div>
)}
/>

<Controller
name="keywords"
control={control}
rules={{ required: "At least one keyword is required" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Keywords
</label>
<MultiSelector
values={field.value || []}
onValuesChange={field.onChange}
>
<MultiSelectorTrigger>
<MultiSelectorInput placeholder="Add keywords" />
</MultiSelectorTrigger>
<MultiSelectorContent>
<MultiSelectorList>
<MultiSelectorItem value="keyword1">
Keyword 1
</MultiSelectorItem>
<MultiSelectorItem value="keyword2">
Keyword 2
</MultiSelectorItem>
{/* Add more predefined keywords as needed */}
</MultiSelectorList>
</MultiSelectorContent>
</MultiSelector>
{errors.keywords && (
<p className="mt-1 text-sm text-red-600">
{errors.keywords.message}
</p>
)}
</div>
)}
/>

<Controller
name="categories"
control={control}
rules={{ required: "At least one category is required" }}
render={({ field }) => (
<div>
<label
htmlFor={field.name}
className="block text-sm font-medium text-gray-700"
>
Categories
</label>
<MultiSelector
values={field.value || []}
onValuesChange={field.onChange}
>
<MultiSelectorTrigger>
<MultiSelectorInput placeholder="Select categories" />
</MultiSelectorTrigger>
<MultiSelectorContent>
<MultiSelectorList>
<MultiSelectorItem value="productivity">
Productivity
</MultiSelectorItem>
<MultiSelectorItem value="entertainment">
Entertainment
</MultiSelectorItem>
<MultiSelectorItem value="education">
Education
</MultiSelectorItem>
<MultiSelectorItem value="business">
Business
</MultiSelectorItem>
<MultiSelectorItem value="other">
Other
</MultiSelectorItem>
</MultiSelectorList>
</MultiSelectorContent>
</MultiSelector>
{errors.categories && (
<p className="mt-1 text-sm text-red-600">
{errors.categories.message}
</p>
)}
</div>
)}
/>

<Controller
name="agreeToTerms"
control={control}
rules={{ required: "You must agree to the terms of service" }}
render={({ field }) => (
<div className="flex items-center space-x-2">
<Checkbox
id="agreeToTerms"
checked={field.value}
onCheckedChange={field.onChange}
/>
<label
htmlFor="agreeToTerms"
className="text-sm font-medium leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70"
>
I agree to the{" "}
<a href="/terms" className="text-blue-500 hover:underline">
terms of service
</a>
</label>
</div>
)}
/>
{errors.agreeToTerms && (
<p className="mt-1 text-sm text-red-600">
{errors.agreeToTerms.message}
</p>
)}

{submitError && (
<Alert variant="destructive">
<AlertTitle>Submission Failed</AlertTitle>
<AlertDescription>{submitError}</AlertDescription>
</Alert>
)}

<Button type="submit" className="w-full" disabled={isSubmitting}>
{isSubmitting ? "Submitting..." : "Submit Agent"}
</Button>
</div>
</form>
</Card>
</div>
);
};

export default SubmitPage;
178  autogpt_platform/autogpt_builder/src/app/page.tsx  Normal file
@@ -0,0 +1,178 @@
"use client";
import React, { useCallback, useEffect, useMemo, useState } from "react";

import AutoGPTServerAPI, {
GraphMeta,
NodeExecutionResult,
} from "@/lib/autogpt-server-api";

import { Card } from "@/components/ui/card";
import { FlowRun } from "@/lib/types";
import {
AgentFlowList,
FlowInfo,
FlowRunInfo,
FlowRunsList,
FlowRunsStats,
} from "@/components/monitor";

const Monitor = () => {
const [flows, setFlows] = useState<GraphMeta[]>([]);
const [flowRuns, setFlowRuns] = useState<FlowRun[]>([]);
const [selectedFlow, setSelectedFlow] = useState<GraphMeta | null>(null);
const [selectedRun, setSelectedRun] = useState<FlowRun | null>(null);

const api = useMemo(() => new AutoGPTServerAPI(), []);

const refreshFlowRuns = useCallback(
(flowID: string) => {
// Fetch flow run IDs
api.listGraphRunIDs(flowID).then((runIDs) =>
runIDs.map((runID) => {
let run;
if (
(run = flowRuns.find((fr) => fr.id == runID)) &&
!["waiting", "running"].includes(run.status)
) {
return;
}

// Fetch flow run
api.getGraphExecutionInfo(flowID, runID).then((execInfo) =>
setFlowRuns((flowRuns) => {
if (execInfo.length == 0) return flowRuns;

const flowRunIndex = flowRuns.findIndex((fr) => fr.id == runID);
const flowRun = flowRunFromNodeExecutionResults(execInfo);
if (flowRunIndex > -1) {
flowRuns.splice(flowRunIndex, 1, flowRun);
} else {
flowRuns.push(flowRun);
}
return [...flowRuns];
}),
);
}),
);
},
[api, flowRuns],
);

const fetchFlowsAndRuns = useCallback(() => {
api.listGraphs().then((flows) => {
setFlows(flows);
flows.map((flow) => refreshFlowRuns(flow.id));
});
}, [api, refreshFlowRuns]);

useEffect(() => fetchFlowsAndRuns(), [fetchFlowsAndRuns]);
useEffect(() => {
const intervalId = setInterval(
() => flows.map((f) => refreshFlowRuns(f.id)),
5000,
);
return () => clearInterval(intervalId);
}, [flows, refreshFlowRuns]);

const column1 = "md:col-span-2 xl:col-span-3 xxl:col-span-2";
const column2 = "md:col-span-3 lg:col-span-2 xl:col-span-3 space-y-4";
const column3 = "col-span-full xl:col-span-4 xxl:col-span-5";

return (
<div className="grid grid-cols-1 gap-4 md:grid-cols-5 lg:grid-cols-4 xl:grid-cols-10">
<AgentFlowList
className={column1}
flows={flows}
flowRuns={flowRuns}
selectedFlow={selectedFlow}
onSelectFlow={(f) => {
setSelectedRun(null);
setSelectedFlow(f.id == selectedFlow?.id ? null : f);
}}
/>
<FlowRunsList
className={column2}
flows={flows}
runs={[
...(selectedFlow
? flowRuns.filter((v) => v.graphID == selectedFlow.id)
: flowRuns),
].sort((a, b) => Number(a.startTime) - Number(b.startTime))}
selectedRun={selectedRun}
onSelectRun={(r) => setSelectedRun(r.id == selectedRun?.id ? null : r)}
/>
{(selectedRun && (
<FlowRunInfo
flow={selectedFlow || flows.find((f) => f.id == selectedRun.graphID)!}
flowRun={selectedRun}
className={column3}
/>
)) ||
(selectedFlow && (
<FlowInfo
flow={selectedFlow}
flowRuns={flowRuns.filter((r) => r.graphID == selectedFlow.id)}
className={column3}
/>
)) || (
<Card className={`p-6 ${column3}`}>
<FlowRunsStats flows={flows} flowRuns={flowRuns} />
</Card>
)}
</div>
);
};

function flowRunFromNodeExecutionResults(
nodeExecutionResults: NodeExecutionResult[],
): FlowRun {
// Determine overall status
let status: "running" | "waiting" | "success" | "failed" = "success";
for (const execution of nodeExecutionResults) {
if (execution.status === "FAILED") {
status = "failed";
break;
} else if (["QUEUED", "RUNNING"].includes(execution.status)) {
status = "running";
break;
} else if (execution.status === "INCOMPLETE") {
status = "waiting";
}
}

// Determine aggregate startTime, endTime, and totalRunTime
const now = Date.now();
const startTime = Math.min(
...nodeExecutionResults.map((ner) => ner.add_time.getTime()),
now,
);
const endTime = ["success", "failed"].includes(status)
? Math.max(
...nodeExecutionResults.map((ner) => ner.end_time?.getTime() || 0),
startTime,
)
: now;
const duration = (endTime - startTime) / 1000; // Convert to seconds
const totalRunTime =
nodeExecutionResults.reduce(
(cum, node) =>
cum +
((node.end_time?.getTime() ?? now) -
(node.start_time?.getTime() ?? now)),
0,
) / 1000;

return {
id: nodeExecutionResults[0].graph_exec_id,
graphID: nodeExecutionResults[0].graph_id,
graphVersion: nodeExecutionResults[0].graph_version,
status,
startTime,
endTime,
duration,
totalRunTime,
nodeExecutionResults: nodeExecutionResults,
};
}

export default Monitor;
33  autogpt_platform/autogpt_builder/src/app/profile/page.tsx  Normal file
@@ -0,0 +1,33 @@
"use client";

import { useSupabase } from "@/components/SupabaseProvider";
import { Button } from "@/components/ui/button";
import useUser from "@/hooks/useUser";
import { useRouter } from "next/navigation";
import { FaSpinner } from "react-icons/fa";

export default function PrivatePage() {
const { user, isLoading, error } = useUser();
const { supabase } = useSupabase();
const router = useRouter();

if (isLoading) {
return (
<div className="flex h-[80vh] items-center justify-center">
<FaSpinner className="mr-2 h-16 w-16 animate-spin" />
</div>
);
}

if (error || !user || !supabase) {
router.push("/login");
return null;
}

return (
<div>
<p>Hello {user.email}</p>
<Button onClick={() => supabase.auth.signOut()}>Log out</Button>
</div>
);
}
17  autogpt_platform/autogpt_builder/src/app/providers.tsx  Normal file
@@ -0,0 +1,17 @@
"use client";

import * as React from "react";
import { ThemeProvider as NextThemesProvider } from "next-themes";
import { ThemeProviderProps } from "next-themes/dist/types";
import { TooltipProvider } from "@/components/ui/tooltip";
import SupabaseProvider from "@/components/SupabaseProvider";

export function Providers({ children, ...props }: ThemeProviderProps) {
return (
<NextThemesProvider {...props}>
<SupabaseProvider>
<TooltipProvider>{children}</TooltipProvider>
</SupabaseProvider>
</NextThemesProvider>
);
}
@@ -0,0 +1,32 @@
"use client";

import { useState, useEffect } from "react";
import { Button } from "@/components/ui/button";
import { IconRefresh } from "@/components/ui/icons";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";

export default function CreditButton() {
const [credit, setCredit] = useState<number | null>(null);
const api = new AutoGPTServerAPI();

const fetchCredit = async () => {
const response = await api.getUserCredit();
setCredit(response.credits);
};
useEffect(() => {
fetchCredit();
}, [api]);

return (
credit !== null && (
<Button
onClick={fetchCredit}
variant="outline"
className="flex items-center space-x-2 text-muted-foreground"
>
<span>Credits: {credit}</span>
<IconRefresh />
</Button>
)
);
}
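Note (reviewer sketch, not part of the diff): the `[api]` dependency above refers to a client that is re-created on every render. A minimal alternative, assuming the same `AutoGPTServerAPI` client and `getUserCredit()` response shape used in src/app/page.tsx above, would memoize the client the way the Monitor page does (`useMemo` and `useCallback` would need to be imported from "react"):

// Sketch only — mirrors the memoization pattern from the Monitor page above.
const api = useMemo(() => new AutoGPTServerAPI(), []);

const fetchCredit = useCallback(async () => {
  const response = await api.getUserCredit();
  setCredit(response.credits);
}, [api]);

useEffect(() => {
  fetchCredit();
}, [fetchCredit]);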
@@ -40,6 +40,7 @@ export function CustomEdge({
|
||||
targetY,
|
||||
markerEnd,
|
||||
}: EdgeProps<CustomEdge>) {
|
||||
const [isHovered, setIsHovered] = useState(false);
|
||||
const [beads, setBeads] = useState<{
|
||||
beads: Bead[];
|
||||
created: number;
|
||||
@@ -47,9 +48,9 @@ export function CustomEdge({
|
||||
}>({ beads: [], created: 0, destroyed: 0 });
|
||||
const { svgPath, length, getPointForT, getTForDistance } = useBezierPath(
|
||||
sourceX - 5,
|
||||
sourceY - 5,
|
||||
sourceY,
|
||||
targetX + 3,
|
||||
targetY - 5,
|
||||
targetY,
|
||||
);
|
||||
const { deleteElements } = useReactFlow<Node, CustomEdge>();
|
||||
const { visualizeBeads } = useContext(FlowContext) ?? {
|
||||
@@ -181,7 +182,13 @@ export function CustomEdge({
|
||||
<BaseEdge
|
||||
path={svgPath}
|
||||
markerEnd={markerEnd}
|
||||
className={`transition-all duration-200 ${data?.isStatic ? "[stroke-dasharray:5_3]" : "[stroke-dasharray:0]"} [stroke-width:${data?.isStatic ? 2.5 : 2}px] hover:[stroke-width:${data?.isStatic ? 3.5 : 3}px] ${selected ? `[stroke:${data?.edgeColor ?? "#555555"}]` : `[stroke:${data?.edgeColor ?? "#555555"}80] hover:[stroke:${data?.edgeColor ?? "#555555"}]`}`}
|
||||
style={{
|
||||
strokeWidth: (isHovered ? 3 : 2) + (data?.isStatic ? 0.5 : 0),
|
||||
stroke:
|
||||
(data?.edgeColor ?? "#555555") +
|
||||
(selected || isHovered ? "" : "80"),
|
||||
strokeDasharray: data?.isStatic ? "5 3" : "0",
|
||||
}}
|
||||
/>
|
||||
<path
|
||||
d={svgPath}
|
||||
@@ -189,6 +196,8 @@ export function CustomEdge({
|
||||
strokeOpacity={0}
|
||||
strokeWidth={20}
|
||||
className="react-flow__edge-interaction"
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
/>
|
||||
<EdgeLabelRenderer>
|
||||
<div
|
||||
@@ -200,7 +209,9 @@ export function CustomEdge({
|
||||
className="edge-label-renderer"
|
||||
>
|
||||
<button
|
||||
className="edge-label-button opacity-0 transition-opacity duration-200 hover:opacity-100"
|
||||
onMouseEnter={() => setIsHovered(true)}
|
||||
onMouseLeave={() => setIsHovered(false)}
|
||||
className={`edge-label-button ${isHovered ? "visible" : ""}`}
|
||||
onClick={onEdgeRemoveClick}
|
||||
>
|
||||
<X className="size-4" />
|
||||
660  autogpt_platform/autogpt_builder/src/components/CustomNode.tsx  Normal file
@@ -0,0 +1,660 @@
|
||||
import React, {
|
||||
useState,
|
||||
useEffect,
|
||||
useCallback,
|
||||
useRef,
|
||||
useContext,
|
||||
} from "react";
|
||||
import { NodeProps, useReactFlow, Node, Edge } from "@xyflow/react";
|
||||
import "@xyflow/react/dist/style.css";
|
||||
import "./customnode.css";
|
||||
import InputModalComponent from "./InputModalComponent";
|
||||
import OutputModalComponent from "./OutputModalComponent";
|
||||
import {
|
||||
BlockIORootSchema,
|
||||
BlockIOStringSubSchema,
|
||||
Category,
|
||||
NodeExecutionResult,
|
||||
BlockUIType,
|
||||
BlockCost,
|
||||
} from "@/lib/autogpt-server-api/types";
|
||||
import { beautifyString, cn, setNestedProperty } from "@/lib/utils";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { Copy, Trash2 } from "lucide-react";
|
||||
import { history } from "./history";
|
||||
import NodeHandle from "./NodeHandle";
|
||||
import {
|
||||
NodeGenericInputField,
|
||||
NodeTextBoxInput,
|
||||
} from "./node-input-components";
|
||||
import SchemaTooltip from "./SchemaTooltip";
|
||||
import { getPrimaryCategoryColor } from "@/lib/utils";
|
||||
import { FlowContext } from "./Flow";
|
||||
import { Badge } from "./ui/badge";
|
||||
import DataTable from "./DataTable";
|
||||
|
||||
type ParsedKey = { key: string; index?: number };
|
||||
|
||||
export type ConnectionData = Array<{
|
||||
edge_id: string;
|
||||
source: string;
|
||||
sourceHandle: string;
|
||||
target: string;
|
||||
targetHandle: string;
|
||||
}>;
|
||||
|
||||
export type CustomNodeData = {
|
||||
blockType: string;
|
||||
blockCosts: BlockCost[];
|
||||
title: string;
|
||||
description: string;
|
||||
categories: Category[];
|
||||
inputSchema: BlockIORootSchema;
|
||||
outputSchema: BlockIORootSchema;
|
||||
hardcodedValues: { [key: string]: any };
|
||||
connections: ConnectionData;
|
||||
isOutputOpen: boolean;
|
||||
status?: NodeExecutionResult["status"];
|
||||
/** executionResults contains outputs across multiple executions
|
||||
* with the last element being the most recent output */
|
||||
executionResults?: {
|
||||
execId: string;
|
||||
data: NodeExecutionResult["output_data"];
|
||||
}[];
|
||||
block_id: string;
|
||||
backend_id?: string;
|
||||
errors?: { [key: string]: string };
|
||||
isOutputStatic?: boolean;
|
||||
uiType: BlockUIType;
|
||||
};
|
||||
|
||||
export type CustomNode = Node<CustomNodeData, "custom">;
|
||||
|
||||
export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
|
||||
const [isOutputOpen, setIsOutputOpen] = useState(data.isOutputOpen || false);
|
||||
const [isAdvancedOpen, setIsAdvancedOpen] = useState(false);
|
||||
const [isModalOpen, setIsModalOpen] = useState(false);
|
||||
const [activeKey, setActiveKey] = useState<string | null>(null);
|
||||
const [inputModalValue, setInputModalValue] = useState<string>("");
|
||||
const [isOutputModalOpen, setIsOutputModalOpen] = useState(false);
|
||||
const [isHovered, setIsHovered] = useState(false);
|
||||
const { updateNodeData, deleteElements, addNodes, getNode } = useReactFlow<
|
||||
CustomNode,
|
||||
Edge
|
||||
>();
|
||||
const isInitialSetup = useRef(true);
|
||||
const flowContext = useContext(FlowContext);
|
||||
|
||||
if (!flowContext) {
|
||||
throw new Error("FlowContext consumer must be inside FlowEditor component");
|
||||
}
|
||||
|
||||
const { setIsAnyModalOpen, getNextNodeId } = flowContext;
|
||||
|
||||
useEffect(() => {
|
||||
if (data.executionResults || data.status) {
|
||||
setIsOutputOpen(true);
|
||||
}
|
||||
}, [data.executionResults, data.status]);
|
||||
|
||||
useEffect(() => {
|
||||
setIsOutputOpen(data.isOutputOpen);
|
||||
}, [data.isOutputOpen]);
|
||||
|
||||
useEffect(() => {
|
||||
setIsAnyModalOpen?.(isModalOpen || isOutputModalOpen);
|
||||
}, [isModalOpen, isOutputModalOpen, data, setIsAnyModalOpen]);
|
||||
|
||||
useEffect(() => {
|
||||
isInitialSetup.current = false;
|
||||
}, []);
|
||||
|
||||
const setHardcodedValues = (values: any) => {
|
||||
updateNodeData(id, { hardcodedValues: values });
|
||||
};
|
||||
|
||||
const setErrors = (errors: { [key: string]: string }) => {
|
||||
updateNodeData(id, { errors });
|
||||
};
|
||||
|
||||
const toggleOutput = (checked: boolean) => {
|
||||
setIsOutputOpen(checked);
|
||||
};
|
||||
|
||||
const toggleAdvancedSettings = (checked: boolean) => {
|
||||
setIsAdvancedOpen(checked);
|
||||
};
|
||||
|
||||
const generateOutputHandles = (
|
||||
schema: BlockIORootSchema,
|
||||
nodeType: BlockUIType,
|
||||
) => {
|
||||
if (
|
||||
!schema?.properties ||
|
||||
nodeType === BlockUIType.OUTPUT ||
|
||||
nodeType === BlockUIType.NOTE
|
||||
)
|
||||
return null;
|
||||
const keys = Object.keys(schema.properties);
|
||||
return keys.map((key) => (
|
||||
<div key={key}>
|
||||
<NodeHandle
|
||||
keyName={key}
|
||||
isConnected={isHandleConnected(key)}
|
||||
schema={schema.properties[key]}
|
||||
side="right"
|
||||
/>
|
||||
</div>
|
||||
));
|
||||
};
|
||||
|
||||
const generateInputHandles = (
|
||||
schema: BlockIORootSchema,
|
||||
nodeType: BlockUIType,
|
||||
) => {
|
||||
if (!schema?.properties) return null;
|
||||
let keys = Object.entries(schema.properties);
|
||||
switch (nodeType) {
|
||||
case BlockUIType.INPUT:
|
||||
// For INPUT blocks, don't include connection handles
|
||||
return keys.map(([propKey, propSchema]) => {
|
||||
const isRequired = data.inputSchema.required?.includes(propKey);
|
||||
const isConnected = isHandleConnected(propKey);
|
||||
const isAdvanced = propSchema.advanced;
|
||||
return (
|
||||
(isRequired || isAdvancedOpen || !isAdvanced) && (
|
||||
<div key={propKey}>
|
||||
<span className="text-m green -mb-1 text-gray-900">
|
||||
{propSchema.title || beautifyString(propKey)}
|
||||
</span>
|
||||
<div key={propKey} onMouseOver={() => {}}>
|
||||
{!isConnected && (
|
||||
<NodeGenericInputField
|
||||
className="mb-2 mt-1"
|
||||
propKey={propKey}
|
||||
propSchema={propSchema}
|
||||
currentValue={getValue(propKey)}
|
||||
connections={data.connections}
|
||||
handleInputChange={handleInputChange}
|
||||
handleInputClick={handleInputClick}
|
||||
errors={data.errors ?? {}}
|
||||
displayName={propSchema.title || beautifyString(propKey)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
case BlockUIType.NOTE:
|
||||
// For NOTE blocks, don't render any input handles
|
||||
const [noteKey, noteSchema] = keys[0];
|
||||
return (
|
||||
<div key={noteKey}>
|
||||
<NodeTextBoxInput
|
||||
className=""
|
||||
selfKey={noteKey}
|
||||
schema={noteSchema as BlockIOStringSubSchema}
|
||||
value={getValue(noteKey)}
|
||||
handleInputChange={handleInputChange}
|
||||
handleInputClick={handleInputClick}
|
||||
error={data.errors?.[noteKey] ?? ""}
|
||||
displayName={noteSchema.title || beautifyString(noteKey)}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
|
||||
case BlockUIType.OUTPUT:
|
||||
// For OUTPUT blocks, only show the 'value' property
|
||||
return keys.map(([propKey, propSchema]) => {
|
||||
const isRequired = data.inputSchema.required?.includes(propKey);
|
||||
const isConnected = isHandleConnected(propKey);
|
||||
const isAdvanced = propSchema.advanced;
|
||||
return (
|
||||
(isRequired || isAdvancedOpen || !isAdvanced) && (
|
||||
<div key={propKey} onMouseOver={() => {}}>
|
||||
{propKey !== "value" ? (
|
||||
<span className="text-m green -mb-1 text-gray-900">
|
||||
{propSchema.title || beautifyString(propKey)}
|
||||
</span>
|
||||
) : (
|
||||
<NodeHandle
|
||||
keyName={propKey}
|
||||
isConnected={isConnected}
|
||||
isRequired={isRequired}
|
||||
schema={propSchema}
|
||||
side="left"
|
||||
/>
|
||||
)}
|
||||
{!isConnected && (
|
||||
<NodeGenericInputField
|
||||
className="mb-2 mt-1"
|
||||
propKey={propKey}
|
||||
propSchema={propSchema}
|
||||
currentValue={getValue(propKey)}
|
||||
connections={data.connections}
|
||||
handleInputChange={handleInputChange}
|
||||
handleInputClick={handleInputClick}
|
||||
errors={data.errors ?? {}}
|
||||
displayName={propSchema.title || beautifyString(propKey)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
default:
|
||||
return keys.map(([propKey, propSchema]) => {
|
||||
const isRequired = data.inputSchema.required?.includes(propKey);
|
||||
const isConnected = isHandleConnected(propKey);
|
||||
const isAdvanced = propSchema.advanced;
|
||||
return (
|
||||
(isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
|
||||
<div key={propKey} onMouseOver={() => {}}>
|
||||
<NodeHandle
|
||||
keyName={propKey}
|
||||
isConnected={isConnected}
|
||||
isRequired={isRequired}
|
||||
schema={propSchema}
|
||||
side="left"
|
||||
/>
|
||||
{!isConnected && (
|
||||
<NodeGenericInputField
|
||||
className="mb-2 mt-1"
|
||||
propKey={propKey}
|
||||
propSchema={propSchema}
|
||||
currentValue={getValue(propKey)}
|
||||
connections={data.connections}
|
||||
handleInputChange={handleInputChange}
|
||||
handleInputClick={handleInputClick}
|
||||
errors={data.errors ?? {}}
|
||||
displayName={propSchema.title || beautifyString(propKey)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
);
|
||||
});
|
||||
}
|
||||
};
|
||||
const handleInputChange = (path: string, value: any) => {
|
||||
const keys = parseKeys(path);
|
||||
const newValues = JSON.parse(JSON.stringify(data.hardcodedValues));
|
||||
let current = newValues;
|
||||
|
||||
for (let i = 0; i < keys.length - 1; i++) {
|
||||
const { key: currentKey, index } = keys[i];
|
||||
if (index !== undefined) {
|
||||
if (!current[currentKey]) current[currentKey] = [];
|
||||
if (!current[currentKey][index]) current[currentKey][index] = {};
|
||||
current = current[currentKey][index];
|
||||
} else {
|
||||
if (!current[currentKey]) current[currentKey] = {};
|
||||
current = current[currentKey];
|
||||
}
|
||||
}
|
||||
|
||||
const lastKey = keys[keys.length - 1];
|
||||
if (lastKey.index !== undefined) {
|
||||
if (!current[lastKey.key]) current[lastKey.key] = [];
|
||||
current[lastKey.key][lastKey.index] = value;
|
||||
} else {
|
||||
current[lastKey.key] = value;
|
||||
}
|
||||
|
||||
// console.log(`Updating hardcoded values for node ${id}:`, newValues);
|
||||
|
||||
if (!isInitialSetup.current) {
|
||||
history.push({
|
||||
type: "UPDATE_INPUT",
|
||||
payload: { nodeId: id, oldValues: data.hardcodedValues, newValues },
|
||||
undo: () => setHardcodedValues(data.hardcodedValues),
|
||||
redo: () => setHardcodedValues(newValues),
|
||||
});
|
||||
}
|
||||
|
||||
setHardcodedValues(newValues);
|
||||
const errors = data.errors || {};
|
||||
// Remove error with the same key
|
||||
setNestedProperty(errors, path, null);
|
||||
setErrors({ ...errors });
|
||||
};
|
||||
|
||||
// Helper function to parse keys with array indices
|
||||
//TODO move to utils
|
||||
const parseKeys = (key: string): ParsedKey[] => {
|
||||
const splits = key.split(/_@_|_#_|_\$_|\./);
|
||||
const keys: ParsedKey[] = [];
|
||||
let currentKey: string | null = null;
|
||||
|
||||
splits.forEach((split) => {
|
||||
const isInteger = /^\d+$/.test(split);
|
||||
if (!isInteger) {
|
||||
if (currentKey !== null) {
|
||||
keys.push({ key: currentKey });
|
||||
}
|
||||
currentKey = split;
|
||||
} else {
|
||||
if (currentKey !== null) {
|
||||
keys.push({ key: currentKey, index: parseInt(split, 10) });
|
||||
currentKey = null;
|
||||
} else {
|
||||
throw new Error("Invalid key format: array index without a key");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (currentKey !== null) {
|
||||
keys.push({ key: currentKey });
|
||||
}
|
||||
|
||||
return keys;
|
||||
};
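// Illustrative example (not part of the diff), using a hypothetical composite key:
// with the separators above (`_@_`, `_#_`, `_$_`, `.`), parseKeys("values_$_1.name")
// splits into ["values", "1", "name"] and yields
//   [{ key: "values", index: 1 }, { key: "name" }]
// i.e. it addresses hardcodedValues.values[1].name.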
|
||||
|
||||
const getValue = (key: string) => {
|
||||
const keys = parseKeys(key);
|
||||
return keys.reduce((acc, k) => {
|
||||
if (acc === undefined) return undefined;
|
||||
if (k.index !== undefined) {
|
||||
return Array.isArray(acc[k.key]) ? acc[k.key][k.index] : undefined;
|
||||
}
|
||||
return acc[k.key];
|
||||
}, data.hardcodedValues as any);
|
||||
};
|
||||
|
||||
const isHandleConnected = (key: string) => {
|
||||
return (
|
||||
data.connections &&
|
||||
data.connections.some((conn: any) => {
|
||||
if (typeof conn === "string") {
|
||||
const [source, target] = conn.split(" -> ");
|
||||
return (
|
||||
(target.includes(key) && target.includes(data.title)) ||
|
||||
(source.includes(key) && source.includes(data.title))
|
||||
);
|
||||
}
|
||||
return (
|
||||
(conn.target === id && conn.targetHandle === key) ||
|
||||
(conn.source === id && conn.sourceHandle === key)
|
||||
);
|
||||
})
|
||||
);
|
||||
};
|
||||
|
||||
const handleInputClick = (key: string) => {
|
||||
console.log(`Opening modal for key: ${key}`);
|
||||
setActiveKey(key);
|
||||
const value = getValue(key);
|
||||
setInputModalValue(
|
||||
typeof value === "object" ? JSON.stringify(value, null, 2) : value,
|
||||
);
|
||||
setIsModalOpen(true);
|
||||
};
|
||||
|
||||
const handleModalSave = (value: string) => {
|
||||
if (activeKey) {
|
||||
try {
|
||||
const parsedValue = JSON.parse(value);
|
||||
handleInputChange(activeKey, parsedValue);
|
||||
} catch (error) {
|
||||
handleInputChange(activeKey, value);
|
||||
}
|
||||
}
|
||||
setIsModalOpen(false);
|
||||
setActiveKey(null);
|
||||
};
|
||||
|
||||
const handleOutputClick = () => {
|
||||
setIsOutputModalOpen(true);
|
||||
};
|
||||
|
||||
const handleHovered = () => {
|
||||
setIsHovered(true);
|
||||
};
|
||||
|
||||
const handleMouseLeave = () => {
|
||||
setIsHovered(false);
|
||||
};
|
||||
|
||||
const deleteNode = useCallback(() => {
|
||||
console.log("Deleting node:", id);
|
||||
|
||||
// Remove the node
|
||||
deleteElements({ nodes: [{ id }] });
|
||||
}, [id, deleteElements]);
|
||||
|
||||
const copyNode = useCallback(() => {
|
||||
const newId = getNextNodeId();
|
||||
const currentNode = getNode(id);
|
||||
|
||||
if (!currentNode) {
|
||||
console.error("Cannot copy node: current node not found");
|
||||
return;
|
||||
}
|
||||
|
||||
const verticalOffset = height ?? 100;
|
||||
|
||||
const newNode: CustomNode = {
|
||||
id: newId,
|
||||
type: currentNode.type,
|
||||
position: {
|
||||
x: currentNode.position.x,
|
||||
y: currentNode.position.y - verticalOffset - 20,
|
||||
},
|
||||
data: {
|
||||
...data,
|
||||
title: `${data.title} (Copy)`,
|
||||
block_id: data.block_id,
|
||||
connections: [],
|
||||
isOutputOpen: false,
|
||||
},
|
||||
};
|
||||
|
||||
addNodes(newNode);
|
||||
|
||||
history.push({
|
||||
type: "ADD_NODE",
|
||||
payload: { node: newNode },
|
||||
undo: () => deleteElements({ nodes: [{ id: newId }] }),
|
||||
redo: () => addNodes(newNode),
|
||||
});
|
||||
}, [id, data, height, addNodes, deleteElements, getNode, getNextNodeId]);
|
||||
|
||||
const hasConfigErrors =
|
||||
data.errors &&
|
||||
Object.entries(data.errors).some(([_, value]) => value !== null);
|
||||
const outputData = data.executionResults?.at(-1)?.data;
|
||||
const hasOutputError =
|
||||
typeof outputData === "object" &&
|
||||
outputData !== null &&
|
||||
"error" in outputData;
|
||||
|
||||
useEffect(() => {
|
||||
if (hasConfigErrors) {
|
||||
const filteredErrors = Object.fromEntries(
|
||||
Object.entries(data.errors || {}).filter(
|
||||
([_, value]) => value !== null,
|
||||
),
|
||||
);
|
||||
console.error(
|
||||
"Block configuration errors for",
|
||||
data.title,
|
||||
":",
|
||||
filteredErrors,
|
||||
);
|
||||
}
|
||||
if (hasOutputError) {
|
||||
console.error(
|
||||
"Block output contains error for",
|
||||
data.title,
|
||||
":",
|
||||
outputData.error,
|
||||
);
|
||||
}
|
||||
}, [hasConfigErrors, hasOutputError, data.errors, outputData, data.title]);
|
||||
|
||||
const blockClasses = [
|
||||
"custom-node",
|
||||
"dark-theme",
|
||||
"rounded-xl",
|
||||
"border",
|
||||
"bg-white/[.9]",
|
||||
"shadow-md",
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join(" ");
|
||||
|
||||
const errorClass =
|
||||
hasConfigErrors || hasOutputError ? "border-red-500 border-2" : "";
|
||||
|
||||
const statusClass =
|
||||
hasConfigErrors || hasOutputError
|
||||
? "failed"
|
||||
: (data.status?.toLowerCase() ?? "");
|
||||
|
||||
const hasAdvancedFields =
|
||||
data.inputSchema &&
|
||||
Object.entries(data.inputSchema.properties).some(([key, value]) => {
|
||||
return (
|
||||
value.advanced === true && !data.inputSchema.required?.includes(key)
|
||||
);
|
||||
});
|
||||
|
||||
const inputValues = data.hardcodedValues;
|
||||
const blockCost =
|
||||
data.blockCosts &&
|
||||
data.blockCosts.find((cost) =>
|
||||
Object.entries(cost.cost_filter).every(
|
||||
// Undefined, null, or empty values are considered equal
|
||||
([key, value]) =>
|
||||
value === inputValues[key] || (!value && !inputValues[key]),
|
||||
),
|
||||
);
|
||||
console.debug(`Block cost ${inputValues}|${data.blockCosts}=${blockCost}`);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`${data.uiType === BlockUIType.NOTE ? "w-[300px]" : "w-[500px]"} ${blockClasses} ${errorClass} ${statusClass} ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : "bg-white"}`}
|
||||
onMouseEnter={handleHovered}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
data-id={`custom-node-${id}`}
|
||||
>
|
||||
<div
|
||||
className={`mb-2 p-3 ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
|
||||
>
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="font-roboto p-3 text-lg font-semibold">
|
||||
{beautifyString(
|
||||
data.blockType?.replace(/Block$/, "") || data.title,
|
||||
)}
|
||||
</div>
|
||||
<SchemaTooltip description={data.description} />
|
||||
</div>
|
||||
<div className="flex gap-[5px]">
|
||||
{isHovered && (
|
||||
<>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
onClick={copyNode}
|
||||
title="Copy node"
|
||||
>
|
||||
<Copy size={18} />
|
||||
</Button>
|
||||
<Button
|
||||
variant="outline"
|
||||
size="icon"
|
||||
onClick={deleteNode}
|
||||
title="Delete node"
|
||||
>
|
||||
<Trash2 size={18} />
|
||||
</Button>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
{blockCost && (
|
||||
<div className="p-3 text-right font-semibold">
|
||||
Cost: {blockCost.cost_amount} / {blockCost.cost_type}
|
||||
</div>
|
||||
)}
|
||||
{data.uiType !== BlockUIType.NOTE ? (
|
||||
<div className="flex items-start justify-between p-3">
|
||||
<div>
|
||||
{data.inputSchema &&
|
||||
generateInputHandles(data.inputSchema, data.uiType)}
|
||||
</div>
|
||||
<div className="flex-none">
|
||||
{data.outputSchema &&
|
||||
generateOutputHandles(data.outputSchema, data.uiType)}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
<div>
|
||||
{data.inputSchema &&
|
||||
generateInputHandles(data.inputSchema, data.uiType)}
|
||||
</div>
|
||||
)}
|
||||
{isOutputOpen && data.uiType !== BlockUIType.NOTE && (
|
||||
<div
|
||||
data-id="latest-output"
|
||||
className="nodrag m-3 break-words rounded-md border-[1.5px] p-2"
|
||||
>
|
||||
{(data.executionResults?.length ?? 0) > 0 ? (
|
||||
<>
|
||||
<DataTable
|
||||
title="Latest Output"
|
||||
truncateLongData
|
||||
data={data.executionResults!.at(-1)?.data || {}}
|
||||
/>
|
||||
<div className="flex justify-end">
|
||||
<Button variant="ghost" onClick={handleOutputClick}>
|
||||
View More
|
||||
</Button>
|
||||
</div>
|
||||
</>
|
||||
) : (
|
||||
<span>No outputs yet</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
{data.uiType !== BlockUIType.NOTE && (
|
||||
<div className="mt-2.5 flex items-center pb-4 pl-4">
|
||||
<Switch checked={isOutputOpen} onCheckedChange={toggleOutput} />
|
||||
<span className="m-1 mr-4">Output</span>
|
||||
{hasAdvancedFields && (
|
||||
<>
|
||||
<Switch onCheckedChange={toggleAdvancedSettings} />
|
||||
<span className="m-1">Advanced</span>
|
||||
</>
|
||||
)}
|
||||
{data.status && (
|
||||
<Badge
|
||||
variant="outline"
|
||||
data-id={`badge-${id}-${data.status}`}
|
||||
className={cn(data.status.toLowerCase(), "ml-auto mr-5")}
|
||||
>
|
||||
{data.status}
|
||||
</Badge>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
<InputModalComponent
|
||||
title={activeKey ? `Enter ${beautifyString(activeKey)}` : undefined}
|
||||
isOpen={isModalOpen}
|
||||
onClose={() => setIsModalOpen(false)}
|
||||
onSave={handleModalSave}
|
||||
defaultValue={inputModalValue}
|
||||
key={activeKey}
|
||||
/>
|
||||
<OutputModalComponent
|
||||
isOpen={isOutputModalOpen}
|
||||
onClose={() => setIsOutputModalOpen(false)}
|
||||
executionResults={data.executionResults?.toReversed() || []}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -1,4 +1,3 @@
|
||||
import React from "react";
|
||||
import { beautifyString } from "@/lib/utils";
|
||||
import { Button } from "./ui/button";
|
||||
import {
|
||||
@@ -11,7 +10,6 @@ import {
|
||||
} from "./ui/table";
|
||||
import { Clipboard } from "lucide-react";
|
||||
import { useToast } from "./ui/use-toast";
|
||||
import { ContentRenderer } from "./ui/render";
|
||||
|
||||
type DataTableProps = {
|
||||
title?: string;
|
||||
@@ -53,7 +51,7 @@ export default function DataTable({
|
||||
{beautifyString(key)}
|
||||
</TableCell>
|
||||
<TableCell className="cursor-text">
|
||||
<div className="flex min-h-9 items-center whitespace-pre-wrap">
|
||||
<div className="flex min-h-9 items-center">
|
||||
<Button
|
||||
className="absolute right-1 top-auto m-1 hidden p-2 group-hover:block"
|
||||
variant="outline"
|
||||
@@ -64,7 +62,7 @@ export default function DataTable({
|
||||
value
|
||||
.map((i) =>
|
||||
typeof i === "object"
|
||||
? JSON.stringify(i, null, 2)
|
||||
? JSON.stringify(i)
|
||||
: String(i),
|
||||
)
|
||||
.join(", "),
|
||||
@@ -74,15 +72,15 @@ export default function DataTable({
|
||||
>
|
||||
<Clipboard size={18} />
|
||||
</Button>
|
||||
{value.map((item, index) => (
|
||||
<React.Fragment key={index}>
|
||||
<ContentRenderer
|
||||
value={item}
|
||||
truncateLongData={truncateLongData}
|
||||
/>
|
||||
{index < value.length - 1 && ", "}
|
||||
</React.Fragment>
|
||||
))}
|
||||
{value
|
||||
.map((i) => {
|
||||
const text =
|
||||
typeof i === "object" ? JSON.stringify(i) : String(i);
|
||||
return truncateLongData && text.length > maxChars
|
||||
? text.slice(0, maxChars) + "..."
|
||||
: text;
|
||||
})
|
||||
.join(", ")}
|
||||
</div>
|
||||
</TableCell>
|
||||
</TableRow>
|
||||
@@ -26,31 +26,29 @@ import {
|
||||
import "@xyflow/react/dist/style.css";
|
||||
import { CustomNode } from "./CustomNode";
|
||||
import "./flow.css";
|
||||
import {
|
||||
BlockUIType,
|
||||
formatEdgeID,
|
||||
GraphExecutionID,
|
||||
GraphID,
|
||||
} from "@/lib/autogpt-server-api";
|
||||
import { getTypeColor, findNewlyAddedBlockCoordinates } from "@/lib/utils";
|
||||
import { Link } from "@/lib/autogpt-server-api";
|
||||
import { getTypeColor, filterBlocksByType } from "@/lib/utils";
|
||||
import { history } from "./history";
|
||||
import { CustomEdge } from "./CustomEdge";
|
||||
import ConnectionLine from "./ConnectionLine";
|
||||
import { Control, ControlPanel } from "@/components/edit/control/ControlPanel";
|
||||
import { SaveControl } from "@/components/edit/control/SaveControl";
|
||||
import { BlocksControl } from "@/components/edit/control/BlocksControl";
|
||||
import { IconUndo2, IconRedo2 } from "@/components/ui/icons";
|
||||
import {
|
||||
IconPlay,
|
||||
IconUndo2,
|
||||
IconRedo2,
|
||||
IconSquare,
|
||||
IconOutput,
|
||||
} from "@/components/ui/icons";
|
||||
import { startTutorial } from "./tutorial";
|
||||
import useAgentGraph from "@/hooks/useAgentGraph";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { useRouter, usePathname, useSearchParams } from "next/navigation";
|
||||
import { LogOut } from "lucide-react";
|
||||
import RunnerUIWrapper, {
|
||||
RunnerUIWrapperRef,
|
||||
} from "@/components/RunnerUIWrapper";
|
||||
import PrimaryActionBar from "@/components/PrimaryActionButton";
|
||||
import { useToast } from "@/components/ui/use-toast";
|
||||
import { useCopyPaste } from "../hooks/useCopyPaste";
|
||||
import { CronScheduler } from "./cronScheduler";
|
||||
|
||||
// This is for the history, this is the minimum distance a block must move before it is logged
|
||||
// It helps to prevent spamming the history with small movements, especially when pressing on an input in a block
|
||||
@@ -62,38 +60,22 @@ type FlowContextType = {
|
||||
getNextNodeId: () => string;
|
||||
};
|
||||
|
||||
export type NodeDimension = {
|
||||
[nodeId: string]: {
|
||||
x: number;
|
||||
y: number;
|
||||
width: number;
|
||||
height: number;
|
||||
};
|
||||
};
|
||||
|
||||
export const FlowContext = createContext<FlowContextType | null>(null);
|
||||
|
||||
const FlowEditor: React.FC<{
|
||||
flowID?: GraphID;
|
||||
flowVersion?: string;
|
||||
flowID?: string;
|
||||
template?: boolean;
|
||||
className?: string;
|
||||
}> = ({ flowID, flowVersion, className }) => {
|
||||
const {
|
||||
addNodes,
|
||||
addEdges,
|
||||
getNode,
|
||||
deleteElements,
|
||||
updateNode,
|
||||
setViewport,
|
||||
} = useReactFlow<CustomNode, CustomEdge>();
|
||||
}> = ({ flowID, template, className }) => {
|
||||
const { addNodes, addEdges, getNode, deleteElements, updateNode } =
|
||||
useReactFlow<CustomNode, CustomEdge>();
|
||||
const [nodeId, setNodeId] = useState<number>(1);
|
||||
const [copiedNodes, setCopiedNodes] = useState<CustomNode[]>([]);
|
||||
const [copiedEdges, setCopiedEdges] = useState<CustomEdge[]>([]);
|
||||
const [isAnyModalOpen, setIsAnyModalOpen] = useState(false);
|
||||
const [visualizeBeads, setVisualizeBeads] = useState<
|
||||
"no" | "static" | "animate"
|
||||
>("animate");
|
||||
const [flowExecutionID, setFlowExecutionID] = useState<
|
||||
GraphExecutionID | undefined
|
||||
>();
|
||||
const {
|
||||
agentName,
|
||||
setAgentName,
|
||||
@@ -101,80 +83,52 @@ const FlowEditor: React.FC<{
|
||||
setAgentDescription,
|
||||
savedAgent,
|
||||
availableNodes,
|
||||
availableFlows,
|
||||
getOutputType,
|
||||
requestSave,
|
||||
requestSaveAndRun,
|
||||
requestStopRun,
|
||||
scheduleRunner,
|
||||
isSaving,
|
||||
isRunning,
|
||||
isStopping,
|
||||
isScheduling,
|
||||
setIsScheduling,
|
||||
nodes,
|
||||
setNodes,
|
||||
edges,
|
||||
setEdges,
|
||||
} = useAgentGraph(
|
||||
flowID,
|
||||
flowVersion ? parseInt(flowVersion) : undefined,
|
||||
flowExecutionID,
|
||||
visualizeBeads !== "no",
|
||||
);
|
||||
} = useAgentGraph(flowID, template, visualizeBeads !== "no");
|
||||
|
||||
const router = useRouter();
|
||||
const pathname = usePathname();
|
||||
const params = useSearchParams();
|
||||
const initialPositionRef = useRef<{
|
||||
[key: string]: { x: number; y: number };
|
||||
}>({});
|
||||
const isDragging = useRef(false);
|
||||
|
||||
// State to control if tutorial has started
|
||||
const [tutorialStarted, setTutorialStarted] = useState(false);
|
||||
// State to control if blocks menu should be pinned open
|
||||
const [pinBlocksPopover, setPinBlocksPopover] = useState(false);
|
||||
// State to control if save popover should be pinned open
|
||||
const [pinSavePopover, setPinSavePopover] = useState(false);
|
||||
|
||||
const runnerUIRef = useRef<RunnerUIWrapperRef>(null);
|
||||
|
||||
const [openCron, setOpenCron] = useState(false);
|
||||
|
||||
const { toast } = useToast();
|
||||
|
||||
const TUTORIAL_STORAGE_KEY = "shepherd-tour";
|
||||
|
||||
// It stores the dimension of all nodes with position as well
|
||||
const [nodeDimensions, setNodeDimensions] = useState<NodeDimension>({});
|
||||
|
||||
useEffect(() => {
|
||||
const params = new URLSearchParams(window.location.search);
|
||||
|
||||
// If resetting tutorial
|
||||
if (params.get("resetTutorial") === "true") {
|
||||
localStorage.removeItem(TUTORIAL_STORAGE_KEY);
|
||||
localStorage.removeItem("shepherd-tour"); // Clear tutorial flag
|
||||
router.push(pathname);
|
||||
} else if (!localStorage.getItem(TUTORIAL_STORAGE_KEY)) {
|
||||
const emptyNodes = (forceRemove: boolean = false) =>
|
||||
forceRemove ? (setNodes([]), setEdges([]), true) : nodes.length === 0;
|
||||
startTutorial(emptyNodes, setPinBlocksPopover, setPinSavePopover);
|
||||
localStorage.setItem(TUTORIAL_STORAGE_KEY, "yes");
|
||||
} else {
|
||||
// Otherwise, start tutorial if conditions are met
|
||||
const shouldStartTutorial = !localStorage.getItem("shepherd-tour");
|
||||
if (
|
||||
shouldStartTutorial &&
|
||||
availableNodes.length > 0 &&
|
||||
!tutorialStarted
|
||||
) {
|
||||
startTutorial(setPinBlocksPopover);
|
||||
setTutorialStarted(true);
|
||||
localStorage.setItem("shepherd-tour", "yes");
|
||||
}
|
||||
}
|
||||
}, [
|
||||
availableNodes,
|
||||
router,
|
||||
pathname,
|
||||
params,
|
||||
setEdges,
|
||||
setNodes,
|
||||
nodes.length,
|
||||
]);
|
||||
|
||||
useEffect(() => {
|
||||
if (params.get("open_scheduling") === "true") {
|
||||
setOpenCron(true);
|
||||
}
|
||||
setFlowExecutionID(
|
||||
(params.get("flowExecutionID") as GraphExecutionID) || undefined,
|
||||
);
|
||||
}, [params]);
|
||||
}, [availableNodes, tutorialStarted, router, pathname]);
|
||||
|
||||
useEffect(() => {
|
||||
const handleKeyDown = (event: KeyboardEvent) => {
|
||||
@@ -267,7 +221,7 @@ const FlowEditor: React.FC<{
|
||||
if (deletedNodeData) {
|
||||
history.push({
|
||||
type: "DELETE_NODE",
|
||||
payload: { node: deletedNodeData.data },
|
||||
payload: { node: deletedNodeData },
|
||||
undo: () => addNodes(deletedNodeData),
|
||||
redo: () => deleteElements({ nodes: [{ id: nodeID }] }),
|
||||
});
|
||||
@@ -284,6 +238,14 @@ const FlowEditor: React.FC<{
|
||||
[deleteElements, setNodes, nodes, edges, addNodes],
|
||||
);
|
||||
|
||||
const formatEdgeID = useCallback((conn: Link | Connection): string => {
|
||||
if ("sink_id" in conn) {
|
||||
return `${conn.source_id}_${conn.source_name}_${conn.sink_id}_${conn.sink_name}`;
|
||||
} else {
|
||||
return `${conn.source}_${conn.sourceHandle}_${conn.target}_${conn.targetHandle}`;
|
||||
}
|
||||
}, []);
|
||||
|
||||
const onConnect: OnConnect = useCallback(
|
||||
(connection: Connection) => {
|
||||
// Check if this exact connection already exists
|
||||
@@ -432,104 +394,24 @@ const FlowEditor: React.FC<{
|
||||
|
||||
const { x, y, zoom } = useViewport();
|
||||
|
||||
// Set the initial view port to center the canvas.
|
||||
useEffect(() => {
|
||||
if (nodes.length <= 0 || x !== 0 || y !== 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const topLeft = { x: Infinity, y: Infinity };
|
||||
const bottomRight = { x: -Infinity, y: -Infinity };
|
||||
|
||||
nodes.forEach((node) => {
|
||||
const { x, y } = node.position;
|
||||
topLeft.x = Math.min(topLeft.x, x);
|
||||
topLeft.y = Math.min(topLeft.y, y);
|
||||
// Rough estimate of the width and height of the node: 500x400.
|
||||
bottomRight.x = Math.max(bottomRight.x, x + 500);
|
||||
bottomRight.y = Math.max(bottomRight.y, y + 400);
|
||||
});
|
||||
|
||||
const centerX = (topLeft.x + bottomRight.x) / 2;
|
||||
const centerY = (topLeft.y + bottomRight.y) / 2;
|
||||
const zoom = 0.8;
|
||||
|
||||
setViewport({
|
||||
x: window.innerWidth / 2 - centerX * zoom,
|
||||
y: window.innerHeight / 2 - centerY * zoom,
|
||||
zoom: zoom,
|
||||
});
|
||||
}, [nodes, setViewport, x, y]);
|
||||
|
||||
const fillDefaults = useCallback((obj: any, schema: any) => {
|
||||
// Iterate over the schema properties
|
||||
for (const key in schema.properties) {
|
||||
if (schema.properties.hasOwnProperty(key)) {
|
||||
const propertySchema = schema.properties[key];
|
||||
|
||||
// If the property is not in the object, initialize it with the default value
|
||||
if (!obj.hasOwnProperty(key)) {
|
||||
if (propertySchema.default !== undefined) {
|
||||
obj[key] = propertySchema.default;
|
||||
} else if (propertySchema.type === "object") {
|
||||
// Recursively fill defaults for nested objects
|
||||
obj[key] = fillDefaults({}, propertySchema);
|
||||
} else if (propertySchema.type === "array") {
|
||||
// Recursively fill defaults for arrays
|
||||
obj[key] = fillDefaults([], propertySchema);
|
||||
}
|
||||
} else {
|
||||
// If the property exists, recursively fill defaults for nested objects/arrays
|
||||
if (propertySchema.type === "object") {
|
||||
obj[key] = fillDefaults(obj[key], propertySchema);
|
||||
} else if (propertySchema.type === "array") {
|
||||
obj[key] = fillDefaults(obj[key], propertySchema);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return obj;
|
||||
}, []);
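// Illustrative example (not part of the diff), for a hypothetical input schema:
//   { properties: { url: { type: "string", default: "" },
//                   options: { type: "object",
//                              properties: { retries: { type: "integer", default: 3 } } } } }
// fillDefaults({}, schema) returns { url: "", options: { retries: 3 } }.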
|
||||
|
||||
const addNode = useCallback(
|
||||
(blockId: string, nodeType: string, hardcodedValues: any = {}) => {
|
||||
(blockId: string, nodeType: string) => {
|
||||
const nodeSchema = availableNodes.find((node) => node.id === blockId);
|
||||
if (!nodeSchema) {
|
||||
console.error(`Schema not found for block ID: ${blockId}`);
|
||||
return;
|
||||
}
|
||||
|
||||
/*
|
||||
Calculate a position to the right of the newly added block, allowing for some margin.
|
||||
If adding to the right side causes the new block to collide with an existing block, attempt to place it at the bottom or left.
|
||||
Why not the top? Because the height of the new block is unknown.
|
||||
If it still collides, run a loop to find the best position where it does not collide.
|
||||
Then, adjust the canvas to center on the newly added block.
|
||||
Note: The width is known, e.g., w = 300px for a note and w = 500px for others, but the height is dynamic.
|
||||
*/
|
||||
|
||||
// Alternative: We could also use D3 force, Intersection for this (React flow Pro examples)
|
||||
|
||||
const viewportCoordinates =
|
||||
nodeDimensions && Object.keys(nodeDimensions).length > 0
|
||||
? // we will get all the dimension of nodes, then store
|
||||
findNewlyAddedBlockCoordinates(
|
||||
nodeDimensions,
|
||||
nodeSchema.uiType == BlockUIType.NOTE ? 300 : 500,
|
||||
60,
|
||||
1.0,
|
||||
)
|
||||
: // we will get all the dimension of nodes, then store
|
||||
{
|
||||
x: window.innerWidth / 2 - x,
|
||||
y: window.innerHeight / 2 - y,
|
||||
};
|
||||
// Calculate the center of the viewport considering zoom
|
||||
const viewportCenter = {
|
||||
x: (window.innerWidth / 2 - x) / zoom,
|
||||
y: (window.innerHeight / 2 - y) / zoom,
|
||||
};
|
||||
|
||||
const newNode: CustomNode = {
|
||||
id: nodeId.toString(),
|
||||
type: "custom",
|
||||
position: viewportCoordinates, // Set the position to the calculated viewport center
|
||||
position: viewportCenter, // Set the position to the calculated viewport center
|
||||
data: {
|
||||
blockType: nodeType,
|
||||
blockCosts: nodeSchema.costs,
|
||||
@@ -538,10 +420,7 @@ const FlowEditor: React.FC<{
|
||||
categories: nodeSchema.categories,
|
||||
inputSchema: nodeSchema.inputSchema,
|
||||
outputSchema: nodeSchema.outputSchema,
|
||||
hardcodedValues: {
|
||||
...fillDefaults({}, nodeSchema.inputSchema),
|
||||
...hardcodedValues,
|
||||
},
|
||||
hardcodedValues: {},
|
||||
connections: [],
|
||||
isOutputOpen: false,
|
||||
block_id: blockId,
|
||||
@@ -554,69 +433,25 @@ const FlowEditor: React.FC<{
|
||||
setNodeId((prevId) => prevId + 1);
|
||||
clearNodesStatusAndOutput(); // Clear status and output when a new node is added
|
||||
|
||||
setViewport(
|
||||
{
|
||||
// Rough estimate of the dimension of the node is: 500x400px.
|
||||
// Though we skip shifting the X, considering the block menu side-bar.
|
||||
x: -viewportCoordinates.x * 0.8 + (window.innerWidth - 0.0) / 2,
|
||||
y: -viewportCoordinates.y * 0.8 + (window.innerHeight - 400) / 2,
|
||||
zoom: 0.8,
|
||||
},
|
||||
{ duration: 500 },
|
||||
);
|
||||
|
||||
history.push({
|
||||
type: "ADD_NODE",
|
||||
payload: { node: { ...newNode, ...newNode.data } },
|
||||
payload: { node: newNode.data },
|
||||
undo: () => deleteElements({ nodes: [{ id: newNode.id }] }),
|
||||
redo: () => addNodes(newNode),
|
||||
});
|
||||
},
|
||||
[
|
||||
nodeId,
|
||||
setViewport,
|
||||
availableNodes,
|
||||
addNodes,
|
||||
nodeDimensions,
|
||||
deleteElements,
|
||||
clearNodesStatusAndOutput,
|
||||
x,
|
||||
y,
|
||||
zoom,
|
||||
],
|
||||
);
|
||||
|
||||
const findNodeDimensions = useCallback(() => {
|
||||
const newNodeDimensions: NodeDimension = nodes.reduce((acc, node) => {
|
||||
const nodeElement = document.querySelector(
|
||||
`[data-id="custom-node-${node.id}"]`,
|
||||
);
|
||||
if (nodeElement) {
|
||||
const rect = nodeElement.getBoundingClientRect();
|
||||
const { left, top, width, height } = rect;
|
||||
|
||||
// Convert screen coordinates to flow coordinates
|
||||
const flowX = (left - x) / zoom;
|
||||
const flowY = (top - y) / zoom;
|
||||
const flowWidth = width / zoom;
|
||||
const flowHeight = height / zoom;
|
||||
|
||||
acc[node.id] = {
|
||||
x: flowX,
|
||||
y: flowY,
|
||||
width: flowWidth,
|
||||
height: flowHeight,
|
||||
};
|
||||
}
|
||||
return acc;
|
||||
}, {} as NodeDimension);
|
||||
|
||||
setNodeDimensions(newNodeDimensions);
|
||||
}, [nodes, x, y, zoom]);
|
||||
|
||||
useEffect(() => {
|
||||
findNodeDimensions();
|
||||
}, [nodes, findNodeDimensions]);
|
||||
|
||||
const handleUndo = () => {
|
||||
history.undo();
|
||||
};
|
||||
@@ -625,8 +460,6 @@ const FlowEditor: React.FC<{
|
||||
history.redo();
|
||||
};
|
||||
|
||||
const handleCopyPaste = useCopyPaste(getNextNodeId);
|
||||
|
||||
const handleKeyDown = useCallback(
|
||||
(event: KeyboardEvent) => {
|
||||
// Prevent copy/paste if any modal is open or if the focus is on an input element
|
||||
@@ -638,9 +471,68 @@ const FlowEditor: React.FC<{
|
||||
|
||||
if (isAnyModalOpen || isInputField) return;
|
||||
|
||||
handleCopyPaste(event);
|
||||
if (event.ctrlKey || event.metaKey) {
|
||||
if (event.key === "c" || event.key === "C") {
|
||||
// Copy selected nodes
|
||||
const selectedNodes = nodes.filter((node) => node.selected);
|
||||
const selectedEdges = edges.filter((edge) => edge.selected);
|
||||
setCopiedNodes(selectedNodes);
|
||||
setCopiedEdges(selectedEdges);
|
||||
}
|
||||
if (event.key === "v" || event.key === "V") {
|
||||
// Paste copied nodes
|
||||
if (copiedNodes.length > 0) {
|
||||
const oldToNewNodeIDMap: Record<string, string> = {};
|
||||
const pastedNodes = copiedNodes.map((node, index) => {
|
||||
const newNodeId = (nodeId + index).toString();
|
||||
oldToNewNodeIDMap[node.id] = newNodeId;
|
||||
return {
|
||||
...node,
|
||||
id: newNodeId,
|
||||
position: {
|
||||
x: node.position.x + 20, // Offset pasted nodes
|
||||
y: node.position.y + 20,
|
||||
},
|
||||
data: {
|
||||
...node.data,
|
||||
status: undefined, // Reset status
|
||||
executionResults: undefined, // Clear output data
|
||||
},
|
||||
};
|
||||
});
|
||||
setNodes((existingNodes) =>
|
||||
// Deselect copied nodes
|
||||
existingNodes.map((node) => ({ ...node, selected: false })),
|
||||
);
|
||||
addNodes(pastedNodes);
|
||||
setNodeId((prevId) => prevId + copiedNodes.length);
|
||||
|
||||
const pastedEdges = copiedEdges.map((edge) => {
|
||||
const newSourceId = oldToNewNodeIDMap[edge.source] ?? edge.source;
|
||||
const newTargetId = oldToNewNodeIDMap[edge.target] ?? edge.target;
|
||||
return {
|
||||
...edge,
|
||||
id: `${newSourceId}_${edge.sourceHandle}_${newTargetId}_${edge.targetHandle}_${Date.now()}`,
|
||||
source: newSourceId,
|
||||
target: newTargetId,
|
||||
};
|
||||
});
|
||||
addEdges(pastedEdges);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
[isAnyModalOpen, handleCopyPaste],
|
||||
[
|
||||
isAnyModalOpen,
|
||||
nodes,
|
||||
edges,
|
||||
copiedNodes,
|
||||
setNodes,
|
||||
addNodes,
|
||||
copiedEdges,
|
||||
addEdges,
|
||||
nodeId,
|
||||
],
|
||||
);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -665,26 +557,25 @@ const FlowEditor: React.FC<{
|
||||
icon: <IconRedo2 />,
|
||||
onClick: handleRedo,
|
||||
},
|
||||
{
|
||||
label: !savedAgent
|
||||
? "Please save the agent to run"
|
||||
: !isRunning
|
||||
? "Run"
|
||||
: "Stop",
|
||||
icon: !isRunning ? <IconPlay /> : <IconSquare />,
|
||||
onClick: !isRunning
|
||||
? () => runnerUIRef.current?.runOrOpenInput()
|
||||
: requestStopRun,
|
||||
disabled: !savedAgent,
|
||||
},
|
||||
{
|
||||
label: "Runner Output",
|
||||
icon: <LogOut size={18} strokeWidth={1.8} />,
|
||||
onClick: () => runnerUIRef.current?.openRunnerOutput(),
|
||||
},
|
||||
];
|
||||
|
||||
// This function is called after cron expression is created
|
||||
// So you can collect inputs for scheduling
|
||||
const afterCronCreation = (cronExpression: string) => {
|
||||
runnerUIRef.current?.collectInputsForScheduling(cronExpression);
|
||||
};
|
||||
|
||||
// Opens the form for creating a cron expression
|
||||
const handleScheduleButton = () => {
|
||||
if (!savedAgent) {
|
||||
toast({
|
||||
title: `Please save the agent using the button in the left sidebar before running it.`,
|
||||
duration: 2000,
|
||||
});
|
||||
return;
|
||||
}
|
||||
setOpenCron(true);
|
||||
};
|
||||
|
||||
return (
|
||||
<FlowContext.Provider
|
||||
value={{ visualizeBeads, setIsAnyModalOpen, getNextNodeId }}
|
||||
@@ -703,75 +594,33 @@ const FlowEditor: React.FC<{
        onNodeDragStop={onNodeDragEnd}
        onNodeDragStart={onNodeDragStart}
        deleteKeyCode={["Backspace", "Delete"]}
        minZoom={0.1}
        minZoom={0.2}
        maxZoom={2}
        className="dark:bg-slate-900"
      >
        <Controls />
        <Background className="dark:bg-slate-800" />
        <ControlPanel
          className="absolute z-10"
          controls={editorControls}
          topChildren={
            <BlocksControl
              pinBlocksPopover={pinBlocksPopover} // Pass the state to BlocksControl
              blocks={availableNodes}
              addBlock={addNode}
              flows={availableFlows}
              nodes={nodes}
            />
          }
          botChildren={
            <SaveControl
              agentMeta={savedAgent}
              canSave={!isSaving && !isRunning && !isStopping}
              onSave={() => requestSave()}
              agentDescription={agentDescription}
              onDescriptionChange={setAgentDescription}
              agentName={agentName}
              onNameChange={setAgentName}
              pinSavePopover={pinSavePopover}
            />
          }
        ></ControlPanel>
        <PrimaryActionBar
          onClickAgentOutputs={() => runnerUIRef.current?.openRunnerOutput()}
          onClickRunAgent={() => {
            if (!savedAgent) {
              toast({
                title: `Please save the agent using the button in the left sidebar before running it.`,
                duration: 2000,
              });
              return;
            }
            if (!isRunning) {
              runnerUIRef.current?.runOrOpenInput();
            } else {
              requestStopRun();
            }
          }}
          onClickScheduleButton={handleScheduleButton}
          isScheduling={isScheduling}
          isDisabled={!savedAgent}
          isRunning={isRunning}
          requestStopRun={requestStopRun}
          runAgentTooltip={!isRunning ? "Run Agent" : "Stop Agent"}
        />
        <CronScheduler
          afterCronCreation={afterCronCreation}
          open={openCron}
          setOpen={setOpenCron}
        />
        <Background />
        <ControlPanel className="absolute z-10" controls={editorControls}>
          <BlocksControl
            pinBlocksPopover={pinBlocksPopover} // Pass the state to BlocksControl
            blocks={availableNodes}
            addBlock={addNode}
          />
          <SaveControl
            agentMeta={savedAgent}
            onSave={(isTemplate) => requestSave(isTemplate ?? false)}
            agentDescription={agentDescription}
            onDescriptionChange={setAgentDescription}
            agentName={agentName}
            onNameChange={setAgentName}
          />
        </ControlPanel>
      </ReactFlow>
    </div>
    <RunnerUIWrapper
      ref={runnerUIRef}
      nodes={nodes}
      setNodes={setNodes}
      setIsScheduling={setIsScheduling}
      isScheduling={isScheduling}
      isRunning={isRunning}
      scheduleRunner={scheduleRunner}
      requestSaveAndRun={requestSaveAndRun}
    />
  </FlowContext.Provider>
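The editor controls above drive the runner dialogs purely through a ref. A minimal consumer-side sketch of that wiring, using the `RunnerUIWrapperRef` and props declared in the new RunnerUIWrapper.tsx further down in this diff (the import path and the example component name are assumptions, not part of the diff):

```tsx
import { useRef } from "react";
import RunnerUIWrapper, {
  RunnerUIWrapperRef,
} from "@/components/RunnerUIWrapper"; // import path assumed

function RunControlsExample(props: {
  isRunning: boolean;
  requestSaveAndRun: () => void;
  requestStopRun: () => void;
}) {
  const runnerUIRef = useRef<RunnerUIWrapperRef>(null);

  return (
    <>
      {/* "Run" either opens the input dialog (if the graph has input blocks)
          or saves and runs directly; "Stop" goes through requestStopRun. */}
      <button
        onClick={() =>
          props.isRunning
            ? props.requestStopRun()
            : runnerUIRef.current?.runOrOpenInput()
        }
      >
        {props.isRunning ? "Stop" : "Run"}
      </button>
      <button onClick={() => runnerUIRef.current?.openRunnerOutput()}>
        Runner Output
      </button>
      <RunnerUIWrapper
        ref={runnerUIRef}
        nodes={[]} // the editor passes its live React Flow nodes here
        setNodes={() => {}}
        isRunning={props.isRunning}
        requestSaveAndRun={props.requestSaveAndRun}
      />
    </>
  );
}

export default RunControlsExample;
```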
autogpt_platform/autogpt_builder/src/components/NavBar.tsx (new file, 117 additions)
@@ -0,0 +1,117 @@
import Link from "next/link";
import { Button } from "@/components/ui/button";
import React from "react";
import { Sheet, SheetContent, SheetTrigger } from "@/components/ui/sheet";
import Image from "next/image";
import getServerUser from "@/hooks/getServerUser";
import ProfileDropdown from "./ProfileDropdown";
import {
  IconCircleUser,
  IconMenu,
  IconPackage2,
  IconRefresh,
  IconSquareActivity,
  IconWorkFlow,
} from "@/components/ui/icons";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
import CreditButton from "@/components/CreditButton";

export async function NavBar() {
  const isAvailable = Boolean(
    process.env.NEXT_PUBLIC_SUPABASE_URL &&
      process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY,
  );
  const { user } = await getServerUser();

  return (
    <header className="sticky top-0 z-50 flex h-16 items-center gap-4 border-b bg-background px-4 md:px-6">
      <div className="flex flex-1 items-center gap-4">
        <Sheet>
          <SheetTrigger asChild>
            <Button
              variant="outline"
              size="icon"
              className="shrink-0 md:hidden"
            >
              <IconMenu />
              <span className="sr-only">Toggle navigation menu</span>
            </Button>
          </SheetTrigger>
          <SheetContent side="left">
            <nav className="grid gap-6 text-lg font-medium">
              <Link
                href="/"
                className="flex flex-row gap-2 text-muted-foreground hover:text-foreground"
              >
                <IconSquareActivity /> Monitor
              </Link>
              <Link
                href="/build"
                className="flex flex-row gap-2 text-muted-foreground hover:text-foreground"
              >
                <IconWorkFlow /> Build
              </Link>
              <Link
                href="/marketplace"
                className="flex flex-row gap-2 text-muted-foreground hover:text-foreground"
              >
                <IconPackage2 /> Marketplace
              </Link>
            </nav>
          </SheetContent>
        </Sheet>
        <nav className="hidden md:flex md:flex-row md:items-center md:gap-5 lg:gap-6">
          <Link
            href="/"
            className="flex flex-row items-center gap-2 text-muted-foreground hover:text-foreground"
          >
            <IconSquareActivity /> Monitor
          </Link>
          <Link
            href="/build"
            className="flex flex-row items-center gap-2 text-muted-foreground hover:text-foreground"
          >
            <IconWorkFlow /> Build
          </Link>
          <Link
            href="/marketplace"
            className="flex flex-row items-center gap-2 text-muted-foreground hover:text-foreground"
          >
            <IconPackage2 /> Marketplace
          </Link>
        </nav>
      </div>
      <div className="relative flex flex-1 justify-center">
        <a
          className="pointer-events-auto flex place-items-center gap-2"
          href="https://news.agpt.co/"
          target="_blank"
          rel="noopener noreferrer"
        >
          By{" "}
          <Image
            src="/AUTOgpt_Logo_dark.png"
            alt="AutoGPT Logo"
            width={100}
            height={20}
            priority
          />
        </a>
      </div>
      <div className="flex flex-1 items-center justify-end gap-4">
        {isAvailable && user && <CreditButton />}

        {isAvailable && !user && (
          <Link
            href="/login"
            className="flex flex-row items-center gap-2 text-muted-foreground hover:text-foreground"
          >
            Log In
            <IconCircleUser />
          </Link>
        )}
        {isAvailable && user && <ProfileDropdown />}
      </div>
    </header>
  );
}
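NavBar is an async server component that only renders the auth-dependent items when the Supabase environment variables are set. A hypothetical sketch of where it would typically be mounted; the layout file itself is not part of this diff and the import path is assumed:

```tsx
import type { ReactNode } from "react";
import { NavBar } from "@/components/NavBar"; // path assumed

// CreditButton, "Log In", and ProfileDropdown only appear when
// NEXT_PUBLIC_SUPABASE_URL and NEXT_PUBLIC_SUPABASE_ANON_KEY are defined.
export default async function RootLayoutSketch({
  children,
}: {
  children: ReactNode;
}) {
  return (
    <html lang="en">
      <body>
        <NavBar />
        <main>{children}</main>
      </body>
    </html>
  );
}
```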
@@ -0,0 +1,85 @@
import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types";
import { beautifyString, getTypeBgColor, getTypeTextColor } from "@/lib/utils";
import { FC } from "react";
import { Handle, Position } from "@xyflow/react";
import SchemaTooltip from "./SchemaTooltip";

type HandleProps = {
  keyName: string;
  schema: BlockIOSubSchema;
  isConnected: boolean;
  isRequired?: boolean;
  side: "left" | "right";
};

const NodeHandle: FC<HandleProps> = ({
  keyName,
  schema,
  isConnected,
  isRequired,
  side,
}) => {
  const typeName: Record<string, string> = {
    string: "text",
    number: "number",
    boolean: "true/false",
    object: "object",
    array: "list",
    null: "null",
  };

  const typeClass = `text-sm ${getTypeTextColor(schema.type || "any")} ${side === "left" ? "text-left" : "text-right"}`;

  const label = (
    <div className="flex flex-grow flex-col">
      <span className="text-m green -mb-1 text-gray-900">
        {schema.title || beautifyString(keyName)}
        {isRequired ? "*" : ""}
      </span>
      <span className={typeClass}>{typeName[schema.type] || "any"}</span>
    </div>
  );

  const dot = (
    <div
      className={`m-1 h-4 w-4 border-2 bg-white ${isConnected ? getTypeBgColor(schema.type || "any") : "border-gray-300"} rounded-full transition-colors duration-100 group-hover:bg-gray-300`}
    />
  );

  if (side === "left") {
    return (
      <div key={keyName} className="handle-container">
        <Handle
          type="target"
          position={Position.Left}
          id={keyName}
          className="background-color: white; border: 2px solid black; width: 15px; height: 15px; border-radius: 50%; bottom: -7px; left: 20%; group -ml-[26px]"
        >
          <div className="pointer-events-none flex items-center">
            {dot}
            {label}
          </div>
        </Handle>
        <SchemaTooltip description={schema.description} />
      </div>
    );
  } else {
    return (
      <div key={keyName} className="handle-container justify-end">
        <Handle
          type="source"
          position={Position.Right}
          id={keyName}
          className="group -mr-[26px]"
        >
          <div className="pointer-events-none flex items-center">
            {label}
            {dot}
          </div>
        </Handle>
      </div>
    );
  }
};

export default NodeHandle;
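NodeHandle renders one React Flow handle per block input or output key: a target handle on the left, a source handle on the right. A small sketch of how a custom node body might use it; the schema literals, import path, and component name are illustrative assumptions, and this must be rendered inside a React Flow node type for the handles to register:

```tsx
import NodeHandle from "@/components/NodeHandle"; // path assumed
import { BlockIOSubSchema } from "@/lib/autogpt-server-api/types";

// Illustrative sub-schemas; cast because the exact schema union is not shown here.
const promptSchema = { type: "string", title: "Prompt" } as BlockIOSubSchema;
const resultSchema = { type: "string", title: "Result" } as BlockIOSubSchema;

// Intended to be rendered as part of a custom React Flow node component.
export function ExampleNodeHandles({
  connectedKeys,
}: {
  connectedKeys: Set<string>;
}) {
  return (
    <div>
      <NodeHandle
        keyName="prompt"
        schema={promptSchema}
        isConnected={connectedKeys.has("prompt")}
        isRequired
        side="left"
      />
      <NodeHandle
        keyName="result"
        schema={resultSchema}
        isConnected={connectedKeys.has("result")}
        side="right"
      />
    </div>
  );
}
```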
@@ -26,15 +26,10 @@ const OutputModalComponent: FC<OutputModalProps> = ({
    <div className="nodrag nowheel fixed inset-0 flex items-center justify-center bg-white bg-opacity-60">
      <div className="w-[500px] max-w-[90%] rounded-lg border-[1.5px] bg-white p-5">
        <strong>Output Data History</strong>
        <div className="my-2 max-h-[384px] flex-grow overflow-y-auto rounded-md p-2">
        <div className="my-2 max-h-[384px] flex-grow overflow-y-auto rounded-md border-[1.5px] p-2">
          {executionResults.map((data, i) => (
            <>
              <DataTable
                key={i}
                title={data.execId}
                data={data.data}
                truncateLongData={true}
              />
              <DataTable key={i} title={data.execId} data={data.data} />
              <Separator />
            </>
          ))}
@@ -16,7 +16,6 @@ const PasswordInput = forwardRef<HTMLInputElement, InputProps>(
          type={showPassword ? "text" : "password"}
          className={cn("hide-password-toggle pr-10", className)}
          ref={ref}
          title="password"
          {...props}
        />
        <Button
@@ -24,11 +23,8 @@ const PasswordInput = forwardRef<HTMLInputElement, InputProps>(
          variant="ghost"
          size="sm"
          className="absolute right-0 top-0 h-full px-3 py-2 hover:bg-transparent"
          onMouseDown={() => setShowPassword(true)}
          onMouseUp={() => setShowPassword(false)}
          onMouseLeave={() => setShowPassword(false)}
          onClick={() => setShowPassword((prev) => !prev)}
          disabled={disabled}
          tabIndex={-1}
        >
          {showPassword && !disabled ? (
            <EyeIcon className="h-4 w-4" aria-hidden="true" />
@@ -7,14 +7,16 @@ import {
  DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { Button } from "./ui/button";
import { useSupabase } from "./SupabaseProvider";
import { useRouter } from "next/navigation";
import useSupabase from "@/hooks/useSupabase";
import useUser from "@/hooks/useUser";

const ProfileDropdown = () => {
  const { supabase, user, isUserLoading } = useSupabase();
  const { supabase } = useSupabase();
  const router = useRouter();
  const { user, role, isLoading } = useUser();

  if (isUserLoading) {
  if (isLoading) {
    return null;
  }

@@ -35,16 +37,12 @@ const ProfileDropdown = () => {
        <DropdownMenuItem onClick={() => router.push("/profile")}>
          Profile
        </DropdownMenuItem>
        {user!.role === "admin" && (
        {role === "admin" && (
          <DropdownMenuItem onClick={() => router.push("/admin/dashboard")}>
            Admin Dashboard
          </DropdownMenuItem>
        )}
        <DropdownMenuItem
          onClick={() =>
            supabase?.auth.signOut().then(() => router.replace("/login"))
          }
        >
        <DropdownMenuItem onClick={() => supabase?.auth.signOut()}>
          Log out
        </DropdownMenuItem>
      </DropdownMenuContent>
@@ -1,6 +1,6 @@
// components/RoleBasedAccess.tsx
import useSupabase from "@/hooks/useSupabase";
import React from "react";
import useUser from "@/hooks/useUser";

interface RoleBasedAccessProps {
  allowedRoles: string[];
@@ -11,13 +11,13 @@ const RoleBasedAccess: React.FC<RoleBasedAccessProps> = ({
  allowedRoles,
  children,
}) => {
  const { user, isUserLoading } = useSupabase();
  const { role, isLoading } = useUser();

  if (isUserLoading) {
  if (isLoading) {
    return <div>Loading...</div>;
  }

  if (!user!.role || !allowedRoles.includes(user!.role)) {
  if (!role || !allowedRoles.includes(role)) {
    return null;
  }

@@ -0,0 +1,141 @@
import React, {
  useState,
  useCallback,
  forwardRef,
  useImperativeHandle,
} from "react";
import RunnerInputUI from "./runner-ui/RunnerInputUI";
import RunnerOutputUI from "./runner-ui/RunnerOutputUI";
import { Node } from "@xyflow/react";
import { filterBlocksByType } from "@/lib/utils";
import { BlockIORootSchema } from "@/lib/autogpt-server-api/types";

interface RunnerUIWrapperProps {
  nodes: Node[];
  setNodes: React.Dispatch<React.SetStateAction<Node[]>>;
  isRunning: boolean;
  requestSaveAndRun: () => void;
}

export interface RunnerUIWrapperRef {
  openRunnerInput: () => void;
  openRunnerOutput: () => void;
  runOrOpenInput: () => void;
}

const RunnerUIWrapper = forwardRef<RunnerUIWrapperRef, RunnerUIWrapperProps>(
  ({ nodes, setNodes, isRunning, requestSaveAndRun }, ref) => {
    const [isRunnerInputOpen, setIsRunnerInputOpen] = useState(false);
    const [isRunnerOutputOpen, setIsRunnerOutputOpen] = useState(false);

    const getBlockInputsAndOutputs = useCallback(() => {
      const inputBlocks = filterBlocksByType(
        nodes,
        (node) => node.data.block_id === "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
      );

      const outputBlocks = filterBlocksByType(
        nodes,
        (node) => node.data.block_id === "363ae599-353e-4804-937e-b2ee3cef3da4",
      );

      const inputs = inputBlocks.map((node) => ({
        id: node.id,
        type: "input" as const,
        inputSchema: node.data.inputSchema as BlockIORootSchema,
        hardcodedValues: {
          name: (node.data.hardcodedValues as any).name || "",
          description: (node.data.hardcodedValues as any).description || "",
          value: (node.data.hardcodedValues as any).value,
          placeholder_values:
            (node.data.hardcodedValues as any).placeholder_values || [],
          limit_to_placeholder_values:
            (node.data.hardcodedValues as any).limit_to_placeholder_values ||
            false,
        },
      }));

      const outputs = outputBlocks.map((node) => ({
        id: node.id,
        type: "output" as const,
        outputSchema: node.data.outputSchema as BlockIORootSchema,
        hardcodedValues: {
          name: (node.data.hardcodedValues as any).name || "Output",
          description:
            (node.data.hardcodedValues as any).description ||
            "Output from the agent",
          value: (node.data.hardcodedValues as any).value,
        },
        result: (node.data.executionResults as any)?.at(-1)?.data?.output,
      }));

      return { inputs, outputs };
    }, [nodes]);

    const handleInputChange = useCallback(
      (nodeId: string, field: string, value: string) => {
        setNodes((nds) =>
          nds.map((node) => {
            if (node.id === nodeId) {
              return {
                ...node,
                data: {
                  ...node.data,
                  hardcodedValues: {
                    ...(node.data.hardcodedValues as any),
                    [field]: value,
                  },
                },
              };
            }
            return node;
          }),
        );
      },
      [setNodes],
    );

    const openRunnerInput = () => setIsRunnerInputOpen(true);
    const openRunnerOutput = () => setIsRunnerOutputOpen(true);

    const runOrOpenInput = () => {
      const { inputs } = getBlockInputsAndOutputs();
      if (inputs.length > 0) {
        openRunnerInput();
      } else {
        requestSaveAndRun();
      }
    };

    useImperativeHandle(ref, () => ({
      openRunnerInput,
      openRunnerOutput,
      runOrOpenInput,
    }));

    return (
      <>
        <RunnerInputUI
          isOpen={isRunnerInputOpen}
          onClose={() => setIsRunnerInputOpen(false)}
          blockInputs={getBlockInputsAndOutputs().inputs}
          onInputChange={handleInputChange}
          onRun={() => {
            setIsRunnerInputOpen(false);
            requestSaveAndRun();
          }}
          isRunning={isRunning}
        />
        <RunnerOutputUI
          isOpen={isRunnerOutputOpen}
          onClose={() => setIsRunnerOutputOpen(false)}
          blockOutputs={getBlockInputsAndOutputs().outputs}
        />
      </>
    );
  },
);

RunnerUIWrapper.displayName = "RunnerUIWrapper";

export default RunnerUIWrapper;
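RunnerUIWrapper exposes its open/run methods to the parent via forwardRef and useImperativeHandle instead of lifting the dialog state up. A stripped-down, self-contained sketch of that pattern with illustrative names only:

```tsx
import { forwardRef, useImperativeHandle, useRef, useState } from "react";

interface PanelRef {
  open: () => void;
}

// The child owns its open/close state but hands the parent a tiny imperative API.
const Panel = forwardRef<PanelRef>((_props, ref) => {
  const [isOpen, setIsOpen] = useState(false);
  useImperativeHandle(ref, () => ({ open: () => setIsOpen(true) }));
  return isOpen ? <div role="dialog">panel content</div> : null;
});
Panel.displayName = "Panel";

export function PanelHost() {
  const panelRef = useRef<PanelRef>(null);
  return (
    <>
      <button onClick={() => panelRef.current?.open()}>Open panel</button>
      <Panel ref={panelRef} />
    </>
  );
}
```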
@@ -14,18 +14,15 @@ const SchemaTooltip: React.FC<{ description?: string }> = ({ description }) => {
    <TooltipProvider delayDuration={400}>
      <Tooltip>
        <TooltipTrigger asChild>
          <Info
            className="rounded-full p-1 hover:bg-gray-300 dark:hover:bg-gray-700"
            size={24}
          />
          <Info className="rounded-full p-1 hover:bg-gray-300" size={24} />
        </TooltipTrigger>
        <TooltipContent className="tooltip-content max-w-xs bg-white text-gray-900 dark:bg-gray-800 dark:text-gray-100">
        <TooltipContent className="tooltip-content max-w-xs">
          <ReactMarkdown
            components={{
              a: ({ node, ...props }) => (
                <a
                  target="_blank"
                  className="text-blue-400 underline dark:text-blue-300"
                  className="text-blue-400 underline"
                  {...props}
                />
              ),
@@ -0,0 +1,65 @@
"use client";

import { createClient } from "@/lib/supabase/client";
import { SupabaseClient } from "@supabase/supabase-js";
import { useRouter } from "next/navigation";
import { createContext, useContext, useEffect, useState } from "react";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";

type SupabaseContextType = {
  supabase: SupabaseClient | null;
  isLoading: boolean;
};

const Context = createContext<SupabaseContextType | undefined>(undefined);

export default function SupabaseProvider({
  children,
}: {
  children: React.ReactNode;
}) {
  const [supabase, setSupabase] = useState<SupabaseClient | null>(null);
  const [isLoading, setIsLoading] = useState(true);
  const router = useRouter();

  useEffect(() => {
    const initializeSupabase = async () => {
      setIsLoading(true);
      const client = createClient();
      const api = new AutoGPTServerAPI();
      setSupabase(client);
      setIsLoading(false);

      if (client) {
        const {
          data: { subscription },
        } = client.auth.onAuthStateChange((event, session) => {
          if (event === "SIGNED_IN") {
            api.createUser();
          }
          router.refresh();
        });

        return () => {
          subscription.unsubscribe();
        };
      }
    };

    initializeSupabase();
  }, [router]);

  return (
    <Context.Provider value={{ supabase, isLoading }}>
      {children}
    </Context.Provider>
  );
}

export const useSupabase = () => {
  const context = useContext(Context);
  if (context === undefined) {
    throw new Error("useSupabase must be used inside SupabaseProvider");
  }
  return context;
};
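Any client component mounted below SupabaseProvider can obtain the client through the useSupabase() hook exported above. A hypothetical consumer sketch; the component name, import path, and redirect target are assumptions, while the sign-out call mirrors ProfileDropdown earlier in this diff:

```tsx
"use client";

import { useRouter } from "next/navigation";
import { useSupabase } from "@/components/SupabaseProvider"; // path assumed

export function SignOutButtonExample() {
  const { supabase, isLoading } = useSupabase();
  const router = useRouter();

  // Render nothing until the provider has created the client.
  if (isLoading || !supabase) return null;

  return (
    <button
      onClick={() =>
        supabase.auth.signOut().then(() => router.replace("/login"))
      }
    >
      Log out
    </button>
  );
}
```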
@@ -1,20 +1,10 @@
"use client";

import React, { useEffect, useState } from "react";
import { Button } from "./ui/button";
import { QuestionMarkCircledIcon } from "@radix-ui/react-icons";
import { useRouter, usePathname } from "next/navigation";
import { IconMegaphone } from "@/components/ui/icons";

const TallyPopupSimple = () => {
  const [isFormVisible, setIsFormVisible] = useState(false);
  const router = useRouter();
  const pathname = usePathname();

  const [show_tutorial, setShowTutorial] = useState(false);

  useEffect(() => {
    setShowTutorial(pathname.includes("build"));
  }, [pathname]);

  useEffect(() => {
    // Load Tally script
@@ -52,28 +42,22 @@ const TallyPopupSimple = () => {
  }

  const resetTutorial = () => {
    router.push("/build?resetTutorial=true");
    const url = `${window.location.origin}/build?resetTutorial=true`;
    window.location.href = url;
  };

  return (
    <div className="fixed bottom-1 right-24 z-50 hidden select-none items-center gap-4 p-3 transition-all duration-300 ease-in-out md:flex">
      {show_tutorial && (
        <Button
          variant="default"
          onClick={resetTutorial}
          className="mb-0 h-14 w-28 rounded-2xl bg-[rgba(65,65,64,1)] text-left font-inter text-lg font-medium leading-6"
        >
          Tutorial
        </Button>
      )}
    <div className="fixed bottom-6 right-6 z-50 flex items-center gap-4 p-3 transition-all duration-300 ease-in-out">
      <Button variant="default" onClick={resetTutorial} className="mb-0">
        Tutorial
      </Button>
      <Button
        className="h-14 w-14 rounded-full bg-[rgba(65,65,64,1)]"
        variant="default"
        data-tally-open="3yx2L0"
        data-tally-emoji-text="👋"
        data-tally-emoji-animation="wave"
      >
        <QuestionMarkCircledIcon className="h-14 w-14" />
        <IconMegaphone size="lg" />
        <span className="sr-only">Reach Out</span>
      </Button>
    </div>
@@ -0,0 +1,149 @@
"use client";

import {
  Dialog,
  DialogContent,
  DialogClose,
  DialogFooter,
  DialogHeader,
  DialogTitle,
  DialogTrigger,
} from "@/components/ui/dialog";
import { Button } from "@/components/ui/button";
import {
  MultiSelector,
  MultiSelectorContent,
  MultiSelectorInput,
  MultiSelectorItem,
  MultiSelectorList,
  MultiSelectorTrigger,
} from "@/components/ui/multiselect";
import { Controller, useForm } from "react-hook-form";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";
import { useState } from "react";
import { addFeaturedAgent } from "./actions";
import { Agent } from "@/lib/marketplace-api/types";

type FormData = {
  agent: string;
  categories: string[];
};

export const AdminAddFeaturedAgentDialog = ({
  categories,
  agents,
}: {
  categories: string[];
  agents: Agent[];
}) => {
  const [selectedAgent, setSelectedAgent] = useState<string>("");
  const [selectedCategories, setSelectedCategories] = useState<string[]>([]);

  const {
    control,
    handleSubmit,
    watch,
    setValue,
    formState: { errors },
  } = useForm<FormData>({
    defaultValues: {
      agent: "",
      categories: [],
    },
  });

  return (
    <Dialog>
      <DialogTrigger asChild>
        <Button variant="outline" size="sm">
          Add Featured Agent
        </Button>
      </DialogTrigger>
      <DialogContent>
        <DialogHeader>
          <DialogTitle>Add Featured Agent</DialogTitle>
        </DialogHeader>
        <div className="flex flex-col gap-4">
          <Controller
            name="agent"
            control={control}
            rules={{ required: true }}
            render={({ field }) => (
              <div>
                <label htmlFor={field.name}>Agent</label>
                <Select
                  onValueChange={(value) => {
                    field.onChange(value);
                    setSelectedAgent(value);
                  }}
                  value={field.value || ""}
                >
                  <SelectTrigger>
                    <SelectValue placeholder="Select an agent" />
                  </SelectTrigger>
                  <SelectContent>
                    {/* Populate with agents */}
                    {agents.map((agent) => (
                      <SelectItem key={agent.id} value={agent.id}>
                        {agent.name}
                      </SelectItem>
                    ))}
                  </SelectContent>
                </Select>
              </div>
            )}
          />
          <Controller
            name="categories"
            control={control}
            render={({ field }) => (
              <MultiSelector
                values={field.value || []}
                onValuesChange={(values) => {
                  field.onChange(values);
                  setSelectedCategories(values);
                }}
              >
                <MultiSelectorTrigger>
                  <MultiSelectorInput placeholder="Select categories" />
                </MultiSelectorTrigger>
                <MultiSelectorContent>
                  <MultiSelectorList>
                    {categories.map((category) => (
                      <MultiSelectorItem key={category} value={category}>
                        {category}
                      </MultiSelectorItem>
                    ))}
                  </MultiSelectorList>
                </MultiSelectorContent>
              </MultiSelector>
            )}
          />
        </div>
        <DialogFooter>
          <DialogClose asChild>
            <Button variant="outline">Cancel</Button>
          </DialogClose>
          <DialogClose asChild>
            <Button
              type="submit"
              onClick={async () => {
                // Handle adding the featured agent
                await addFeaturedAgent(selectedAgent, selectedCategories);
                // close the dialog
              }}
            >
              Add
            </Button>
          </DialogClose>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
};
@@ -0,0 +1,74 @@
import { Button } from "@/components/ui/button";
import {
  getFeaturedAgents,
  removeFeaturedAgent,
  getCategories,
  getNotFeaturedAgents,
} from "./actions";

import FeaturedAgentsTable from "./FeaturedAgentsTable";
import { AdminAddFeaturedAgentDialog } from "./AdminAddFeaturedAgentDialog";
import { revalidatePath } from "next/cache";
import * as Sentry from "@sentry/nextjs";

export default async function AdminFeaturedAgentsControl({
  className,
}: {
  className?: string;
}) {
  // add featured agent button
  // modal to select agent?
  // modal to select categories?
  // table of featured agents
  // in table
  // remove featured agent button
  // edit featured agent categories button
  // table footer
  // Next page button
  // Previous page button
  // Page number input
  // Page size input
  // Total pages input
  // Go to page button

  const page = 1;
  const pageSize = 10;

  const agents = await getFeaturedAgents(page, pageSize);

  const categories = await getCategories();

  const notFeaturedAgents = await getNotFeaturedAgents();

  return (
    <div className={`flex flex-col gap-4 ${className}`}>
      <div className="mb-4 flex justify-between">
        <h3 className="text-lg font-semibold">Featured Agent Controls</h3>
        <AdminAddFeaturedAgentDialog
          categories={categories.unique_categories}
          agents={notFeaturedAgents.agents}
        />
      </div>
      <FeaturedAgentsTable
        agents={agents.agents}
        globalActions={[
          {
            component: <Button>Remove</Button>,
            action: async (rows) => {
              "use server";
              return await Sentry.withServerActionInstrumentation(
                "removeFeaturedAgent",
                {},
                async () => {
                  const all = rows.map((row) => removeFeaturedAgent(row.id));
                  await Promise.all(all);
                  revalidatePath("/marketplace");
                },
              );
            },
          },
        ]}
      />
    </div>
  );
}
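The Remove action above combines an inline "use server" function, Sentry's withServerActionInstrumentation wrapper, and revalidatePath. A condensed sketch of that pattern on its own, with an assumed row shape and the same actions module as above:

```tsx
import * as Sentry from "@sentry/nextjs";
import { revalidatePath } from "next/cache";
import { removeFeaturedAgent } from "./actions"; // same actions module as above

export async function removeRowsAction(rows: { id: string }[]) {
  "use server";
  return await Sentry.withServerActionInstrumentation(
    "removeFeaturedAgent",
    {},
    async () => {
      // Fan out one delete per selected row, then refresh the marketplace page.
      await Promise.all(rows.map((row) => removeFeaturedAgent(row.id)));
      revalidatePath("/marketplace");
    },
  );
}
```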
Some files were not shown because too many files have changed in this diff.