mirror of https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-04-08 03:00:28 -04:00
Merge branch 'dev'
This commit is contained in:
@@ -15,6 +15,7 @@
!autogpt_platform/backend/pyproject.toml
!autogpt_platform/backend/poetry.lock
!autogpt_platform/backend/README.md
!autogpt_platform/backend/.env

# Platform - Market
!autogpt_platform/market/market/
@@ -27,6 +28,7 @@
# Platform - Frontend
!autogpt_platform/frontend/src/
!autogpt_platform/frontend/public/
!autogpt_platform/frontend/scripts/
!autogpt_platform/frontend/package.json
!autogpt_platform/frontend/pnpm-lock.yaml
!autogpt_platform/frontend/tsconfig.json
@@ -34,6 +36,7 @@
## config
!autogpt_platform/frontend/*.config.*
!autogpt_platform/frontend/.env.*
!autogpt_platform/frontend/.env

# Classic - AutoGPT
!classic/original_autogpt/autogpt/
3 .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -24,7 +24,8 @@
</details>

#### For configuration changes:
- [ ] `.env.example` is updated or already compatible with my changes
- [ ] `.env.default` is updated or already compatible with my changes
- [ ] `docker-compose.yml` is updated or already compatible with my changes
- [ ] I have included a list of my configuration changes in the PR description (under **Changes**)
46 .github/workflows/platform-frontend-ci.yml vendored
@@ -82,37 +82,6 @@ jobs:
      - name: Run lint
        run: pnpm lint

  type-check:
    runs-on: ubuntu-latest
    needs: setup

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Enable corepack
        run: corepack enable

      - name: Restore dependencies cache
        uses: actions/cache@v4
        with:
          path: ~/.pnpm-store
          key: ${{ needs.setup.outputs.cache-key }}
          restore-keys: |
            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
            ${{ runner.os }}-pnpm-

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Run tsc check
        run: pnpm type-check

  chromatic:
    runs-on: ubuntu-latest
    needs: setup
@@ -176,11 +145,7 @@
      - name: Copy default supabase .env
        run: |
          cp ../.env.example ../.env

      - name: Copy backend .env
        run: |
          cp ../backend/.env.example ../backend/.env
          cp ../.env.default ../.env

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
@@ -252,15 +217,6 @@
      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Setup .env
        run: cp .env.example .env

      - name: Build frontend
        run: pnpm build --turbo
        # uses Turbopack, much faster and safe enough for a test pipeline
        env:
          NEXT_PUBLIC_PW_TEST: true

      - name: Install Browser 'chromium'
        run: pnpm playwright install --with-deps chromium
132 .github/workflows/platform-fullstack-ci.yml vendored Normal file
@@ -0,0 +1,132 @@
name: AutoGPT Platform - Frontend CI

on:
  push:
    branches: [master, dev]
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - "autogpt_platform/**"
  pull_request:
    paths:
      - ".github/workflows/platform-fullstack-ci.yml"
      - "autogpt_platform/**"
  merge_group:

defaults:
  run:
    shell: bash
    working-directory: autogpt_platform/frontend

jobs:
  setup:
    runs-on: ubuntu-latest
    outputs:
      cache-key: ${{ steps.cache-key.outputs.key }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Enable corepack
        run: corepack enable

      - name: Generate cache key
        id: cache-key
        run: echo "key=${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}" >> $GITHUB_OUTPUT

      - name: Cache dependencies
        uses: actions/cache@v4
        with:
          path: ~/.pnpm-store
          key: ${{ steps.cache-key.outputs.key }}
          restore-keys: |
            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
            ${{ runner.os }}-pnpm-

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

  types:
    runs-on: ubuntu-latest
    needs: setup
    strategy:
      fail-fast: false

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          submodules: recursive

      - name: Set up Node.js
        uses: actions/setup-node@v4
        with:
          node-version: "21"

      - name: Enable corepack
        run: corepack enable

      - name: Copy default supabase .env
        run: |
          cp ../.env.default ../.env

      - name: Copy backend .env
        run: |
          cp ../backend/.env.default ../backend/.env

      - name: Run docker compose
        run: |
          docker compose -f ../docker-compose.yml --profile local --profile deps_backend up -d

      - name: Restore dependencies cache
        uses: actions/cache@v4
        with:
          path: ~/.pnpm-store
          key: ${{ needs.setup.outputs.cache-key }}
          restore-keys: |
            ${{ runner.os }}-pnpm-

      - name: Install dependencies
        run: pnpm install --frozen-lockfile

      - name: Setup .env
        run: cp .env.default .env

      - name: Wait for services to be ready
        run: |
          echo "Waiting for rest_server to be ready..."
          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
          echo "Waiting for database to be ready..."
          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."

      - name: Generate API queries
        run: pnpm generate:api:force

      - name: Check for API schema changes
        run: |
          if ! git diff --exit-code src/app/api/openapi.json; then
            echo "❌ API schema changes detected in src/app/api/openapi.json"
            echo ""
            echo "The openapi.json file has been modified after running 'pnpm generate:api-all'."
            echo "This usually means changes have been made to the backend endpoints without updating the frontend."
            echo "The API schema is now out of sync with the frontend queries."
            echo ""
            echo "To fix this:"
            echo "1. Pull the backend: 'docker compose pull && docker compose up -d --build --force-recreate'"
            echo "2. Run 'pnpm generate:api' locally"
            echo "3. Run 'pnpm types' locally"
            echo "4. Fix any TypeScript errors that may have been introduced"
            echo "5. Commit and push your changes"
            echo ""
            exit 1
          else
            echo "✅ No API schema changes detected"
          fi

      - name: Run Typescript checks
        run: pnpm types
3 .gitignore vendored
@@ -5,6 +5,8 @@ classic/original_autogpt/*.json
auto_gpt_workspace/*
*.mpeg
.env
# Root .env files
/.env
azure.yaml
.vscode
.idea/*
@@ -121,7 +123,6 @@ celerybeat.pid

# Environments
.direnv/
.env
.venv
env/
venv*/

@@ -235,7 +235,7 @@ repos:
    hooks:
      - id: tsc
        name: Typecheck - AutoGPT Platform - Frontend
        entry: bash -c 'cd autogpt_platform/frontend && pnpm type-check'
        entry: bash -c 'cd autogpt_platform/frontend && pnpm types'
        files: ^autogpt_platform/frontend/
        types: [file]
        language: system
10 README.md
@@ -3,6 +3,16 @@
[](https://discord.gg/autogpt)
[](https://twitter.com/Auto_GPT)

<!-- Keep these links. Translations will automatically update with the README. -->
[Deutsch](https://zdoc.app/de/Significant-Gravitas/AutoGPT) |
[Español](https://zdoc.app/es/Significant-Gravitas/AutoGPT) |
[français](https://zdoc.app/fr/Significant-Gravitas/AutoGPT) |
[日本語](https://zdoc.app/ja/Significant-Gravitas/AutoGPT) |
[한국어](https://zdoc.app/ko/Significant-Gravitas/AutoGPT) |
[Português](https://zdoc.app/pt/Significant-Gravitas/AutoGPT) |
[Русский](https://zdoc.app/ru/Significant-Gravitas/AutoGPT) |
[中文](https://zdoc.app/zh/Significant-Gravitas/AutoGPT)

**AutoGPT** is a powerful platform that allows you to create, deploy, and manage continuous AI agents that automate complex workflows.

## Hosting Options
@@ -1,9 +1,11 @@
# CLAUDE.md

This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.

## Repository Overview

AutoGPT Platform is a monorepo containing:

- **Backend** (`/backend`): Python FastAPI server with async support
- **Frontend** (`/frontend`): Next.js React application
- **Shared Libraries** (`/autogpt_libs`): Common Python utilities
@@ -11,6 +13,7 @@ AutoGPT Platform is a monorepo containing:
## Essential Commands

### Backend Development

```bash
# Install dependencies
cd backend && poetry install
@@ -41,6 +44,7 @@ poetry run pytest 'backend/blocks/test/test_block.py::test_available_blocks[GetC
poetry run format  # Black + isort
poetry run lint    # ruff
```

More details can be found in TESTING.md.

#### Creating/Updating Snapshots
@@ -53,8 +57,8 @@ poetry run pytest path/to/test.py --snapshot-update

⚠️ **Important**: Always review snapshot changes before committing! Use `git diff` to verify the changes are expected.

### Frontend Development

```bash
# Install dependencies
cd frontend && npm install
@@ -72,12 +76,13 @@ npm run storybook
npm run build

# Type checking
npm run type-check
npm run types
```

## Architecture Overview

### Backend Architecture

- **API Layer**: FastAPI with REST and WebSocket endpoints
- **Database**: PostgreSQL with Prisma ORM, includes pgvector for embeddings
- **Queue System**: RabbitMQ for async task processing
@@ -86,6 +91,7 @@ npm run type-check
- **Security**: Cache protection middleware prevents sensitive data caching in browsers/proxies

### Frontend Architecture

- **Framework**: Next.js App Router with React Server Components
- **State Management**: React hooks + Supabase client for real-time updates
- **Workflow Builder**: Visual graph editor using @xyflow/react
@@ -93,6 +99,7 @@ npm run type-check
- **Feature Flags**: LaunchDarkly integration

### Key Concepts

1. **Agent Graphs**: Workflow definitions stored as JSON, executed by the backend
2. **Blocks**: Reusable components in `/backend/blocks/` that perform specific tasks
3. **Integrations**: OAuth and API connections stored per user
@@ -100,13 +107,16 @@ npm run type-check
5. **Virus Scanning**: ClamAV integration for file upload security

### Testing Approach

- Backend uses pytest with snapshot testing for API responses
- Test files are colocated with source files (`*_test.py`)
- Frontend uses Playwright for E2E tests
- Component testing via Storybook

### Database Schema

Key models (defined in `/backend/schema.prisma`):

- `User`: Authentication and profile data
- `AgentGraph`: Workflow definitions with version control
- `AgentGraphExecution`: Execution history and results
@@ -114,13 +124,31 @@ Key models (defined in `/backend/schema.prisma`):
- `StoreListing`: Marketplace listings for sharing agents

### Environment Configuration

- Backend: `.env` file in `/backend`
- Frontend: `.env.local` file in `/frontend`
- Both require Supabase credentials and API keys for various services

#### Configuration Files

- **Backend**: `/backend/.env.default` (defaults) → `/backend/.env` (user overrides)
- **Frontend**: `/frontend/.env.default` (defaults) → `/frontend/.env` (user overrides)
- **Platform**: `/.env.default` (Supabase/shared defaults) → `/.env` (user overrides)

#### Docker Environment Loading Order

1. `.env.default` files provide base configuration (tracked in git)
2. `.env` files provide user-specific overrides (gitignored)
3. Docker Compose `environment:` sections provide service-specific overrides
4. Shell environment variables have the highest precedence

#### Key Points

- All services use hardcoded defaults in docker-compose files (no `${VARIABLE}` substitutions)
- The `env_file` directive loads variables INTO containers at runtime
- Backend/Frontend services use YAML anchors for consistent configuration
- Supabase services (`db/docker/docker-compose.yml`) follow the same pattern

### Common Development Tasks

**Adding a new block:**

1. Create a new file in `/backend/backend/blocks/`
2. Inherit from the `Block` base class
3. Define input/output schemas (see the sketch below)
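
A minimal sketch of such a block, modeled on the enrichlayer blocks added in this commit; the class name, UUID, category, and logic are illustrative placeholders, not project code:

```python
# Illustrative only: a tiny block following the steps above.
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from backend.data.model import SchemaField


class ReverseTextBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="Text to reverse")

    class Output(BlockSchema):
        reversed_text: str = SchemaField(description="The input text, reversed")
        error: str = SchemaField(description="Error message if the block failed")

    def __init__(self):
        super().__init__(
            id="00000000-0000-4000-8000-000000000000",  # placeholder UUID
            description="Reverse the input text",
            categories={BlockCategory.TEXT},  # assumed category; pick what fits
            input_schema=ReverseTextBlock.Input,
            output_schema=ReverseTextBlock.Output,
            test_input={"text": "abc"},
            test_output=[("reversed_text", "cba")],
        )

    async def run(self, input_data: Input, **kwargs) -> BlockOutput:
        # Blocks yield (output_name, value) tuples, as in the enrichlayer blocks.
        yield "reversed_text", input_data.text[::-1]
```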
@@ -132,12 +160,14 @@ Note: when making many new blocks, analyze the interfaces for each of these blocks
ex: do the inputs and outputs tie well together?

**Modifying the API:**

1. Update the route in `/backend/backend/server/routers/`
2. Add/update Pydantic models in the same directory
3. Write tests alongside the route file
4. Run `poetry run test` to verify (a hypothetical route sketch follows)
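
A hypothetical sketch of steps 1-2; the route path and model names are illustrative, not actual project routes:

```python
# Illustrative route + model pair for /backend/backend/server/routers/.
from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()


class EchoRequest(BaseModel):
    message: str


class EchoResponse(BaseModel):
    echoed: str


@router.post("/echo", response_model=EchoResponse)
async def echo(request: EchoRequest) -> EchoResponse:
    # Pydantic validates the request body; FastAPI serializes the response.
    return EchoResponse(echoed=request.message)
```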

**Frontend feature development:**

1. Components go in `/frontend/src/components/`
2. Use existing UI components from `/frontend/src/components/ui/`
3. Add Storybook stories for new components
@@ -146,6 +176,7 @@ ex: do the inputs and outputs tie well together?
### Security Implementation

**Cache Protection Middleware:**

- Located in `/backend/backend/server/middleware/security.py`
- Default behavior: disables caching for ALL endpoints with `Cache-Control: no-store, no-cache, must-revalidate, private`
- Uses an allow-list approach: only explicitly permitted paths can be cached
@@ -154,14 +185,20 @@ ex: do the inputs and outputs tie well together?
- To allow caching for a new endpoint, add it to `CACHEABLE_PATHS` in the middleware (a rough sketch follows)
- Applied to both the main API server and external API applications
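
A rough sketch of the allow-list idea described above; only `CACHEABLE_PATHS` is named in these notes, everything else here is illustrative rather than the project's actual middleware:

```python
# Illustrative default-deny cache headers with an explicit allow list.
from starlette.middleware.base import BaseHTTPMiddleware

CACHEABLE_PATHS = {"/api/health"}  # hypothetical allow-listed path


class CacheProtectionMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        response = await call_next(request)
        if request.url.path not in CACHEABLE_PATHS:
            # Default behavior: forbid caching everywhere else.
            response.headers["Cache-Control"] = (
                "no-store, no-cache, must-revalidate, private"
            )
        return response
```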

### Creating Pull Requests

- Create the PR against the `dev` branch of the repository.
- Ensure the branch name is descriptive (e.g., `feature/add-new-block`).
- Use conventional commit messages (see below).
- Fill out the .github/PULL_REQUEST_TEMPLATE.md template as the PR description.
- Run the GitHub pre-commit hooks to ensure code quality.

### Reviewing/Revising Pull Requests

- When the user runs /pr-comments or tries to fetch them, also run `gh api /repos/Significant-Gravitas/AutoGPT/pulls/[issuenum]/reviews` to get the reviews
- Use `gh api /repos/Significant-Gravitas/AutoGPT/pulls/[issuenum]/reviews/[review_id]/comments` to get the review contents
- Use `gh api /repos/Significant-Gravitas/AutoGPT/issues/9924/comments` to get the PR-specific comments

### Conventional Commits

Use this format for commit messages and Pull Request titles:
@@ -8,7 +8,6 @@ Welcome to the AutoGPT Platform - a powerful system for creating and running AI
- Docker
- Docker Compose V2 (comes with Docker Desktop, or can be installed separately)
- Node.js & NPM (for running the frontend application)

### Running the System

@@ -24,10 +23,10 @@ To run the AutoGPT Platform, follow these steps:
2. Run the following command:

   ```
   cp .env.example .env
   cp .env.default .env
   ```

   This command will copy the `.env.example` file to `.env`. You can modify the `.env` file to add your own environment variables.
   This command will copy the `.env.default` file to `.env`. You can modify the `.env` file to add your own environment variables.

3. Run the following command:

@@ -37,44 +36,7 @@ To run the AutoGPT Platform, follow these steps:
   This command will start all the necessary backend services defined in the `docker-compose.yml` file in detached mode.

4. Navigate to `frontend` within the `autogpt_platform` directory:

   ```
   cd frontend
   ```

   You will need to run your frontend application separately on your local machine.

5. Run the following command:

   ```
   cp .env.example .env.local
   ```

   This command will copy the `.env.example` file to `.env.local` in the `frontend` directory. You can modify the `.env.local` within this folder to add your own environment variables for the frontend application.

6. Run the following command:

   Enable corepack and install dependencies by running:

   ```
   corepack enable
   pnpm i
   ```

   Generate the API client (this step is required before running the frontend):

   ```
   pnpm generate:api-client
   ```

   Then start the frontend application in development mode:

   ```
   pnpm dev
   ```

7. Open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend.
4. After all the services are ready, open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend.

### Docker Compose Commands

@@ -177,20 +139,21 @@ The platform includes scripts for generating and managing the API client:
- `pnpm fetch:openapi`: Fetches the OpenAPI specification from the backend service (requires backend to be running on port 8006)
- `pnpm generate:api-client`: Generates the TypeScript API client from the OpenAPI specification using Orval
- `pnpm generate:api-all`: Runs both fetch and generate commands in sequence
- `pnpm generate:api`: Runs both fetch and generate commands in sequence

#### Manual API Client Updates

If you need to update the API client after making changes to the backend API:

1. Ensure the backend services are running:

   ```
   docker compose up -d
   ```

2. Generate the updated API client:

   ```
   pnpm generate:api-all
   pnpm generate:api
   ```

This will fetch the latest OpenAPI specification and regenerate the TypeScript client code.
52 autogpt_platform/backend/.dockerignore Normal file
@@ -0,0 +1,52 @@
# Development and testing files
**/__pycache__
**/*.pyc
**/*.pyo
**/*.pyd
**/.Python
**/env/
**/venv/
**/.venv/
**/pip-log.txt
**/.pytest_cache/
**/test-results/
**/snapshots/
**/test/

# IDE and editor files
**/.vscode/
**/.idea/
**/*.swp
**/*.swo
*~

# OS files
.DS_Store
Thumbs.db

# Logs
**/*.log
**/logs/

# Git
.git/
.gitignore

# Documentation
**/*.md
!README.md

# Local development files
.env
.env.local
**/.env.test

# Build artifacts
**/dist/
**/build/
**/target/

# Docker files (avoid recursion)
Dockerfile*
docker-compose*
.dockerignore
@@ -1,3 +1,9 @@
# Backend Configuration
# This file contains environment variables that MUST be set for the AutoGPT platform
# Variables with working defaults in settings.py are not included here

## ===== REQUIRED DATABASE CONFIGURATION ===== ##
# PostgreSQL Database Connection
DB_USER=postgres
DB_PASS=your-super-secret-and-long-postgres-password
DB_NAME=postgres
@@ -10,74 +16,50 @@ DB_SCHEMA=platform
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
DIRECT_URL="postgresql://${DB_USER}:${DB_PASS}@${DB_HOST}:${DB_PORT}/${DB_NAME}?schema=${DB_SCHEMA}&connect_timeout=${DB_CONNECT_TIMEOUT}"
PRISMA_SCHEMA="postgres/schema.prisma"
ENABLE_AUTH=true

# EXECUTOR
NUM_GRAPH_WORKERS=10

BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]

# generate using `from cryptography.fernet import Fernet;Fernet.generate_key().decode()`
ENCRYPTION_KEY='dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw='
UNSUBSCRIBE_SECRET_KEY = 'HlP8ivStJjmbf6NKi78m_3FnOogut0t5ckzjsIqeaio='

## ===== REQUIRED SERVICE CREDENTIALS ===== ##
# Redis Configuration
REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=password

ENABLE_CREDIT=false
STRIPE_API_KEY=
STRIPE_WEBHOOK_SECRET=
# RabbitMQ Credentials
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7

# What environment things should be logged under: local dev or prod
APP_ENV=local
# What environment to behave as: "local" or "cloud"
BEHAVE_AS=local
PYRO_HOST=localhost
SENTRY_DSN=

# Email For Postmark so we can send emails
POSTMARK_SERVER_API_TOKEN=
POSTMARK_SENDER_EMAIL=invalid@invalid.com
POSTMARK_WEBHOOK_TOKEN=

## User auth with Supabase is required for any of the 3rd party integrations with auth to work.
ENABLE_AUTH=true
# Supabase Authentication
SUPABASE_URL=http://localhost:8000
SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long

# RabbitMQ credentials -- Used for communication between services
RABBITMQ_HOST=localhost
RABBITMQ_PORT=5672
RABBITMQ_DEFAULT_USER=rabbitmq_user_default
RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
## ===== REQUIRED SECURITY KEYS ===== ##
# Generate using: from cryptography.fernet import Fernet;Fernet.generate_key().decode()
ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw=
UNSUBSCRIBE_SECRET_KEY=HlP8ivStJjmbf6NKi78m_3FnOogut0t5ckzjsIqeaio=
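
# Example: generate a fresh value for either key above (assumes the
# `cryptography` package is installed, as the "Generate using" comment implies):
#   python3 -c "from cryptography.fernet import Fernet; print(Fernet.generate_key().decode())"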
## GCS bucket is required for marketplace and library functionality
## ===== IMPORTANT OPTIONAL CONFIGURATION ===== ##
# Platform URLs (set these for webhooks and OAuth to work)
PLATFORM_BASE_URL=http://localhost:8000
FRONTEND_BASE_URL=http://localhost:3000

# Media Storage (required for marketplace and library functionality)
MEDIA_GCS_BUCKET_NAME=

## For local development, you may need to set FRONTEND_BASE_URL for the OAuth flow
## for integrations to work. Defaults to the value of PLATFORM_BASE_URL if not set.
# FRONTEND_BASE_URL=http://localhost:3000
## ===== API KEYS AND OAUTH CREDENTIALS ===== ##
# All API keys below are optional - only add what you need

## PLATFORM_BASE_URL must be set to a *publicly accessible* URL pointing to your backend
## to use the platform's webhook-related functionality.
## If you are developing locally, you can use something like ngrok to get a public URL
## and tunnel it to your locally running backend.
PLATFORM_BASE_URL=http://localhost:3000

## Cloudflare Turnstile (CAPTCHA) Configuration
## Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
## This is the backend secret key
TURNSTILE_SECRET_KEY=
## This is the verify URL
TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

LAUNCH_DARKLY_SDK_KEY=

## == INTEGRATION CREDENTIALS == ##
# Each set of server side credentials is required for the corresponding 3rd party
# integration to work.
# AI/LLM Services
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GROQ_API_KEY=
LLAMA_API_KEY=
AIML_API_KEY=
V0_API_KEY=
OPEN_ROUTER_API_KEY=
NVIDIA_API_KEY=

# OAuth Credentials
# For the OAuth callback URL, use <your_frontend_url>/auth/integrations/oauth_callback,
# e.g. http://localhost:3000/auth/integrations/oauth_callback

@@ -87,7 +69,6 @@ GITHUB_CLIENT_SECRET=

# Google OAuth App server credentials - https://console.cloud.google.com/apis/credentials, and enable gmail api and set scopes
# https://console.cloud.google.com/apis/credentials/consent ?project=<your_project_id>

# You'll need to add/enable the following scopes (minimum):
# https://console.developers.google.com/apis/api/gmail.googleapis.com/overview ?project=<your_project_id>
# https://console.cloud.google.com/apis/library/sheets.googleapis.com/ ?project=<your_project_id>
@@ -123,104 +104,66 @@ LINEAR_CLIENT_SECRET=
TODOIST_CLIENT_ID=
TODOIST_CLIENT_SECRET=

## ===== OPTIONAL API KEYS ===== ##

# LLM
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
AIML_API_KEY=
GROQ_API_KEY=
OPEN_ROUTER_API_KEY=
LLAMA_API_KEY=

# Reddit
# Go to https://www.reddit.com/prefs/apps and create a new app
# Choose "script" for the type
# Fill in the redirect uri as <your_frontend_url>/auth/integrations/oauth_callback, e.g. http://localhost:3000/auth/integrations/oauth_callback
NOTION_CLIENT_ID=
NOTION_CLIENT_SECRET=
REDDIT_CLIENT_ID=
REDDIT_CLIENT_SECRET=
REDDIT_USER_AGENT="AutoGPT:1.0 (by /u/autogpt)"

# Discord
DISCORD_BOT_TOKEN=
# Payment Processing
STRIPE_API_KEY=
STRIPE_WEBHOOK_SECRET=

# SMTP/Email
SMTP_SERVER=
SMTP_PORT=
SMTP_USERNAME=
SMTP_PASSWORD=
# Email Service (for sending notifications and confirmations)
POSTMARK_SERVER_API_TOKEN=
POSTMARK_SENDER_EMAIL=invalid@invalid.com
POSTMARK_WEBHOOK_TOKEN=

# D-ID
# Error Tracking
SENTRY_DSN=

# Cloudflare Turnstile (CAPTCHA) Configuration
# Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
# This is the backend secret key
TURNSTILE_SECRET_KEY=
# This is the verify URL
TURNSTILE_VERIFY_URL=https://challenges.cloudflare.com/turnstile/v0/siteverify

# Feature Flags
LAUNCH_DARKLY_SDK_KEY=

# Content Generation & Media
DID_API_KEY=
FAL_API_KEY=
IDEOGRAM_API_KEY=
REPLICATE_API_KEY=
REVID_API_KEY=
SCREENSHOTONE_API_KEY=
UNREAL_SPEECH_API_KEY=

# Open Weather Map
# Data & Search Services
E2B_API_KEY=
EXA_API_KEY=
JINA_API_KEY=
MEM0_API_KEY=
OPENWEATHERMAP_API_KEY=

# SMTP
SMTP_SERVER=
SMTP_PORT=
SMTP_USERNAME=
SMTP_PASSWORD=

# Medium
MEDIUM_API_KEY=
MEDIUM_AUTHOR_ID=

# Google Maps
GOOGLE_MAPS_API_KEY=

# Replicate
REPLICATE_API_KEY=
# Communication Services
DISCORD_BOT_TOKEN=
MEDIUM_API_KEY=
MEDIUM_AUTHOR_ID=
SMTP_SERVER=
SMTP_PORT=
SMTP_USERNAME=
SMTP_PASSWORD=

# Ideogram
IDEOGRAM_API_KEY=

# Fal
FAL_API_KEY=

# Exa
EXA_API_KEY=

# E2B
E2B_API_KEY=

# Mem0
MEM0_API_KEY=

# Nvidia
NVIDIA_API_KEY=

# Apollo
# Business & Marketing Tools
APOLLO_API_KEY=

# SmartLead
SMARTLEAD_API_KEY=

# ZeroBounce
ZEROBOUNCE_API_KEY=

# Ayrshare
ENRICHLAYER_API_KEY=
AYRSHARE_API_KEY=
AYRSHARE_JWT_KEY=
SMARTLEAD_API_KEY=
ZEROBOUNCE_API_KEY=

## ===== OPTIONAL API KEYS END ===== ##

# Block Error Rate Monitoring
BLOCK_ERROR_RATE_THRESHOLD=0.5
BLOCK_ERROR_RATE_CHECK_INTERVAL_SECS=86400

# Logging Configuration
LOG_LEVEL=INFO
ENABLE_CLOUD_LOGGING=false
ENABLE_FILE_LOGGING=false
# Use to manually set the log directory
# LOG_DIR=./logs

# Example Blocks Configuration
# Set to true to enable example blocks in development
# These blocks are disabled by default in production
ENABLE_EXAMPLE_BLOCKS=false

# Cloud Storage Configuration
# Cleanup interval for expired files (hours between cleanup runs, 1-24 hours)
CLOUD_STORAGE_CLEANUP_INTERVAL_HOURS=6
# Other Services
AUTOMOD_API_KEY=
1 autogpt_platform/backend/.gitignore vendored
@@ -1,3 +1,4 @@
.env
database.db
database.db-journal
dev.db
@@ -8,14 +8,14 @@ WORKDIR /app

RUN echo 'Acquire::http::Pipeline-Depth 0;\nAcquire::http::No-Cache true;\nAcquire::BrokenProxy true;\n' > /etc/apt/apt.conf.d/99fixbadproxy

RUN apt-get update --allow-releaseinfo-change --fix-missing

# Install build dependencies
RUN apt-get install -y build-essential
RUN apt-get install -y libpq5
RUN apt-get install -y libz-dev
RUN apt-get install -y libssl-dev
RUN apt-get install -y postgresql-client
# Update package list and install build dependencies in a single layer
RUN apt-get update --allow-releaseinfo-change --fix-missing \
    && apt-get install -y \
    build-essential \
    libpq5 \
    libz-dev \
    libssl-dev \
    postgresql-client

ENV POETRY_HOME=/opt/poetry
ENV POETRY_NO_INTERACTION=1
@@ -68,6 +68,12 @@ COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.tom

WORKDIR /app/autogpt_platform/backend

FROM server_dependencies AS migrate

# Migration stage only needs schema and migrations - much lighter than full backend
COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/
COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations

FROM server_dependencies AS server

COPY autogpt_platform/backend /app/autogpt_platform/backend
408 autogpt_platform/backend/backend/blocks/enrichlayer/_api.py Normal file
@@ -0,0 +1,408 @@
"""
API module for Enrichlayer integration.

This module provides a client for interacting with the Enrichlayer API,
which allows fetching LinkedIn profile data and related information.
"""

import datetime
import enum
import logging
from json import JSONDecodeError
from typing import Any, Optional, TypeVar

from pydantic import BaseModel, Field

from backend.data.model import APIKeyCredentials
from backend.util.request import Requests

logger = logging.getLogger(__name__)

T = TypeVar("T")


class EnrichlayerAPIException(Exception):
    """Exception raised for Enrichlayer API errors."""

    def __init__(self, message: str, status_code: int):
        super().__init__(message)
        self.status_code = status_code


class FallbackToCache(enum.Enum):
    ON_ERROR = "on-error"
    NEVER = "never"


class UseCache(enum.Enum):
    IF_PRESENT = "if-present"
    NEVER = "never"


class SocialMediaProfiles(BaseModel):
    """Social media profiles model."""

    twitter: Optional[str] = None
    facebook: Optional[str] = None
    github: Optional[str] = None


class Experience(BaseModel):
    """Experience model for LinkedIn profiles."""

    company: Optional[str] = None
    title: Optional[str] = None
    description: Optional[str] = None
    location: Optional[str] = None
    starts_at: Optional[dict[str, int]] = None
    ends_at: Optional[dict[str, int]] = None
    company_linkedin_profile_url: Optional[str] = None


class Education(BaseModel):
    """Education model for LinkedIn profiles."""

    school: Optional[str] = None
    degree_name: Optional[str] = None
    field_of_study: Optional[str] = None
    starts_at: Optional[dict[str, int]] = None
    ends_at: Optional[dict[str, int]] = None
    school_linkedin_profile_url: Optional[str] = None


class PersonProfileResponse(BaseModel):
    """Response model for LinkedIn person profile.

    This model represents the response from Enrichlayer's LinkedIn profile API.
    The API returns comprehensive profile data including work experience,
    education, skills, and contact information (when available).

    Example API Response:
        {
            "public_identifier": "johnsmith",
            "full_name": "John Smith",
            "occupation": "Software Engineer at Tech Corp",
            "experiences": [
                {
                    "company": "Tech Corp",
                    "title": "Software Engineer",
                    "starts_at": {"year": 2020, "month": 1}
                }
            ],
            "education": [...],
            "skills": ["Python", "JavaScript", ...]
        }
    """

    public_identifier: Optional[str] = None
    profile_pic_url: Optional[str] = None
    full_name: Optional[str] = None
    first_name: Optional[str] = None
    last_name: Optional[str] = None
    occupation: Optional[str] = None
    headline: Optional[str] = None
    summary: Optional[str] = None
    country: Optional[str] = None
    country_full_name: Optional[str] = None
    city: Optional[str] = None
    state: Optional[str] = None
    experiences: Optional[list[Experience]] = None
    education: Optional[list[Education]] = None
    languages: Optional[list[str]] = None
    skills: Optional[list[str]] = None
    inferred_salary: Optional[dict[str, Any]] = None
    personal_email: Optional[str] = None
    personal_contact_number: Optional[str] = None
    social_media_profiles: Optional[SocialMediaProfiles] = None
    extra: Optional[dict[str, Any]] = None


class SimilarProfile(BaseModel):
    """Similar profile model for LinkedIn person lookup."""

    similarity: float
    linkedin_profile_url: str


class PersonLookupResponse(BaseModel):
    """Response model for LinkedIn person lookup.

    This model represents the response from Enrichlayer's person lookup API.
    The API returns a LinkedIn profile URL and similarity scores when
    searching for a person by name and company.

    Example API Response:
        {
            "url": "https://www.linkedin.com/in/johnsmith/",
            "name_similarity_score": 0.95,
            "company_similarity_score": 0.88,
            "title_similarity_score": 0.75,
            "location_similarity_score": 0.60
        }
    """

    url: str | None = None
    name_similarity_score: float | None
    company_similarity_score: float | None
    title_similarity_score: float | None
    location_similarity_score: float | None
    last_updated: datetime.datetime | None = None
    profile: PersonProfileResponse | None = None


class RoleLookupResponse(BaseModel):
    """Response model for LinkedIn role lookup.

    This model represents the response from Enrichlayer's role lookup API.
    The API returns LinkedIn profile data for a specific role at a company.

    Example API Response:
        {
            "linkedin_profile_url": "https://www.linkedin.com/in/johnsmith/",
            "profile_data": {...}  // Full PersonProfileResponse data when enrich_profile=True
        }
    """

    linkedin_profile_url: Optional[str] = None
    profile_data: Optional[PersonProfileResponse] = None


class ProfilePictureResponse(BaseModel):
    """Response model for LinkedIn profile picture.

    This model represents the response from Enrichlayer's profile picture API.
    The API returns a URL to the person's LinkedIn profile picture.

    Example API Response:
        {
            "tmp_profile_pic_url": "https://media.licdn.com/dms/image/..."
        }
    """

    tmp_profile_pic_url: str = Field(
        ..., description="URL of the profile picture", alias="tmp_profile_pic_url"
    )

    @property
    def profile_picture_url(self) -> str:
        """Backward compatibility property for profile_picture_url."""
        return self.tmp_profile_pic_url


class EnrichlayerClient:
    """Client for interacting with the Enrichlayer API."""

    API_BASE_URL = "https://enrichlayer.com/api/v2"

    def __init__(
        self,
        credentials: Optional[APIKeyCredentials] = None,
        custom_requests: Optional[Requests] = None,
    ):
        """
        Initialize the Enrichlayer client.

        Args:
            credentials: The credentials to use for authentication.
            custom_requests: Custom Requests instance for testing.
        """
        if custom_requests:
            self._requests = custom_requests
        else:
            headers: dict[str, str] = {
                "Content-Type": "application/json",
            }
            if credentials:
                headers["Authorization"] = (
                    f"Bearer {credentials.api_key.get_secret_value()}"
                )

            self._requests = Requests(
                extra_headers=headers,
                raise_for_status=False,
            )

    async def _handle_response(self, response) -> Any:
        """
        Handle API response and check for errors.

        Args:
            response: The response object from the request.

        Returns:
            The response data.

        Raises:
            EnrichlayerAPIException: If the API request fails.
        """
        if not response.ok:
            try:
                error_data = response.json()
                error_message = error_data.get("message", "")
            except JSONDecodeError:
                error_message = response.text

            raise EnrichlayerAPIException(
                f"Enrichlayer API request failed ({response.status_code}): {error_message}",
                response.status_code,
            )

        return response.json()

    async def fetch_profile(
        self,
        linkedin_url: str,
        fallback_to_cache: FallbackToCache = FallbackToCache.ON_ERROR,
        use_cache: UseCache = UseCache.IF_PRESENT,
        include_skills: bool = False,
        include_inferred_salary: bool = False,
        include_personal_email: bool = False,
        include_personal_contact_number: bool = False,
        include_social_media: bool = False,
        include_extra: bool = False,
    ) -> PersonProfileResponse:
        """
        Fetch a LinkedIn profile with optional parameters.

        Args:
            linkedin_url: The LinkedIn profile URL to fetch.
            fallback_to_cache: Cache usage if live fetch fails ('on-error' or 'never').
            use_cache: Cache utilization ('if-present' or 'never').
            include_skills: Whether to include skills data.
            include_inferred_salary: Whether to include inferred salary data.
            include_personal_email: Whether to include personal email.
            include_personal_contact_number: Whether to include personal contact number.
            include_social_media: Whether to include social media profiles.
            include_extra: Whether to include additional data.

        Returns:
            The LinkedIn profile data.

        Raises:
            EnrichlayerAPIException: If the API request fails.
        """
        params = {
            "url": linkedin_url,
            "fallback_to_cache": fallback_to_cache.value.lower(),
            "use_cache": use_cache.value.lower(),
        }

        if include_skills:
            params["skills"] = "include"
        if include_inferred_salary:
            params["inferred_salary"] = "include"
        if include_personal_email:
            params["personal_email"] = "include"
        if include_personal_contact_number:
            params["personal_contact_number"] = "include"
        if include_social_media:
            params["twitter_profile_id"] = "include"
            params["facebook_profile_id"] = "include"
            params["github_profile_id"] = "include"
        if include_extra:
            params["extra"] = "include"

        response = await self._requests.get(
            f"{self.API_BASE_URL}/profile", params=params
        )
        return PersonProfileResponse(**await self._handle_response(response))

    async def lookup_person(
        self,
        first_name: str,
        company_domain: str,
        last_name: str | None = None,
        location: Optional[str] = None,
        title: Optional[str] = None,
        include_similarity_checks: bool = False,
        enrich_profile: bool = False,
    ) -> PersonLookupResponse:
        """
        Look up a LinkedIn profile by person's information.

        Args:
            first_name: The person's first name.
            last_name: The person's last name.
            company_domain: The domain of the company they work for.
            location: The person's location.
            title: The person's job title.
            include_similarity_checks: Whether to include similarity checks.
            enrich_profile: Whether to enrich the profile.

        Returns:
            The LinkedIn profile lookup result.

        Raises:
            EnrichlayerAPIException: If the API request fails.
        """
        params = {"first_name": first_name, "company_domain": company_domain}

        if last_name:
            params["last_name"] = last_name
        if location:
            params["location"] = location
        if title:
            params["title"] = title
        if include_similarity_checks:
            params["similarity_checks"] = "include"
        if enrich_profile:
            params["enrich_profile"] = "enrich"

        response = await self._requests.get(
            f"{self.API_BASE_URL}/profile/resolve", params=params
        )
        return PersonLookupResponse(**await self._handle_response(response))

    async def lookup_role(
        self, role: str, company_name: str, enrich_profile: bool = False
    ) -> RoleLookupResponse:
        """
        Look up a LinkedIn profile by role in a company.

        Args:
            role: The role title (e.g., CEO, CTO).
            company_name: The name of the company.
            enrich_profile: Whether to enrich the profile.

        Returns:
            The LinkedIn profile lookup result.

        Raises:
            EnrichlayerAPIException: If the API request fails.
        """
        params = {
            "role": role,
            "company_name": company_name,
        }

        if enrich_profile:
            params["enrich_profile"] = "enrich"

        response = await self._requests.get(
            f"{self.API_BASE_URL}/find/company/role", params=params
        )
        return RoleLookupResponse(**await self._handle_response(response))

    async def get_profile_picture(
        self, linkedin_profile_url: str
    ) -> ProfilePictureResponse:
        """
        Get a LinkedIn profile picture URL.

        Args:
            linkedin_profile_url: The LinkedIn profile URL.

        Returns:
            The profile picture URL.

        Raises:
            EnrichlayerAPIException: If the API request fails.
        """
        params = {
            "linkedin_person_profile_url": linkedin_profile_url,
        }

        response = await self._requests.get(
            f"{self.API_BASE_URL}/person/profile-picture", params=params
        )
        return ProfilePictureResponse(**await self._handle_response(response))
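
A short usage sketch for the client above; the credential id and API key are placeholders, and the `APIKeyCredentials` fields follow `_auth.py` below:

```python
# Sketch only: fetch one profile with the EnrichlayerClient defined above.
import asyncio

from pydantic import SecretStr

from backend.blocks.enrichlayer._api import EnrichlayerClient
from backend.data.model import APIKeyCredentials


async def main() -> None:
    credentials = APIKeyCredentials(
        id="00000000-0000-4000-8000-000000000000",  # placeholder id
        provider="enrichlayer",
        api_key=SecretStr("your-enrichlayer-api-key"),  # placeholder key
        title="Enrichlayer API key",
        expires_at=None,
    )
    client = EnrichlayerClient(credentials=credentials)
    profile = await client.fetch_profile(
        linkedin_url="https://www.linkedin.com/in/williamhgates/",
        include_skills=True,
    )
    print(profile.full_name, profile.occupation)


asyncio.run(main())
```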
34 autogpt_platform/backend/backend/blocks/enrichlayer/_auth.py Normal file
@@ -0,0 +1,34 @@
"""
Authentication module for Enrichlayer API integration.

This module provides credential types and test credentials for the Enrichlayer API.
"""

from typing import Literal

from pydantic import SecretStr

from backend.data.model import APIKeyCredentials, CredentialsMetaInput
from backend.integrations.providers import ProviderName

# Define the type of credentials input expected for Enrichlayer API
EnrichlayerCredentialsInput = CredentialsMetaInput[
    Literal[ProviderName.ENRICHLAYER], Literal["api_key"]
]

# Mock credentials for testing Enrichlayer API integration
TEST_CREDENTIALS = APIKeyCredentials(
    id="1234a567-89bc-4def-ab12-3456cdef7890",
    provider="enrichlayer",
    api_key=SecretStr("mock-enrichlayer-api-key"),
    title="Mock Enrichlayer API key",
    expires_at=None,
)

# Dictionary representation of test credentials for input fields
TEST_CREDENTIALS_INPUT = {
    "provider": TEST_CREDENTIALS.provider,
    "id": TEST_CREDENTIALS.id,
    "type": TEST_CREDENTIALS.type,
    "title": TEST_CREDENTIALS.title,
}
527
autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py
Normal file
527
autogpt_platform/backend/backend/blocks/enrichlayer/linkedin.py
Normal file
@@ -0,0 +1,527 @@
|
||||
"""
|
||||
Block definitions for Enrichlayer API integration.
|
||||
|
||||
This module implements blocks for interacting with the Enrichlayer API,
|
||||
which provides access to LinkedIn profile data and related information.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from backend.data.block import Block, BlockCategory, BlockOutput, BlockSchema
|
||||
from backend.data.model import APIKeyCredentials, CredentialsField, SchemaField
|
||||
from backend.util.type import MediaFileType
|
||||
|
||||
from ._api import (
|
||||
EnrichlayerClient,
|
||||
Experience,
|
||||
FallbackToCache,
|
||||
PersonLookupResponse,
|
||||
PersonProfileResponse,
|
||||
RoleLookupResponse,
|
||||
UseCache,
|
||||
)
|
||||
from ._auth import TEST_CREDENTIALS, TEST_CREDENTIALS_INPUT, EnrichlayerCredentialsInput
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class GetLinkedinProfileBlock(Block):
|
||||
"""Block to fetch LinkedIn profile data using Enrichlayer API."""
|
||||
|
||||
class Input(BlockSchema):
|
||||
"""Input schema for GetLinkedinProfileBlock."""
|
||||
|
||||
linkedin_url: str = SchemaField(
|
||||
description="LinkedIn profile URL to fetch data from",
|
||||
placeholder="https://www.linkedin.com/in/username/",
|
||||
)
|
||||
fallback_to_cache: FallbackToCache = SchemaField(
|
||||
description="Cache usage if live fetch fails",
|
||||
default=FallbackToCache.ON_ERROR,
|
||||
advanced=True,
|
||||
)
|
||||
use_cache: UseCache = SchemaField(
|
||||
description="Cache utilization strategy",
|
||||
default=UseCache.IF_PRESENT,
|
||||
advanced=True,
|
||||
)
|
||||
include_skills: bool = SchemaField(
|
||||
description="Include skills data",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
include_inferred_salary: bool = SchemaField(
|
||||
description="Include inferred salary data",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
include_personal_email: bool = SchemaField(
|
||||
description="Include personal email",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
include_personal_contact_number: bool = SchemaField(
|
||||
description="Include personal contact number",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
include_social_media: bool = SchemaField(
|
||||
description="Include social media profiles",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
include_extra: bool = SchemaField(
|
||||
description="Include additional data",
|
||||
default=False,
|
||||
advanced=True,
|
||||
)
|
||||
credentials: EnrichlayerCredentialsInput = CredentialsField(
|
||||
description="Enrichlayer API credentials"
|
||||
)
|
||||
|
||||
class Output(BlockSchema):
|
||||
"""Output schema for GetLinkedinProfileBlock."""
|
||||
|
||||
profile: PersonProfileResponse = SchemaField(
|
||||
description="LinkedIn profile data"
|
||||
)
|
||||
error: str = SchemaField(description="Error message if the request failed")
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize GetLinkedinProfileBlock."""
|
||||
super().__init__(
|
||||
id="f6e0ac73-4f1d-4acb-b4b7-b67066c5984e",
|
||||
description="Fetch LinkedIn profile data using Enrichlayer",
|
||||
categories={BlockCategory.SOCIAL},
|
||||
input_schema=GetLinkedinProfileBlock.Input,
|
||||
output_schema=GetLinkedinProfileBlock.Output,
|
||||
test_input={
|
||||
"linkedin_url": "https://www.linkedin.com/in/williamhgates/",
|
||||
"include_skills": True,
|
||||
"include_social_media": True,
|
||||
"credentials": TEST_CREDENTIALS_INPUT,
|
||||
},
|
||||
test_output=[
|
||||
(
|
||||
"profile",
|
||||
PersonProfileResponse(
|
||||
public_identifier="williamhgates",
|
||||
full_name="Bill Gates",
|
||||
occupation="Co-chair at Bill & Melinda Gates Foundation",
|
||||
experiences=[
|
||||
Experience(
|
||||
company="Bill & Melinda Gates Foundation",
|
||||
title="Co-chair",
|
||||
starts_at={"year": 2000},
|
||||
)
|
||||
],
|
||||
),
|
||||
)
|
||||
],
|
||||
test_credentials=TEST_CREDENTIALS,
|
||||
test_mock={
|
||||
"_fetch_profile": lambda *args, **kwargs: PersonProfileResponse(
|
||||
public_identifier="williamhgates",
|
||||
full_name="Bill Gates",
|
||||
occupation="Co-chair at Bill & Melinda Gates Foundation",
|
||||
experiences=[
|
||||
Experience(
|
||||
company="Bill & Melinda Gates Foundation",
|
||||
title="Co-chair",
|
||||
starts_at={"year": 2000},
|
||||
)
|
||||
],
|
||||
),
|
||||
},
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
async def _fetch_profile(
|
||||
credentials: APIKeyCredentials,
|
||||
linkedin_url: str,
|
||||
fallback_to_cache: FallbackToCache = FallbackToCache.ON_ERROR,
|
||||
use_cache: UseCache = UseCache.IF_PRESENT,
|
||||
include_skills: bool = False,
|
||||
include_inferred_salary: bool = False,
|
||||
include_personal_email: bool = False,
|
||||
include_personal_contact_number: bool = False,
|
||||
include_social_media: bool = False,
|
||||
include_extra: bool = False,
|
||||
):
|
||||
client = EnrichlayerClient(credentials)
|
||||
profile = await client.fetch_profile(
|
||||
linkedin_url=linkedin_url,
|
||||
fallback_to_cache=fallback_to_cache,
|
||||
use_cache=use_cache,
|
||||
include_skills=include_skills,
|
||||
include_inferred_salary=include_inferred_salary,
|
||||
include_personal_email=include_personal_email,
|
||||
include_personal_contact_number=include_personal_contact_number,
|
||||
include_social_media=include_social_media,
|
||||
include_extra=include_extra,
|
||||
)
|
||||
return profile
|
||||
|
||||
async def run(
|
||||
self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
|
||||
) -> BlockOutput:
|
||||
"""
|
||||
Run the block to fetch LinkedIn profile data.
|
||||
|
||||
Args:
|
||||
input_data: Input parameters for the block
|
||||
credentials: API key credentials for Enrichlayer
|
||||
**kwargs: Additional keyword arguments
|
||||
|
||||
Yields:
|
||||
Tuples of (output_name, output_value)
|
||||
"""
|
||||
try:
|
||||
profile = await self._fetch_profile(
|
||||
credentials=credentials,
|
||||
linkedin_url=input_data.linkedin_url,
|
||||
fallback_to_cache=input_data.fallback_to_cache,
|
||||
use_cache=input_data.use_cache,
|
||||
include_skills=input_data.include_skills,
|
||||
include_inferred_salary=input_data.include_inferred_salary,
|
||||
include_personal_email=input_data.include_personal_email,
|
||||
include_personal_contact_number=input_data.include_personal_contact_number,
|
||||
include_social_media=input_data.include_social_media,
|
||||
include_extra=input_data.include_extra,
|
||||
)
|
||||
yield "profile", profile
|
||||
except Exception as e:
|
||||
logger.error(f"Error fetching LinkedIn profile: {str(e)}")
|
||||
yield "error", str(e)
|
||||
|
||||
|
||||
class LinkedinPersonLookupBlock(Block):
    """Block to look up LinkedIn profiles by a person's information using the Enrichlayer API."""

    class Input(BlockSchema):
        """Input schema for LinkedinPersonLookupBlock."""

        first_name: str = SchemaField(
            description="Person's first name",
            placeholder="John",
            advanced=False,
        )
        last_name: str | None = SchemaField(
            description="Person's last name",
            placeholder="Doe",
            default=None,
            advanced=False,
        )
        company_domain: str = SchemaField(
            description="Domain of the company they work for",
            placeholder="example.com",
            advanced=False,
        )
        location: Optional[str] = SchemaField(
            description="Person's location (optional)",
            placeholder="San Francisco",
            default=None,
        )
        title: Optional[str] = SchemaField(
            description="Person's job title (optional)",
            placeholder="CEO",
            default=None,
        )
        include_similarity_checks: bool = SchemaField(
            description="Include similarity checks",
            default=False,
            advanced=True,
        )
        enrich_profile: bool = SchemaField(
            description="Enrich the profile with additional data",
            default=False,
            advanced=True,
        )
        credentials: EnrichlayerCredentialsInput = CredentialsField(
            description="Enrichlayer API credentials"
        )

    class Output(BlockSchema):
        """Output schema for LinkedinPersonLookupBlock."""

        lookup_result: PersonLookupResponse = SchemaField(
            description="LinkedIn profile lookup result"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        """Initialize LinkedinPersonLookupBlock."""
        super().__init__(
            id="d237a98a-5c4b-4a1c-b9e3-e6f9a6c81df7",
            description="Look up LinkedIn profiles by person information using Enrichlayer",
            categories={BlockCategory.SOCIAL},
            input_schema=LinkedinPersonLookupBlock.Input,
            output_schema=LinkedinPersonLookupBlock.Output,
            test_input={
                "first_name": "Bill",
                "last_name": "Gates",
                "company_domain": "gatesfoundation.org",
                "include_similarity_checks": True,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                (
                    "lookup_result",
                    PersonLookupResponse(
                        url="https://www.linkedin.com/in/williamhgates/",
                        name_similarity_score=0.93,
                        company_similarity_score=0.83,
                        title_similarity_score=0.3,
                        location_similarity_score=0.20,
                    ),
                )
            ],
            test_credentials=TEST_CREDENTIALS,
            test_mock={
                "_lookup_person": lambda *args, **kwargs: PersonLookupResponse(
                    url="https://www.linkedin.com/in/williamhgates/",
                    name_similarity_score=0.93,
                    company_similarity_score=0.83,
                    title_similarity_score=0.3,
                    location_similarity_score=0.20,
                )
            },
        )

    @staticmethod
    async def _lookup_person(
        credentials: APIKeyCredentials,
        first_name: str,
        company_domain: str,
        last_name: str | None = None,
        location: Optional[str] = None,
        title: Optional[str] = None,
        include_similarity_checks: bool = False,
        enrich_profile: bool = False,
    ):
        client = EnrichlayerClient(credentials=credentials)
        lookup_result = await client.lookup_person(
            first_name=first_name,
            last_name=last_name,
            company_domain=company_domain,
            location=location,
            title=title,
            include_similarity_checks=include_similarity_checks,
            enrich_profile=enrich_profile,
        )
        return lookup_result

    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        """
        Run the block to look up LinkedIn profiles.

        Args:
            input_data: Input parameters for the block
            credentials: API key credentials for Enrichlayer
            **kwargs: Additional keyword arguments

        Yields:
            Tuples of (output_name, output_value)
        """
        try:
            lookup_result = await self._lookup_person(
                credentials=credentials,
                first_name=input_data.first_name,
                last_name=input_data.last_name,
                company_domain=input_data.company_domain,
                location=input_data.location,
                title=input_data.title,
                include_similarity_checks=input_data.include_similarity_checks,
                enrich_profile=input_data.enrich_profile,
            )
            yield "lookup_result", lookup_result
        except Exception as e:
            logger.error(f"Error looking up LinkedIn profile: {str(e)}")
            yield "error", str(e)

class LinkedinRoleLookupBlock(Block):
    """Block to look up LinkedIn profiles by role in a company using the Enrichlayer API."""

    class Input(BlockSchema):
        """Input schema for LinkedinRoleLookupBlock."""

        role: str = SchemaField(
            description="Role title (e.g., CEO, CTO)",
            placeholder="CEO",
        )
        company_name: str = SchemaField(
            description="Name of the company",
            placeholder="Microsoft",
        )
        enrich_profile: bool = SchemaField(
            description="Enrich the profile with additional data",
            default=False,
            advanced=True,
        )
        credentials: EnrichlayerCredentialsInput = CredentialsField(
            description="Enrichlayer API credentials"
        )

    class Output(BlockSchema):
        """Output schema for LinkedinRoleLookupBlock."""

        role_lookup_result: RoleLookupResponse = SchemaField(
            description="LinkedIn role lookup result"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        """Initialize LinkedinRoleLookupBlock."""
        super().__init__(
            id="3b9fc742-06d4-49c7-b5ce-7e302dd7c8a7",
            description="Look up LinkedIn profiles by role in a company using Enrichlayer",
            categories={BlockCategory.SOCIAL},
            input_schema=LinkedinRoleLookupBlock.Input,
            output_schema=LinkedinRoleLookupBlock.Output,
            test_input={
                "role": "Co-chair",
                "company_name": "Gates Foundation",
                "enrich_profile": True,
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                (
                    "role_lookup_result",
                    RoleLookupResponse(
                        linkedin_profile_url="https://www.linkedin.com/in/williamhgates/",
                    ),
                )
            ],
            test_credentials=TEST_CREDENTIALS,
            test_mock={
                "_lookup_role": lambda *args, **kwargs: RoleLookupResponse(
                    linkedin_profile_url="https://www.linkedin.com/in/williamhgates/",
                ),
            },
        )

    @staticmethod
    async def _lookup_role(
        credentials: APIKeyCredentials,
        role: str,
        company_name: str,
        enrich_profile: bool = False,
    ):
        client = EnrichlayerClient(credentials=credentials)
        role_lookup_result = await client.lookup_role(
            role=role,
            company_name=company_name,
            enrich_profile=enrich_profile,
        )
        return role_lookup_result

    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        """
        Run the block to look up LinkedIn profiles by role.

        Args:
            input_data: Input parameters for the block
            credentials: API key credentials for Enrichlayer
            **kwargs: Additional keyword arguments

        Yields:
            Tuples of (output_name, output_value)
        """
        try:
            role_lookup_result = await self._lookup_role(
                credentials=credentials,
                role=input_data.role,
                company_name=input_data.company_name,
                enrich_profile=input_data.enrich_profile,
            )
            yield "role_lookup_result", role_lookup_result
        except Exception as e:
            logger.error(f"Error looking up role in company: {str(e)}")
            yield "error", str(e)

class GetLinkedinProfilePictureBlock(Block):
    """Block to get LinkedIn profile pictures using the Enrichlayer API."""

    class Input(BlockSchema):
        """Input schema for GetLinkedinProfilePictureBlock."""

        linkedin_profile_url: str = SchemaField(
            description="LinkedIn profile URL",
            placeholder="https://www.linkedin.com/in/username/",
        )
        credentials: EnrichlayerCredentialsInput = CredentialsField(
            description="Enrichlayer API credentials"
        )

    class Output(BlockSchema):
        """Output schema for GetLinkedinProfilePictureBlock."""

        profile_picture_url: MediaFileType = SchemaField(
            description="LinkedIn profile picture URL"
        )
        error: str = SchemaField(description="Error message if the request failed")

    def __init__(self):
        """Initialize GetLinkedinProfilePictureBlock."""
        super().__init__(
            id="68d5a942-9b3f-4e9a-b7c1-d96ea4321f0d",
            description="Get LinkedIn profile pictures using Enrichlayer",
            categories={BlockCategory.SOCIAL},
            input_schema=GetLinkedinProfilePictureBlock.Input,
            output_schema=GetLinkedinProfilePictureBlock.Output,
            test_input={
                "linkedin_profile_url": "https://www.linkedin.com/in/williamhgates/",
                "credentials": TEST_CREDENTIALS_INPUT,
            },
            test_output=[
                (
                    "profile_picture_url",
                    "https://media.licdn.com/dms/image/C4D03AQFj-xjuXrLFSQ/profile-displayphoto-shrink_800_800/0/1576881858598?e=1686787200&v=beta&t=zrQC76QwsfQQIWthfOnrKRBMZ5D-qIAvzLXLmWgYvTk",
                )
            ],
            test_credentials=TEST_CREDENTIALS,
            test_mock={
                "_get_profile_picture": lambda *args, **kwargs: "https://media.licdn.com/dms/image/C4D03AQFj-xjuXrLFSQ/profile-displayphoto-shrink_800_800/0/1576881858598?e=1686787200&v=beta&t=zrQC76QwsfQQIWthfOnrKRBMZ5D-qIAvzLXLmWgYvTk",
            },
        )

    @staticmethod
    async def _get_profile_picture(
        credentials: APIKeyCredentials, linkedin_profile_url: str
    ):
        client = EnrichlayerClient(credentials=credentials)
        profile_picture_response = await client.get_profile_picture(
            linkedin_profile_url=linkedin_profile_url,
        )
        return profile_picture_response.profile_picture_url

    async def run(
        self, input_data: Input, *, credentials: APIKeyCredentials, **kwargs
    ) -> BlockOutput:
        """
        Run the block to get LinkedIn profile pictures.

        Args:
            input_data: Input parameters for the block
            credentials: API key credentials for Enrichlayer
            **kwargs: Additional keyword arguments

        Yields:
            Tuples of (output_name, output_value)
        """
        try:
            profile_picture = await self._get_profile_picture(
                credentials=credentials,
                linkedin_profile_url=input_data.linkedin_profile_url,
            )
            yield "profile_picture_url", profile_picture
        except Exception as e:
            logger.error(f"Error getting profile picture: {str(e)}")
            yield "error", str(e)

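Note the recurring test pattern across these blocks: each declares test_input, test_output, and a test_mock that substitutes the static API-call helper, so self-tests never hit the Enrichlayer API. Conceptually it amounts to something like the following (a simplification for illustration, not the platform's actual test harness):

    # Simplified sketch of what a test_mock entry amounts to in a self-test.
    async def fake_get_profile_picture(*args, **kwargs):
        return "https://media.licdn.com/..."  # canned response, stands in for the API

    block = GetLinkedinProfilePictureBlock()
    block._get_profile_picture = fake_get_profile_picture
    # block.run(...) now exercises the block's control flow against the stub
    # instead of making a network call.
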
5
autogpt_platform/backend/backend/blocks/firecrawl/extract.py
Normal file → Executable file
@@ -29,8 +29,8 @@ class FirecrawlExtractBlock(Block):
    prompt: str | None = SchemaField(
        description="The prompt to use for the crawl", default=None, advanced=False
    )
    output_schema: str | None = SchemaField(
        description="A more rigid structure if you already know the JSON layout.",
    output_schema: dict | None = SchemaField(
        description="A JSON Schema describing the output structure if a more rigid structure is desired.",
        default=None,
    )
    enable_web_search: bool = SchemaField(
@@ -56,7 +56,6 @@ class FirecrawlExtractBlock(Block):

    app = FirecrawlApp(api_key=credentials.api_key.get_secret_value())

    # Sync call
    extract_result = app.extract(
        urls=input_data.urls,
        prompt=input_data.prompt,

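With output_schema now typed as a dict, callers can pass a JSON Schema directly rather than a string. An illustrative value (field names here are made up for the example):

    # Illustrative JSON Schema for the dict-typed output_schema field.
    output_schema = {
        "type": "object",
        "properties": {
            "title": {"type": "string"},
            "price": {"type": "number"},
        },
        "required": ["title"],
    }
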
@@ -37,6 +37,7 @@ LLMProviderName = Literal[
    ProviderName.OPENAI,
    ProviderName.OPEN_ROUTER,
    ProviderName.LLAMA_API,
    ProviderName.V0,
]
AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]]

@@ -155,6 +156,10 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
    LLAMA_API_LLAMA4_MAVERICK = "Llama-4-Maverick-17B-128E-Instruct-FP8"
    LLAMA_API_LLAMA3_3_8B = "Llama-3.3-8B-Instruct"
    LLAMA_API_LLAMA3_3_70B = "Llama-3.3-70B-Instruct"
    # v0 by Vercel models
    V0_1_5_MD = "v0-1.5-md"
    V0_1_5_LG = "v0-1.5-lg"
    V0_1_0_MD = "v0-1.0-md"

    @property
    def metadata(self) -> ModelMetadata:
@@ -280,6 +285,10 @@ MODEL_METADATA = {
    LlmModel.LLAMA_API_LLAMA4_MAVERICK: ModelMetadata("llama_api", 128000, 4028),
    LlmModel.LLAMA_API_LLAMA3_3_8B: ModelMetadata("llama_api", 128000, 4028),
    LlmModel.LLAMA_API_LLAMA3_3_70B: ModelMetadata("llama_api", 128000, 4028),
    # v0 by Vercel models
    LlmModel.V0_1_5_MD: ModelMetadata("v0", 128000, 64000),
    LlmModel.V0_1_5_LG: ModelMetadata("v0", 512000, 64000),
    LlmModel.V0_1_0_MD: ModelMetadata("v0", 128000, 64000),
}

for model in LlmModel:
@@ -700,6 +709,42 @@ async def llm_call(
            ),
            reasoning=None,
        )
    elif provider == "v0":
        tools_param = tools if tools else openai.NOT_GIVEN
        client = openai.AsyncOpenAI(
            base_url="https://api.v0.dev/v1",
            api_key=credentials.api_key.get_secret_value(),
        )

        response_format = None
        if json_format:
            response_format = {"type": "json_object"}

        parallel_tool_calls_param = get_parallel_tool_calls_param(
            llm_model, parallel_tool_calls
        )

        response = await client.chat.completions.create(
            model=llm_model.value,
            messages=prompt,  # type: ignore
            response_format=response_format,  # type: ignore
            max_tokens=max_tokens,
            tools=tools_param,  # type: ignore
            parallel_tool_calls=parallel_tool_calls_param,
        )

        tool_calls = extract_openai_tool_calls(response)
        reasoning = extract_openai_reasoning(response)

        return LLMResponse(
            raw_response=response.choices[0].message,
            prompt=prompt,
            response=response.choices[0].message.content or "",
            tool_calls=tool_calls,
            prompt_tokens=response.usage.prompt_tokens if response.usage else 0,
            completion_tokens=response.usage.completion_tokens if response.usage else 0,
            reasoning=reasoning,
        )
    else:
        raise ValueError(f"Unsupported LLM provider: {provider}")

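As the new branch shows, v0 is wired up as an OpenAI-compatible endpoint, so the same client pattern can be exercised standalone. A minimal sketch (the API key value is a placeholder; model name and base URL are taken from the diff above):

    import asyncio

    import openai


    async def main():
        # v0 exposes an OpenAI-compatible Chat Completions API, so the standard
        # AsyncOpenAI client works with a custom base_url.
        client = openai.AsyncOpenAI(
            base_url="https://api.v0.dev/v1",
            api_key="V0_API_KEY",  # placeholder; supply a real key
        )
        response = await client.chat.completions.create(
            model="v0-1.5-md",
            messages=[{"role": "user", "content": "Say hello."}],
            max_tokens=32,
        )
        print(response.choices[0].message.content)


    asyncio.run(main())
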
@@ -5,6 +5,12 @@ from backend.blocks.ai_shortform_video_block import AIShortformVideoCreatorBlock
from backend.blocks.apollo.organization import SearchOrganizationsBlock
from backend.blocks.apollo.people import SearchPeopleBlock
from backend.blocks.apollo.person import GetPersonDetailBlock
from backend.blocks.enrichlayer.linkedin import (
    GetLinkedinProfileBlock,
    GetLinkedinProfilePictureBlock,
    LinkedinPersonLookupBlock,
    LinkedinRoleLookupBlock,
)
from backend.blocks.flux_kontext import AIImageEditorBlock, FluxKontextModelName
from backend.blocks.ideogram import IdeogramModelBlock
from backend.blocks.jina.embeddings import JinaEmbeddingBlock
@@ -30,6 +36,7 @@ from backend.integrations.credentials_store import (
    anthropic_credentials,
    apollo_credentials,
    did_credentials,
    enrichlayer_credentials,
    groq_credentials,
    ideogram_credentials,
    jina_credentials,
@@ -39,6 +46,7 @@ from backend.integrations.credentials_store import (
    replicate_credentials,
    revid_credentials,
    unreal_credentials,
    v0_credentials,
)

# =============== Configure the cost for each LLM Model call =============== #
@@ -115,6 +123,10 @@ MODEL_COST: dict[LlmModel, int] = {
    LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: 1,
    LlmModel.GEMINI_2_0_FLASH_LITE: 1,
    LlmModel.DEEPSEEK_R1_0528: 1,
    # v0 by Vercel models
    LlmModel.V0_1_5_MD: 1,
    LlmModel.V0_1_5_LG: 2,
    LlmModel.V0_1_0_MD: 1,
}

for model in LlmModel:
@@ -204,6 +216,23 @@ LLM_COST = (
        for model, cost in MODEL_COST.items()
        if MODEL_METADATA[model].provider == "llama_api"
    ]
    # v0 by Vercel Models
    + [
        BlockCost(
            cost_type=BlockCostType.RUN,
            cost_filter={
                "model": model,
                "credentials": {
                    "id": v0_credentials.id,
                    "provider": v0_credentials.provider,
                    "type": v0_credentials.type,
                },
            },
            cost_amount=cost,
        )
        for model, cost in MODEL_COST.items()
        if MODEL_METADATA[model].provider == "v0"
    ]
    # AI/ML Api Models
    + [
        BlockCost(
@@ -376,6 +405,54 @@ BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
            },
        )
    ],
    GetLinkedinProfileBlock: [
        BlockCost(
            cost_amount=1,
            cost_filter={
                "credentials": {
                    "id": enrichlayer_credentials.id,
                    "provider": enrichlayer_credentials.provider,
                    "type": enrichlayer_credentials.type,
                }
            },
        )
    ],
    LinkedinPersonLookupBlock: [
        BlockCost(
            cost_amount=2,
            cost_filter={
                "credentials": {
                    "id": enrichlayer_credentials.id,
                    "provider": enrichlayer_credentials.provider,
                    "type": enrichlayer_credentials.type,
                }
            },
        )
    ],
    LinkedinRoleLookupBlock: [
        BlockCost(
            cost_amount=3,
            cost_filter={
                "credentials": {
                    "id": enrichlayer_credentials.id,
                    "provider": enrichlayer_credentials.provider,
                    "type": enrichlayer_credentials.type,
                }
            },
        )
    ],
    GetLinkedinProfilePictureBlock: [
        BlockCost(
            cost_amount=3,
            cost_filter={
                "credentials": {
                    "id": enrichlayer_credentials.id,
                    "provider": enrichlayer_credentials.provider,
                    "type": enrichlayer_credentials.type,
                }
            },
        )
    ],
    SmartDecisionMakerBlock: LLM_COST,
    SearchOrganizationsBlock: [
        BlockCost(

@@ -34,10 +34,10 @@ from backend.data.model import (
from backend.data.notifications import NotificationEventModel, RefundRequestData
from backend.data.user import get_user_by_id, get_user_email_by_id
from backend.notifications.notifications import queue_notification_async
from backend.server.model import Pagination
from backend.server.v2.admin.model import UserHistoryResponse
from backend.util.exceptions import InsufficientBalanceError
from backend.util.json import SafeJson
from backend.util.models import Pagination
from backend.util.retry import func_retry
from backend.util.settings import Settings

@@ -286,11 +286,17 @@ class UserCreditBase(ABC):
        transaction = await CreditTransaction.prisma().find_first_or_raise(
            where={"transactionKey": transaction_key, "userId": user_id}
        )

        if transaction.isActive:
            return

        async with db.locked_transaction(f"usr_trx_{user_id}"):

            transaction = await CreditTransaction.prisma().find_first_or_raise(
                where={"transactionKey": transaction_key, "userId": user_id}
            )
            if transaction.isActive:
                return

            user_balance, _ = await self._get_credits(user_id)
            await CreditTransaction.prisma().update(
                where={

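The change above re-checks isActive inside the lock: classic double-checked locking for idempotent activation, so two concurrent callers cannot both apply the transaction. Distilled as a generic sketch (the lock, fetch, and apply callables are illustrative stand-ins, not the platform's API):

    # Minimal sketch of the idempotency pattern, under stated assumptions.
    async def apply_once(key: str, lock, fetch, apply):
        record = await fetch(key)
        if record.is_active:           # fast path: already applied, no lock needed
            return
        async with lock(key):
            record = await fetch(key)  # re-check under the lock to close the
            if record.is_active:       # race window with a concurrent applier
                return
            await apply(record)
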
109
autogpt_platform/backend/backend/data/generate_data.py
Normal file
@@ -0,0 +1,109 @@
import logging
from collections import defaultdict
from datetime import datetime

from prisma.enums import AgentExecutionStatus

from backend.data.execution import get_graph_executions
from backend.data.graph import get_graph_metadata
from backend.data.model import UserExecutionSummaryStats
from backend.server.v2.store.exceptions import DatabaseError
from backend.util.logging import TruncatedLogger

logger = TruncatedLogger(logging.getLogger(__name__), prefix="[SummaryData]")


async def get_user_execution_summary_data(
    user_id: str, start_time: datetime, end_time: datetime
) -> UserExecutionSummaryStats:
    """Gather all summary data for a user in a time range.

    This function fetches graph executions once and aggregates all required
    statistics in a single pass for efficiency.
    """
    try:
        # Fetch graph executions once
        executions = await get_graph_executions(
            user_id=user_id,
            created_time_gte=start_time,
            created_time_lte=end_time,
        )

        # Initialize aggregation variables
        total_credits_used = 0.0
        total_executions = len(executions)
        successful_runs = 0
        failed_runs = 0
        terminated_runs = 0
        execution_times = []
        agent_usage = defaultdict(int)
        cost_by_graph_id = defaultdict(float)

        # Single pass through executions to aggregate all stats
        for execution in executions:
            # Count execution statuses (including TERMINATED as failed)
            if execution.status == AgentExecutionStatus.COMPLETED:
                successful_runs += 1
            elif execution.status == AgentExecutionStatus.FAILED:
                failed_runs += 1
            elif execution.status == AgentExecutionStatus.TERMINATED:
                terminated_runs += 1

            # Aggregate costs from stats
            if execution.stats and hasattr(execution.stats, "cost"):
                cost_in_dollars = execution.stats.cost / 100
                total_credits_used += cost_in_dollars
                cost_by_graph_id[execution.graph_id] += cost_in_dollars

            # Collect execution times
            if execution.stats and hasattr(execution.stats, "duration"):
                execution_times.append(execution.stats.duration)

            # Count agent usage
            agent_usage[execution.graph_id] += 1

        # Calculate derived stats
        total_execution_time = sum(execution_times)
        average_execution_time = (
            total_execution_time / len(execution_times) if execution_times else 0
        )

        # Find most used agent
        most_used_agent = "No agents used"
        if agent_usage:
            most_used_agent_id = max(agent_usage, key=lambda k: agent_usage[k])
            try:
                graph_meta = await get_graph_metadata(graph_id=most_used_agent_id)
                most_used_agent = (
                    graph_meta.name if graph_meta else f"Agent {most_used_agent_id[:8]}"
                )
            except Exception:
                logger.warning(f"Could not get metadata for graph {most_used_agent_id}")
                most_used_agent = f"Agent {most_used_agent_id[:8]}"

        # Convert graph_ids to agent names for cost breakdown
        cost_breakdown = {}
        for graph_id, cost in cost_by_graph_id.items():
            try:
                graph_meta = await get_graph_metadata(graph_id=graph_id)
                agent_name = graph_meta.name if graph_meta else f"Agent {graph_id[:8]}"
            except Exception:
                logger.warning(f"Could not get metadata for graph {graph_id}")
                agent_name = f"Agent {graph_id[:8]}"
            cost_breakdown[agent_name] = cost

        # Build the summary stats object (include terminated runs as failed)
        return UserExecutionSummaryStats(
            total_credits_used=total_credits_used,
            total_executions=total_executions,
            successful_runs=successful_runs,
            failed_runs=failed_runs + terminated_runs,
            most_used_agent=most_used_agent,
            total_execution_time=total_execution_time,
            average_execution_time=average_execution_time,
            cost_breakdown=cost_breakdown,
        )

    except Exception as e:
        logger.error(f"Failed to get user summary data: {e}")
        raise DatabaseError(f"Failed to get user summary data: {e}") from e

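A quick usage sketch for the new helper, assuming it is called directly rather than through the service client (the user ID is a placeholder):

    # Hypothetical standalone usage: summarize the last 7 days for a user.
    import asyncio
    from datetime import datetime, timedelta, timezone

    from backend.data.generate_data import get_user_execution_summary_data


    async def main():
        end = datetime.now(tz=timezone.utc)
        stats = await get_user_execution_summary_data(
            user_id="user-123",  # illustrative ID
            start_time=end - timedelta(days=7),
            end_time=end,
        )
        print(stats.total_executions, stats.most_used_agent)


    asyncio.run(main())
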
@@ -821,3 +821,21 @@ class GraphExecutionStats(BaseModel):
    activity_status: Optional[str] = Field(
        default=None, description="AI-generated summary of what the agent did"
    )


class UserExecutionSummaryStats(BaseModel):
    """Summary statistics for a single user over a time range."""

    model_config = ConfigDict(
        extra="allow",
        arbitrary_types_allowed=True,
    )

    total_credits_used: float = Field(default=0)
    total_executions: int = Field(default=0)
    successful_runs: int = Field(default=0)
    failed_runs: int = Field(default=0)
    most_used_agent: str = Field(default="")
    total_execution_time: float = Field(default=0)
    average_execution_time: float = Field(default=0)
    cost_breakdown: dict[str, float] = Field(default_factory=dict)

@@ -20,6 +20,7 @@ from backend.data.execution (
    upsert_execution_input,
    upsert_execution_output,
)
from backend.data.generate_data import get_user_execution_summary_data
from backend.data.graph import (
    get_connected_output_nodes,
    get_graph,
@@ -144,6 +145,9 @@ class DatabaseManager(AppService):
        get_user_notification_oldest_message_in_batch
    )

    # Summary data - async
    get_user_execution_summary_data = _(get_user_execution_summary_data)


class DatabaseManagerClient(AppServiceClient):
    d = DatabaseManager
@@ -169,6 +173,9 @@ class DatabaseManagerClient(AppServiceClient):
    spend_credits = _(d.spend_credits)
    get_credits = _(d.get_credits)

    # Summary data - async
    get_user_execution_summary_data = _(d.get_user_execution_summary_data)

    # Block error monitoring
    get_block_error_stats = _(d.get_block_error_stats)

@@ -215,3 +222,6 @@ class DatabaseManagerAsyncClient(AppServiceClient):
    get_user_notification_oldest_message_in_batch = (
        d.get_user_notification_oldest_message_in_batch
    )

    # Summary data
    get_user_execution_summary_data = d.get_user_execution_summary_data

@@ -307,9 +307,10 @@ class Scheduler(AppService):

        if self.register_system_tasks:
            # Notification PROCESS WEEKLY SUMMARY
            # Runs every Monday at 9 AM UTC
            self.scheduler.add_job(
                process_weekly_summary,
                CronTrigger.from_crontab("0 * * * *"),
                CronTrigger.from_crontab("0 9 * * 1"),
                id="process_weekly_summary",
                kwargs={},
                replace_existing=True,

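The old expression "0 * * * *" fired every hour; "0 9 * * 1" actually matches the comment (Mondays at 09:00 UTC). A quick way to sanity-check a crontab against APScheduler, as a standalone sketch:

    from datetime import datetime, timezone

    from apscheduler.triggers.cron import CronTrigger

    # "0 9 * * 1" = minute 0, hour 9, any day-of-month, any month, Monday.
    trigger = CronTrigger.from_crontab("0 9 * * 1", timezone="UTC")
    now = datetime.now(tz=timezone.utc)
    print(trigger.get_next_fire_time(None, now))  # next Monday, 09:00 UTC
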
@@ -182,6 +182,15 @@ zerobounce_credentials = APIKeyCredentials(
    expires_at=None,
)

enrichlayer_credentials = APIKeyCredentials(
    id="d9fce73a-6c1d-4e8b-ba2e-12a456789def",
    provider="enrichlayer",
    api_key=SecretStr(settings.secrets.enrichlayer_api_key),
    title="Use Credits for Enrichlayer",
    expires_at=None,
)


llama_api_credentials = APIKeyCredentials(
    id="d44045af-1c33-4833-9e19-752313214de2",
    provider="llama_api",
@@ -190,6 +199,14 @@ llama_api_credentials = APIKeyCredentials(
    expires_at=None,
)

v0_credentials = APIKeyCredentials(
    id="c4e6d1a0-3b5f-4789-a8e2-9b123456789f",
    provider="v0",
    api_key=SecretStr(settings.secrets.v0_api_key),
    title="Use Credits for v0 by Vercel",
    expires_at=None,
)

DEFAULT_CREDENTIALS = [
    ollama_credentials,
    revid_credentials,
@@ -203,6 +220,7 @@ DEFAULT_CREDENTIALS = [
    jina_credentials,
    unreal_credentials,
    open_router_credentials,
    enrichlayer_credentials,
    fal_credentials,
    exa_credentials,
    e2b_credentials,
@@ -213,6 +231,8 @@ DEFAULT_CREDENTIALS = [
    smartlead_credentials,
    zerobounce_credentials,
    google_maps_credentials,
    llama_api_credentials,
    v0_credentials,
]


@@ -279,6 +299,8 @@ class IntegrationCredentialsStore:
        all_credentials.append(unreal_credentials)
    if settings.secrets.open_router_api_key:
        all_credentials.append(open_router_credentials)
    if settings.secrets.enrichlayer_api_key:
        all_credentials.append(enrichlayer_credentials)
    if settings.secrets.fal_api_key:
        all_credentials.append(fal_credentials)
    if settings.secrets.exa_api_key:

@@ -25,6 +25,7 @@ class ProviderName(str, Enum):
    GROQ = "groq"
    HTTP = "http"
    HUBSPOT = "hubspot"
    ENRICHLAYER = "enrichlayer"
    IDEOGRAM = "ideogram"
    JINA = "jina"
    LLAMA_API = "llama_api"
@@ -47,6 +48,7 @@ class ProviderName(str, Enum):
    TWITTER = "twitter"
    TODOIST = "todoist"
    UNREAL_SPEECH = "unreal_speech"
    V0 = "v0"
    ZEROBOUNCE = "zerobounce"

    @classmethod

@@ -223,10 +223,14 @@ class NotificationManager(AppService):
        processed_count = 0
        current_time = datetime.now(tz=timezone.utc)
        start_time = current_time - timedelta(days=7)
        logger.info(
            f"Querying for active users between {start_time} and {current_time}"
        )
        users = await get_database_manager_async_client().get_active_user_ids_in_timerange(
            end_time=current_time.isoformat(),
            start_time=start_time.isoformat(),
        )
        logger.info(f"Found {len(users)} active users in the last 7 days")
        for user in users:
            await self._queue_scheduled_notification(
                SummaryParamsEventModel(
@@ -384,10 +388,13 @@ class NotificationManager(AppService):
    async def _queue_scheduled_notification(self, event: SummaryParamsEventModel):
        """Queue a scheduled notification - exposed method for other services to call"""
        try:
            logger.debug(f"Received Request to queue scheduled notification {event=}")
            logger.info(
                f"Queueing scheduled notification type={event.type} user_id={event.user_id}"
            )

            exchange = "notifications"
            routing_key = get_routing_key(event.type)
            logger.info(f"Using routing key: {routing_key}")

            # Publish to RabbitMQ
            await self.rabbit.publish_message(
@@ -395,6 +402,7 @@ class NotificationManager(AppService):
                message=event.model_dump_json(),
                exchange=next(ex for ex in EXCHANGES if ex.name == exchange),
            )
            logger.info(f"Successfully queued notification for user {event.user_id}")

        except Exception as e:
            logger.exception(f"Error queueing notification: {e}")
@@ -416,85 +424,99 @@ class NotificationManager(AppService):
        # only if both are true, should we email this person
        return validated_email and preference

    def _gather_summary_data(
    async def _gather_summary_data(
        self, user_id: str, event_type: NotificationType, params: BaseSummaryParams
    ) -> BaseSummaryData:
        """Gathers the data to build a summary notification"""

        logger.info(
            f"Gathering summary data for {user_id} and {event_type} wiht {params=}"
            f"Gathering summary data for {user_id} and {event_type} with {params=}"
        )

        # total_credits_used = self.run_and_wait(
        #     get_total_credits_used(user_id, start_time, end_time)
        # )

        # total_executions = self.run_and_wait(
        #     get_total_executions(user_id, start_time, end_time)
        # )

        # most_used_agent = self.run_and_wait(
        #     get_most_used_agent(user_id, start_time, end_time)
        # )

        # execution_times = self.run_and_wait(
        #     get_execution_time(user_id, start_time, end_time)
        # )

        # runs = self.run_and_wait(
        #     get_runs(user_id, start_time, end_time)
        # )
        total_credits_used = 3.0
        total_executions = 2
        most_used_agent = {"name": "Some"}
        execution_times = [1, 2, 3]
        runs = [{"status": "COMPLETED"}, {"status": "FAILED"}]

        successful_runs = len([run for run in runs if run["status"] == "COMPLETED"])
        failed_runs = len([run for run in runs if run["status"] != "COMPLETED"])
        average_execution_time = (
            sum(execution_times) / len(execution_times) if execution_times else 0
        )
        # cost_breakdown = self.run_and_wait(
        #     get_cost_breakdown(user_id, start_time, end_time)
        # )

        cost_breakdown = {
            "agent1": 1.0,
            "agent2": 2.0,
        }

        if event_type == NotificationType.DAILY_SUMMARY and isinstance(
            params, DailySummaryParams
        ):
            return DailySummaryData(
                total_credits_used=total_credits_used,
                total_executions=total_executions,
                most_used_agent=most_used_agent["name"],
                total_execution_time=sum(execution_times),
                successful_runs=successful_runs,
                failed_runs=failed_runs,
                average_execution_time=average_execution_time,
                cost_breakdown=cost_breakdown,
                date=params.date,
        try:
            # Get summary data from the database
            summary_data = await get_database_manager_async_client().get_user_execution_summary_data(
                user_id=user_id,
                start_time=params.start_date,
                end_time=params.end_date,
            )
        elif event_type == NotificationType.WEEKLY_SUMMARY and isinstance(
            params, WeeklySummaryParams
        ):
            return WeeklySummaryData(
                total_credits_used=total_credits_used,
                total_executions=total_executions,
                most_used_agent=most_used_agent["name"],
                total_execution_time=sum(execution_times),
                successful_runs=successful_runs,
                failed_runs=failed_runs,
                average_execution_time=average_execution_time,
                cost_breakdown=cost_breakdown,
                start_date=params.start_date,
                end_date=params.end_date,
            )
        else:
            raise ValueError("Invalid event type or params")

            # Extract data from summary
            total_credits_used = summary_data.total_credits_used
            total_executions = summary_data.total_executions
            most_used_agent = summary_data.most_used_agent
            successful_runs = summary_data.successful_runs
            failed_runs = summary_data.failed_runs
            total_execution_time = summary_data.total_execution_time
            average_execution_time = summary_data.average_execution_time
            cost_breakdown = summary_data.cost_breakdown

            if event_type == NotificationType.DAILY_SUMMARY and isinstance(
                params, DailySummaryParams
            ):
                return DailySummaryData(
                    total_credits_used=total_credits_used,
                    total_executions=total_executions,
                    most_used_agent=most_used_agent,
                    total_execution_time=total_execution_time,
                    successful_runs=successful_runs,
                    failed_runs=failed_runs,
                    average_execution_time=average_execution_time,
                    cost_breakdown=cost_breakdown,
                    date=params.date,
                )
            elif event_type == NotificationType.WEEKLY_SUMMARY and isinstance(
                params, WeeklySummaryParams
            ):
                return WeeklySummaryData(
                    total_credits_used=total_credits_used,
                    total_executions=total_executions,
                    most_used_agent=most_used_agent,
                    total_execution_time=total_execution_time,
                    successful_runs=successful_runs,
                    failed_runs=failed_runs,
                    average_execution_time=average_execution_time,
                    cost_breakdown=cost_breakdown,
                    start_date=params.start_date,
                    end_date=params.end_date,
                )
            else:
                raise ValueError("Invalid event type or params")

        except Exception as e:
            logger.error(f"Failed to gather summary data: {e}")
            # Return sensible defaults in case of error
            if event_type == NotificationType.DAILY_SUMMARY and isinstance(
                params, DailySummaryParams
            ):
                return DailySummaryData(
                    total_credits_used=0.0,
                    total_executions=0,
                    most_used_agent="No data available",
                    total_execution_time=0.0,
                    successful_runs=0,
                    failed_runs=0,
                    average_execution_time=0.0,
                    cost_breakdown={},
                    date=params.date,
                )
            elif event_type == NotificationType.WEEKLY_SUMMARY and isinstance(
                params, WeeklySummaryParams
            ):
                return WeeklySummaryData(
                    total_credits_used=0.0,
                    total_executions=0,
                    most_used_agent="No data available",
                    total_execution_time=0.0,
                    successful_runs=0,
                    failed_runs=0,
                    average_execution_time=0.0,
                    cost_breakdown={},
                    start_date=params.start_date,
                    end_date=params.end_date,
                )
            else:
                raise ValueError("Invalid event type or params") from e

    async def _should_batch(
        self, user_id: str, event_type: NotificationType, event: NotificationEventModel
@@ -764,7 +786,7 @@ class NotificationManager(AppService):
        )
        return True

        summary_data = self._gather_summary_data(
        summary_data = await self._gather_summary_data(
            event.user_id, event.type, model.data
        )

@@ -5,23 +5,64 @@ data.start_date: the start date of the summary
data.end_date: the end date of the summary
data.total_credits_used: the total credits used during the summary
data.total_executions: the total number of executions during the summary
data.most_used_agent: the most used agent's nameduring the summary
data.most_used_agent: the most used agent's name during the summary
data.total_execution_time: the total execution time during the summary
data.successful_runs: the total number of successful runs during the summary
data.failed_runs: the total number of failed runs during the summary
data.average_execution_time: the average execution time during the summary
data.cost_breakdown: the cost breakdown during the summary
data.cost_breakdown: the cost breakdown during the summary (dict mapping agent names to credit amounts)
#}

<h1>Weekly Summary</h1>
<h1 style="color: #5D23BB; font-size: 32px; font-weight: 600; margin-bottom: 25px; margin-top: 0;">
  Weekly Summary
</h1>

<p>Start Date: {{ data.start_date }}</p>
<p>End Date: {{ data.end_date }}</p>
<p>Total Credits Used: {{ data.total_credits_used }}</p>
<p>Total Executions: {{ data.total_executions }}</p>
<p>Most Used Agent: {{ data.most_used_agent }}</p>
<p>Total Execution Time: {{ data.total_execution_time }}</p>
<p>Successful Runs: {{ data.successful_runs }}</p>
<p>Failed Runs: {{ data.failed_runs }}</p>
<p>Average Execution Time: {{ data.average_execution_time }}</p>
<p>Cost Breakdown: {{ data.cost_breakdown }}</p>
<h2 style="color: #070629; font-size: 24px; font-weight: 500; margin-bottom: 20px;">
  Your Agent Activity: {{ data.start_date.strftime('%B %-d') }} – {{ data.end_date.strftime('%B %-d') }}
</h2>

<div style="background-color: #ffffff; border-radius: 8px; padding: 20px; margin-bottom: 25px;">
  <ul style="list-style-type: disc; padding-left: 20px; margin: 0;">
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Total Executions:</strong> {{ data.total_executions }}
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Total Credits Used:</strong> {{ "%.2f"|format(data.total_credits_used) }}
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Total Execution Time:</strong> {{ "%.1f"|format(data.total_execution_time) }} seconds
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Successful Runs:</strong> {{ data.successful_runs }}
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Failed Runs:</strong> {{ data.failed_runs }}
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Average Execution Time:</strong> {{ "%.1f"|format(data.average_execution_time) }} seconds
    </li>
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Most Used Agent:</strong> {{ data.most_used_agent }}
    </li>
    {% if data.cost_breakdown %}
    <li style="font-size: 16px; line-height: 1.8; margin-bottom: 8px;">
      <strong>Cost Breakdown:</strong>
      <ul style="list-style-type: disc; padding-left: 40px; margin-top: 8px;">
        {% for agent_name, credits in data.cost_breakdown.items() %}
        <li style="font-size: 16px; line-height: 1.8; margin-bottom: 4px;">
          {{ agent_name }}: {{ "%.2f"|format(credits) }} credits
        </li>
        {% endfor %}
      </ul>
    </li>
    {% endif %}
  </ul>
</div>

<p style="font-size: 16px; line-height: 165%; margin-top: 20px; margin-bottom: 10px;">
  Thank you for being a part of the AutoGPT community! 🎉
</p>

<p style="font-size: 16px; line-height: 165%; margin-bottom: 0;">
  Join the conversation on <a href="https://discord.gg/autogpt" style="color: #4285F4; text-decoration: underline;">Discord here</a>.
</p>

@@ -60,21 +60,6 @@ class UpdatePermissionsRequest(pydantic.BaseModel):
    permissions: list[APIKeyPermission]


class Pagination(pydantic.BaseModel):
    total_items: int = pydantic.Field(
        description="Total number of items.", examples=[42]
    )
    total_pages: int = pydantic.Field(
        description="Total number of pages.", examples=[2]
    )
    current_page: int = pydantic.Field(
        description="Current_page page number.", examples=[1]
    )
    page_size: int = pydantic.Field(
        description="Number of items per page.", examples=[25]
    )


class RequestTopUp(pydantic.BaseModel):
    credit_amount: int

@@ -458,12 +458,16 @@ async def stripe_webhook(request: Request):
        event = stripe.Webhook.construct_event(
            payload, sig_header, settings.secrets.stripe_webhook_secret
        )
    except ValueError:
    except ValueError as e:
        # Invalid payload
        raise HTTPException(status_code=400)
    except stripe.SignatureVerificationError:
        raise HTTPException(
            status_code=400, detail=f"Invalid payload: {str(e) or type(e).__name__}"
        )
    except stripe.SignatureVerificationError as e:
        # Invalid signature
        raise HTTPException(status_code=400)
        raise HTTPException(
            status_code=400, detail=f"Invalid signature: {str(e) or type(e).__name__}"
        )

    if (
        event["type"] == "checkout.session.completed"

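The str(e) or type(e).__name__ fallback matters because some exceptions stringify to an empty string, which would otherwise yield an unhelpful error detail:

    # Why the fallback helps: an exception with no message stringifies to "".
    class SilentError(Exception):
        pass

    e = SilentError()
    detail = str(e) or type(e).__name__
    print(detail)  # "SilentError" instead of an empty detail string
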
@@ -1067,7 +1071,6 @@ async def get_api_key(
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def delete_api_key(
    key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> Optional[APIKeyWithoutHash]:
@@ -1096,7 +1099,6 @@ async def delete_api_key(
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def suspend_key(
    key_id: str, user_id: Annotated[str, Depends(get_user_id)]
) -> Optional[APIKeyWithoutHash]:
@@ -1122,7 +1124,6 @@ async def suspend_key(
    tags=["api-keys"],
    dependencies=[Depends(auth_middleware)],
)
@feature_flag("api-keys-enabled")
async def update_permissions(
    key_id: str,
    request: UpdatePermissionsRequest,

@@ -14,7 +14,7 @@ import backend.server.v2.admin.credit_admin_routes as credit_admin_routes
import backend.server.v2.admin.model as admin_model
from backend.data.model import UserTransaction
from backend.server.conftest import ADMIN_USER_ID, TARGET_USER_ID
from backend.server.model import Pagination
from backend.util.models import Pagination

app = fastapi.FastAPI()
app.include_router(credit_admin_routes.router)

@@ -1,7 +1,7 @@
from pydantic import BaseModel

from backend.data.model import UserTransaction
from backend.server.model import Pagination
from backend.util.models import Pagination


class UserHistoryResponse(BaseModel):

@@ -9,7 +9,6 @@ import prisma.models
import prisma.types

import backend.data.graph as graph_db
import backend.server.model
import backend.server.v2.library.model as library_model
import backend.server.v2.store.exceptions as store_exceptions
import backend.server.v2.store.image_gen as store_image_gen
@@ -23,6 +22,7 @@ from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.integrations.webhooks.graph_lifecycle_hooks import on_graph_activate
from backend.util.exceptions import NotFoundError
from backend.util.json import SafeJson
from backend.util.models import Pagination
from backend.util.settings import Config

logger = logging.getLogger(__name__)
@@ -131,7 +131,7 @@ async def list_library_agents(
    # Return the response with only valid agents
    return library_model.LibraryAgentResponse(
        agents=valid_library_agents,
        pagination=backend.server.model.Pagination(
        pagination=Pagination(
            total_items=agent_count,
            total_pages=(agent_count + page_size - 1) // page_size,
            current_page=page,
@@ -629,7 +629,7 @@ async def list_presets(

    return library_model.LibraryAgentPresetResponse(
        presets=presets,
        pagination=backend.server.model.Pagination(
        pagination=Pagination(
            total_items=total_items,
            total_pages=total_pages,
            current_page=page,

@@ -8,9 +8,9 @@ import pydantic

import backend.data.block as block_model
import backend.data.graph as graph_model
import backend.server.model as server_model
from backend.data.model import CredentialsMetaInput, is_credentials_field_name
from backend.integrations.providers import ProviderName
from backend.util.models import Pagination


class LibraryAgentStatus(str, Enum):
@@ -213,7 +213,7 @@ class LibraryAgentResponse(pydantic.BaseModel):
    """Response schema for a list of library agents and pagination info."""

    agents: list[LibraryAgent]
    pagination: server_model.Pagination
    pagination: Pagination


class LibraryAgentPresetCreatable(pydantic.BaseModel):
@@ -317,7 +317,7 @@ class LibraryAgentPresetResponse(pydantic.BaseModel):
    """Response schema for a list of agent presets and pagination info."""

    presets: list[LibraryAgentPreset]
    pagination: server_model.Pagination
    pagination: Pagination


class LibraryAgentFilter(str, Enum):

@@ -7,9 +7,9 @@ import pytest
import pytest_mock
from pytest_snapshot.plugin import Snapshot

import backend.server.model as server_model
import backend.server.v2.library.model as library_model
from backend.server.v2.library.routes import router as library_router
from backend.util.models import Pagination

app = fastapi.FastAPI()
app.include_router(library_router)
@@ -77,7 +77,7 @@ async def test_get_library_agents_success(
            updated_at=datetime.datetime(2023, 1, 1, 0, 0, 0),
        ),
    ],
    pagination=server_model.Pagination(
    pagination=Pagination(
        total_items=2, total_pages=1, current_page=1, page_size=50
    ),
)

@@ -466,6 +466,8 @@ async def get_store_submissions(
                # internal_comments omitted for regular users
                reviewed_at=sub.reviewed_at,
                changes_summary=sub.changes_summary,
                video_url=sub.video_url,
                categories=sub.categories,
            )
            submission_models.append(submission_model)

@@ -546,7 +548,7 @@ async def create_store_submission(
    description: str = "",
    sub_heading: str = "",
    categories: list[str] = [],
    changes_summary: str = "Initial Submission",
    changes_summary: str | None = "Initial Submission",
) -> backend.server.v2.store.model.StoreSubmission:
    """
    Create the first (and only) store listing and thus submission as a normal user
@@ -685,6 +687,160 @@
    ) from e


async def edit_store_submission(
    user_id: str,
    store_listing_version_id: str,
    name: str,
    video_url: str | None = None,
    image_urls: list[str] = [],
    description: str = "",
    sub_heading: str = "",
    categories: list[str] = [],
    changes_summary: str | None = "Update submission",
) -> backend.server.v2.store.model.StoreSubmission:
    """
    Edit an existing store listing submission.

    Args:
        user_id: ID of the authenticated user editing the submission
        store_listing_version_id: ID of the store listing version to edit
        name: Name of the agent
        video_url: Optional URL to video demo
        image_urls: List of image URLs for the listing
        description: Description of the agent
        sub_heading: Optional sub-heading for the agent
        categories: List of categories for the agent
        changes_summary: Summary of changes made in this submission

    Returns:
        StoreSubmission: The updated store submission

    Raises:
        SubmissionNotFoundError: If the submission is not found
        UnauthorizedError: If the user doesn't own the submission
        InvalidOperationError: If trying to edit a submission that can't be edited
    """
    try:
        # Get the current version and verify ownership
        current_version = await prisma.models.StoreListingVersion.prisma().find_first(
            where=prisma.types.StoreListingVersionWhereInput(
                id=store_listing_version_id
            ),
            include={
                "StoreListing": {
                    "include": {
                        "Versions": {"order_by": {"version": "desc"}, "take": 1}
                    }
                }
            },
        )

        if not current_version:
            raise backend.server.v2.store.exceptions.SubmissionNotFoundError(
                f"Store listing version not found: {store_listing_version_id}"
            )

        # Verify the user owns this submission
        if (
            not current_version.StoreListing
            or current_version.StoreListing.owningUserId != user_id
        ):
            raise backend.server.v2.store.exceptions.UnauthorizedError(
                f"User {user_id} does not own submission {store_listing_version_id}"
            )

        # Currently we do not allow users to update the agent associated with a submission.
        # If we allow that in the future, we will need a check here to verify the agent
        # belongs to this user.

        # Check if we can edit this submission
        if current_version.submissionStatus == prisma.enums.SubmissionStatus.REJECTED:
            raise backend.server.v2.store.exceptions.InvalidOperationError(
                "Cannot edit a rejected submission"
            )

        # For APPROVED submissions, we need to create a new version
        if current_version.submissionStatus == prisma.enums.SubmissionStatus.APPROVED:
            # Create a new version for the existing listing
            return await create_store_version(
                user_id=user_id,
                agent_id=current_version.agentGraphId,
                agent_version=current_version.agentGraphVersion,
                store_listing_id=current_version.storeListingId,
                name=name,
                video_url=video_url,
                image_urls=image_urls,
                description=description,
                sub_heading=sub_heading,
                categories=categories,
                changes_summary=changes_summary,
            )

        # For PENDING submissions, we can update the existing version
        elif current_version.submissionStatus == prisma.enums.SubmissionStatus.PENDING:
            # Update the existing version
            updated_version = await prisma.models.StoreListingVersion.prisma().update(
                where={"id": store_listing_version_id},
                data=prisma.types.StoreListingVersionUpdateInput(
                    name=name,
                    videoUrl=video_url,
                    imageUrls=image_urls,
                    description=description,
                    categories=categories,
                    subHeading=sub_heading,
                    changesSummary=changes_summary,
                ),
            )

            logger.debug(
                f"Updated existing version {store_listing_version_id} for agent {current_version.agentGraphId}"
            )

            if not updated_version:
                raise backend.server.v2.store.exceptions.DatabaseError(
                    "Failed to update store listing version"
                )
            return backend.server.v2.store.model.StoreSubmission(
                agent_id=current_version.agentGraphId,
                agent_version=current_version.agentGraphVersion,
                name=name,
                sub_heading=sub_heading,
                slug=current_version.StoreListing.slug,
                description=description,
                image_urls=image_urls,
                date_submitted=updated_version.submittedAt or updated_version.createdAt,
                status=updated_version.submissionStatus,
                runs=0,
                rating=0.0,
                store_listing_version_id=updated_version.id,
                changes_summary=changes_summary,
                video_url=video_url,
                categories=categories,
                version=updated_version.version,
            )

        else:
            raise backend.server.v2.store.exceptions.InvalidOperationError(
                f"Cannot edit submission with status: {current_version.submissionStatus}"
            )

    except (
        backend.server.v2.store.exceptions.SubmissionNotFoundError,
        backend.server.v2.store.exceptions.UnauthorizedError,
        backend.server.v2.store.exceptions.AgentNotFoundError,
        backend.server.v2.store.exceptions.ListingExistsError,
        backend.server.v2.store.exceptions.InvalidOperationError,
    ):
        raise
    except prisma.errors.PrismaError as e:
        logger.error(f"Database error editing store submission: {e}")
        raise backend.server.v2.store.exceptions.DatabaseError(
            "Failed to edit store submission"
        ) from e


async def create_store_version(
    user_id: str,
    agent_id: str,

@@ -696,7 +852,7 @@ async def create_store_version(
    description: str = "",
    sub_heading: str = "",
    categories: list[str] = [],
    changes_summary: str = "Update Submission",
    changes_summary: str | None = "Initial submission",
) -> backend.server.v2.store.model.StoreSubmission:
    """
    Create a new version for an existing store listing

@@ -94,3 +94,15 @@ class SubmissionNotFoundError(StoreError):
    """Raised when a submission is not found"""

    pass


class InvalidOperationError(StoreError):
    """Raised when an operation is not valid for the current state"""

    pass


class UnauthorizedError(StoreError):
    """Raised when a user is not authorized to perform an action"""

    pass

@@ -4,7 +4,7 @@ from typing import List
import prisma.enums
import pydantic

from backend.server.model import Pagination
from backend.util.models import Pagination


class MyAgent(pydantic.BaseModel):
@@ -115,11 +115,9 @@ class StoreSubmission(pydantic.BaseModel):
    reviewed_at: datetime.datetime | None = None
    changes_summary: str | None = None

    reviewer_id: str | None = None
    review_comments: str | None = None  # External comments visible to creator
    internal_comments: str | None = None  # Private notes for admin use only
    reviewed_at: datetime.datetime | None = None
    changes_summary: str | None = None
    # Additional fields for editing
    video_url: str | None = None
    categories: list[str] = []


class StoreSubmissionsResponse(pydantic.BaseModel):
@@ -161,6 +159,16 @@ class StoreSubmissionRequest(pydantic.BaseModel):
    changes_summary: str | None = None


class StoreSubmissionEditRequest(pydantic.BaseModel):
    name: str
    sub_heading: str
    video_url: str | None = None
    image_urls: list[str] = []
    description: str = ""
    categories: list[str] = []
    changes_summary: str | None = None


class ProfileDetails(pydantic.BaseModel):
    name: str
    username: str

@@ -564,6 +564,47 @@ async def create_submission(
)


@router.put(
"/submissions/{store_listing_version_id}",
summary="Edit store submission",
tags=["store", "private"],
dependencies=[fastapi.Depends(autogpt_libs.auth.middleware.auth_middleware)],
response_model=backend.server.v2.store.model.StoreSubmission,
)
async def edit_submission(
store_listing_version_id: str,
submission_request: backend.server.v2.store.model.StoreSubmissionEditRequest,
user_id: typing.Annotated[
str, fastapi.Depends(autogpt_libs.auth.depends.get_user_id)
],
):
"""
Edit an existing store listing submission.

Args:
store_listing_version_id (str): ID of the store listing version to edit
submission_request (StoreSubmissionEditRequest): The updated submission details
user_id (str): ID of the authenticated user editing the listing

Returns:
StoreSubmission: The updated store submission

Raises:
HTTPException: If there is an error editing the submission
"""
return await backend.server.v2.store.db.edit_store_submission(
user_id=user_id,
store_listing_version_id=store_listing_version_id,
name=submission_request.name,
video_url=submission_request.video_url,
image_urls=submission_request.image_urls,
description=submission_request.description,
sub_heading=submission_request.sub_heading,
categories=submission_request.categories,
changes_summary=submission_request.changes_summary,
)


@router.post(
"/submissions/media",
summary="Upload submission media",

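A hedged sketch of exercising the new endpoint over HTTP; the host, port, and any route prefix are assumptions (the compose files later in this diff expose the REST server on 8006), and the bearer token must belong to the submission's owner:

import httpx

# Placeholder IDs/URLs; adjust to your deployment. The mount prefix of this
# router is not shown in the diff, so the path below is an assumption.
BASE_URL = "http://localhost:8006"
version_id = "00000000-0000-0000-0000-000000000000"

resp = httpx.put(
    f"{BASE_URL}/submissions/{version_id}",
    headers={"Authorization": "Bearer <user-jwt>"},  # auth middleware requires a valid JWT
    json={
        "name": "My Agent",
        "sub_heading": "Does something useful",
        "description": "Updated listing description",
        "categories": ["productivity"],
        "changes_summary": "Fixed typos in the listing",
    },
)
resp.raise_for_status()
print(resp.json())  # serialized StoreSubmission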
@@ -551,6 +551,8 @@ def test_get_submissions_success(
agent_version=1,
sub_heading="Test agent subheading",
slug="test-agent",
video_url="test.mp4",
categories=["test-category"],
)
],
pagination=backend.server.v2.store.model.Pagination(

20
autogpt_platform/backend/backend/util/models.py
Normal file
@@ -0,0 +1,20 @@
"""
Shared models and types used across the backend to avoid circular imports.
"""

import pydantic


class Pagination(pydantic.BaseModel):
total_items: int = pydantic.Field(
description="Total number of items.", examples=[42]
)
total_pages: int = pydantic.Field(
description="Total number of pages.", examples=[2]
)
current_page: int = pydantic.Field(
description="Current page number.", examples=[1]
)
page_size: int = pydantic.Field(
description="Number of items per page.", examples=[25]
)
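Because Pagination now lives in a leaf module with no other backend imports, the store models and the test above can share it without import cycles. A small sketch of building one inside the repo (values illustrative):

import math

from backend.util.models import Pagination

total_items, page_size = 42, 25
pagination = Pagination(
    total_items=total_items,
    total_pages=math.ceil(total_items / page_size),  # 42 items / 25 per page -> 2 pages
    current_page=1,
    page_size=page_size,
)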
@@ -360,7 +360,7 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
description="Maximum message size limit for communication with the message bus",
)

backend_cors_allow_origins: List[str] = Field(default_factory=list)
backend_cors_allow_origins: List[str] = Field(default=["http://localhost:3000"])

@field_validator("backend_cors_allow_origins")
@classmethod
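With the default changed from an empty list to ["http://localhost:3000"], a fresh local install accepts the bundled frontend without any CORS configuration, while deployments can still override it through the environment. A hedged sketch, assuming pydantic-settings' JSON parsing for list fields and that Config lives in backend.util.settings:

import os

# JSON-encoded list, as pydantic-settings expects for List[str] fields (assumption).
os.environ["BACKEND_CORS_ALLOW_ORIGINS"] = '["https://app.example.com"]'

from backend.util.settings import Config  # assumed module path

config = Config()
print(config.backend_cors_allow_origins)  # env value overrides the new localhost default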
@@ -472,6 +472,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
groq_api_key: str = Field(default="", description="Groq API key")
open_router_api_key: str = Field(default="", description="Open Router API Key")
llama_api_key: str = Field(default="", description="Llama API Key")
v0_api_key: str = Field(default="", description="v0 by Vercel API key")

reddit_client_id: str = Field(default="", description="Reddit client ID")
reddit_client_secret: str = Field(default="", description="Reddit client secret")
@@ -521,6 +522,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
apollo_api_key: str = Field(default="", description="Apollo API Key")
smartlead_api_key: str = Field(default="", description="SmartLead API Key")
zerobounce_api_key: str = Field(default="", description="ZeroBounce API Key")
enrichlayer_api_key: str = Field(default="", description="Enrichlayer API Key")

# AutoMod API credentials
automod_api_key: str = Field(default="", description="AutoMod API key")
@@ -534,7 +536,6 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
ayrshare_api_key: str = Field(default="", description="Ayrshare API Key")
ayrshare_jwt_key: str = Field(default="", description="Ayrshare private Key")
# Add more secret fields as needed

model_config = SettingsConfigDict(
env_file=".env",
env_file_encoding="utf-8",

@@ -0,0 +1,41 @@
-- Drop the existing view
DROP VIEW IF EXISTS "StoreSubmission";

-- Recreate the view with the new fields
CREATE VIEW "StoreSubmission" AS
SELECT
sl.id AS listing_id,
sl."owningUserId" AS user_id,
slv."agentGraphId" AS agent_id,
slv.version AS agent_version,
sl.slug,
COALESCE(slv.name, '') AS name,
slv."subHeading" AS sub_heading,
slv.description,
slv."imageUrls" AS image_urls,
slv."submittedAt" AS date_submitted,
slv."submissionStatus" AS status,
COALESCE(ar.run_count, 0::bigint) AS runs,
COALESCE(avg(sr.score::numeric), 0.0)::double precision AS rating,
slv.id AS store_listing_version_id,
slv."reviewerId" AS reviewer_id,
slv."reviewComments" AS review_comments,
slv."internalComments" AS internal_comments,
slv."reviewedAt" AS reviewed_at,
slv."changesSummary" AS changes_summary,
-- Add the two new fields:
slv."videoUrl" AS video_url,
slv.categories
FROM "StoreListing" sl
JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl.id
LEFT JOIN "StoreListingReview" sr ON sr."storeListingVersionId" = slv.id
LEFT JOIN (
SELECT "AgentGraphExecution"."agentGraphId", count(*) AS run_count
FROM "AgentGraphExecution"
GROUP BY "AgentGraphExecution"."agentGraphId"
) ar ON ar."agentGraphId" = slv."agentGraphId"
WHERE sl."isDeleted" = false
GROUP BY sl.id, sl."owningUserId", slv.id, slv."agentGraphId", slv.version, sl.slug, slv.name,
slv."subHeading", slv.description, slv."imageUrls", slv."submittedAt",
slv."submissionStatus", slv."reviewerId", slv."reviewComments", slv."internalComments",
slv."reviewedAt", slv."changesSummary", slv."videoUrl", slv.categories, ar.run_count;
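Dropping and recreating the view means readers pick up the two new columns without any change on their side; a hedged check using Prisma Client Python's raw query API (client setup elided, column names taken from the view above):

# Assumes `prisma` is a connected prisma.Prisma() instance.
rows = await prisma.query_raw(
    'SELECT name, video_url, categories FROM "StoreSubmission" WHERE user_id = $1',
    user_id,
)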
@@ -659,6 +659,8 @@ view StoreSubmission {
internal_comments String?
reviewed_at DateTime?
changes_summary String?
video_url String?
categories String[]

// Index or unique are not applied to views
}

@@ -20,7 +20,11 @@
"review_comments": null,
"internal_comments": null,
"reviewed_at": null,
"changes_summary": null
"changes_summary": null,
"video_url": "test.mp4",
"categories": [
"test-category"
]
}
],
"pagination": {

@@ -39,20 +39,20 @@ faker = Faker()


# Constants for data generation limits (reduced for E2E tests)
NUM_USERS = 10
NUM_AGENT_BLOCKS = 20
MIN_GRAPHS_PER_USER = 10
MAX_GRAPHS_PER_USER = 10
MIN_NODES_PER_GRAPH = 2
MAX_NODES_PER_GRAPH = 4
MIN_PRESETS_PER_USER = 1
MAX_PRESETS_PER_USER = 2
MIN_AGENTS_PER_USER = 10
MAX_AGENTS_PER_USER = 10
MIN_EXECUTIONS_PER_GRAPH = 1
MAX_EXECUTIONS_PER_GRAPH = 5
MIN_REVIEWS_PER_VERSION = 1
MAX_REVIEWS_PER_VERSION = 3
NUM_USERS = 15
NUM_AGENT_BLOCKS = 30
MIN_GRAPHS_PER_USER = 15
MAX_GRAPHS_PER_USER = 15
MIN_NODES_PER_GRAPH = 3
MAX_NODES_PER_GRAPH = 6
MIN_PRESETS_PER_USER = 2
MAX_PRESETS_PER_USER = 3
MIN_AGENTS_PER_USER = 15
MAX_AGENTS_PER_USER = 15
MIN_EXECUTIONS_PER_GRAPH = 2
MAX_EXECUTIONS_PER_GRAPH = 8
MIN_REVIEWS_PER_VERSION = 2
MAX_REVIEWS_PER_VERSION = 5


def get_image():
@@ -76,6 +76,23 @@ def get_video_url():
return f"https://www.youtube.com/watch?v={video_id}"


def get_category():
"""Generate a random category from the predefined list."""
categories = [
"productivity",
"writing",
"development",
"data",
"marketing",
"research",
"creative",
"business",
"personal",
"other",
]
return random.choice(categories)


class TestDataCreator:
"""Creates test data using API functions for E2E tests."""

@@ -559,24 +576,37 @@ class TestDataCreator:
submissions.append(test_submission.model_dump())
print("✅ Created special test store submission for test123@gmail.com")

# Auto-approve the test submission
# Randomly approve, reject, or leave pending the test submission
if test_submission.store_listing_version_id:
approved_submission = await review_store_submission(
store_listing_version_id=test_submission.store_listing_version_id,
is_approved=True,
external_comments="Test submission approved",
internal_comments="Auto-approved test submission",
reviewer_id=test_user["id"],
)
approved_submissions.append(approved_submission.model_dump())
print("✅ Approved test store submission")
random_value = random.random()
if random_value < 0.4: # 40% chance to approve
approved_submission = await review_store_submission(
store_listing_version_id=test_submission.store_listing_version_id,
is_approved=True,
external_comments="Test submission approved",
internal_comments="Auto-approved test submission",
reviewer_id=test_user["id"],
)
approved_submissions.append(approved_submission.model_dump())
print("✅ Approved test store submission")

# Mark test submission as featured
await prisma.storelistingversion.update(
where={"id": test_submission.store_listing_version_id},
data={"isFeatured": True},
)
print("🌟 Marked test agent as FEATURED")
# Mark approved submission as featured
await prisma.storelistingversion.update(
where={"id": test_submission.store_listing_version_id},
data={"isFeatured": True},
)
print("🌟 Marked test agent as FEATURED")
elif random_value < 0.7: # 30% chance to reject (40% to 70%)
await review_store_submission(
store_listing_version_id=test_submission.store_listing_version_id,
is_approved=False,
external_comments="Test submission rejected - needs improvements",
internal_comments="Auto-rejected test submission for E2E testing",
reviewer_id=test_user["id"],
)
print("❌ Rejected test store submission")
else: # 30% chance to leave pending (70% to 100%)
print("⏳ Left test submission pending for review")

except Exception as e:
print(f"Error creating test store submission: {e}")
@@ -617,58 +647,87 @@ class TestDataCreator:
video_url=get_video_url() if random.random() < 0.3 else None,
image_urls=[get_image() for _ in range(3)],
description=faker.text(),
categories=[faker.word() for _ in range(3)],
categories=[
get_category()
], # Single category from predefined list
changes_summary="Initial E2E test submission",
)
submissions.append(submission.model_dump())
print(f"✅ Created store submission: {submission.name}")

# Approve the submission so it appears in the store
# Randomly approve, reject, or leave pending the submission
if submission.store_listing_version_id:
try:
# Pick a random user as the reviewer (admin)
reviewer_id = random.choice(self.users)["id"]
random_value = random.random()
if random_value < 0.4: # 40% chance to approve
try:
# Pick a random user as the reviewer (admin)
reviewer_id = random.choice(self.users)["id"]

approved_submission = await review_store_submission(
store_listing_version_id=submission.store_listing_version_id,
is_approved=True,
external_comments="Auto-approved for E2E testing",
internal_comments="Automatically approved by E2E test data script",
reviewer_id=reviewer_id,
)
approved_submissions.append(
approved_submission.model_dump()
)
print(f"✅ Approved store submission: {submission.name}")
approved_submission = await review_store_submission(
store_listing_version_id=submission.store_listing_version_id,
is_approved=True,
external_comments="Auto-approved for E2E testing",
internal_comments="Automatically approved by E2E test data script",
reviewer_id=reviewer_id,
)
approved_submissions.append(
approved_submission.model_dump()
)
print(
f"✅ Approved store submission: {submission.name}"
)

# Mark some agents as featured during creation (30% chance)
# More likely for creators and first submissions
is_creator = user["id"] in [
p.get("userId") for p in self.profiles
]
feature_chance = (
0.5 if is_creator else 0.2
) # 50% for creators, 20% for others
# Mark some agents as featured during creation (30% chance)
# More likely for creators and first submissions
is_creator = user["id"] in [
p.get("userId") for p in self.profiles
]
feature_chance = (
0.5 if is_creator else 0.2
) # 50% for creators, 20% for others

if random.random() < feature_chance:
try:
await prisma.storelistingversion.update(
where={
"id": submission.store_listing_version_id
},
data={"isFeatured": True},
)
print(
f"🌟 Marked agent as FEATURED: {submission.name}"
)
except Exception as e:
print(
f"Warning: Could not mark submission as featured: {e}"
)
if random.random() < feature_chance:
try:
await prisma.storelistingversion.update(
where={
"id": submission.store_listing_version_id
},
data={"isFeatured": True},
)
print(
f"🌟 Marked agent as FEATURED: {submission.name}"
)
except Exception as e:
print(
f"Warning: Could not mark submission as featured: {e}"
)

except Exception as e:
except Exception as e:
print(
f"Warning: Could not approve submission {submission.name}: {e}"
)
elif random_value < 0.7: # 30% chance to reject (40% to 70%)
try:
# Pick a random user as the reviewer (admin)
reviewer_id = random.choice(self.users)["id"]

await review_store_submission(
store_listing_version_id=submission.store_listing_version_id,
is_approved=False,
external_comments="Submission rejected - needs improvements",
internal_comments="Automatically rejected by E2E test data script",
reviewer_id=reviewer_id,
)
print(
f"❌ Rejected store submission: {submission.name}"
)
except Exception as e:
print(
f"Warning: Could not reject submission {submission.name}: {e}"
)
else: # 30% chance to leave pending (70% to 100%)
print(
f"Warning: Could not approve submission {submission.name}: {e}"
f"⏳ Left submission pending for review: {submission.name}"
)

except Exception as e:

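The same 40/30/30 split now appears in both the special-user path and the bulk path above; the branch thresholds reduce to one small helper, sketched here for clarity (not part of the diff):

import random

def pick_review_outcome(r: float | None = None) -> str:
    """Mirror the seed script's split: 40% approve, 30% reject, 30% leave pending."""
    r = random.random() if r is None else r
    if r < 0.4:
        return "approve"
    elif r < 0.7:
        return "reject"
    return "pending"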
@@ -1,123 +0,0 @@
############
# Secrets
# YOU MUST CHANGE THESE BEFORE GOING INTO PRODUCTION
############

POSTGRES_PASSWORD=your-super-secret-and-long-postgres-password
JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
DASHBOARD_USERNAME=supabase
DASHBOARD_PASSWORD=this_password_is_insecure_and_should_be_updated
SECRET_KEY_BASE=UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
VAULT_ENC_KEY=your-encryption-key-32-chars-min


############
# Database - You can change these to any PostgreSQL database that has logical replication enabled.
############

POSTGRES_HOST=db
POSTGRES_DB=postgres
POSTGRES_PORT=5432
# default user is postgres


############
# Supavisor -- Database pooler
############
POOLER_PROXY_PORT_TRANSACTION=6543
POOLER_DEFAULT_POOL_SIZE=20
POOLER_MAX_CLIENT_CONN=100
POOLER_TENANT_ID=your-tenant-id


############
# API Proxy - Configuration for the Kong Reverse proxy.
############

KONG_HTTP_PORT=8000
KONG_HTTPS_PORT=8443


############
# API - Configuration for PostgREST.
############

PGRST_DB_SCHEMAS=public,storage,graphql_public


############
# Auth - Configuration for the GoTrue authentication server.
############

## General
SITE_URL=http://localhost:3000
ADDITIONAL_REDIRECT_URLS=
JWT_EXPIRY=3600
DISABLE_SIGNUP=false
API_EXTERNAL_URL=http://localhost:8000

## Mailer Config
MAILER_URLPATHS_CONFIRMATION="/auth/v1/verify"
MAILER_URLPATHS_INVITE="/auth/v1/verify"
MAILER_URLPATHS_RECOVERY="/auth/v1/verify"
MAILER_URLPATHS_EMAIL_CHANGE="/auth/v1/verify"

## Email auth
ENABLE_EMAIL_SIGNUP=true
ENABLE_EMAIL_AUTOCONFIRM=false
SMTP_ADMIN_EMAIL=admin@example.com
SMTP_HOST=supabase-mail
SMTP_PORT=2500
SMTP_USER=fake_mail_user
SMTP_PASS=fake_mail_password
SMTP_SENDER_NAME=fake_sender
ENABLE_ANONYMOUS_USERS=false

## Phone auth
ENABLE_PHONE_SIGNUP=true
ENABLE_PHONE_AUTOCONFIRM=true


############
# Studio - Configuration for the Dashboard
############

STUDIO_DEFAULT_ORGANIZATION=Default Organization
STUDIO_DEFAULT_PROJECT=Default Project

STUDIO_PORT=3000
# replace if you intend to use Studio outside of localhost
SUPABASE_PUBLIC_URL=http://localhost:8000

# Enable webp support
IMGPROXY_ENABLE_WEBP_DETECTION=true

# Add your OpenAI API key to enable SQL Editor Assistant
OPENAI_API_KEY=


############
# Functions - Configuration for Functions
############
# NOTE: VERIFY_JWT applies to all functions. Per-function VERIFY_JWT is not supported yet.
FUNCTIONS_VERIFY_JWT=false


############
# Logs - Configuration for Logflare
# Please refer to https://supabase.com/docs/reference/self-hosting-analytics/introduction
############

LOGFLARE_LOGGER_BACKEND_API_KEY=your-super-secret-and-long-logflare-key

# Change vector.toml sinks to reflect this change
LOGFLARE_API_KEY=your-super-secret-and-long-logflare-key

# Docker socket location - this value will differ depending on your OS
DOCKER_SOCKET_LOCATION=/var/run/docker.sock

# Google Cloud Project details
GOOGLE_PROJECT_ID=GOOGLE_PROJECT_ID
GOOGLE_PROJECT_NUMBER=GOOGLE_PROJECT_NUMBER

1
autogpt_platform/db/docker/.gitignore
vendored
@@ -1,5 +1,4 @@
volumes/db/data
volumes/storage
.env
test.http
docker-compose.override.yml

@@ -5,8 +5,101 @@
# Destroy: docker compose -f docker-compose.yml -f ./dev/docker-compose.dev.yml down -v --remove-orphans
# Reset everything: ./reset.sh

# Environment Variable Loading Order (first → last, later overrides earlier):
# 1. ../../.env.default - Default values for all Supabase settings
# 2. ../../.env - User's custom configuration (if exists)
# 3. ./.env - Local overrides specific to db/docker (if exists)
# 4. environment key - Service-specific overrides defined below
# 5. Shell environment - Variables exported before running docker compose

name: supabase

# Common env_file configuration for all Supabase services
x-supabase-env-files: &supabase-env-files
env_file:
- ../../.env.default # Base defaults from platform root
- path: ../../.env # User overrides from platform root (optional)
required: false
- path: ./.env # Local overrides for db/docker (optional)
required: false

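The five sources above resolve like a left-to-right dict merge, later entries winning; a small Python sketch of the same precedence (values illustrative):

# Each dict stands in for one source from the comment block above.
effective = {
    **{"POSTGRES_PORT": "5432"},    # 1. ../../.env.default
    **{},                           # 2. ../../.env (absent here)
    **{"POSTGRES_PORT": "15432"},   # 3. ./.env local override
    **{"POSTGRES_HOST": "db"},      # 4. service `environment:` key
    **{"POSTGRES_PORT": "25432"},   # 5. exported shell variable wins
}
assert effective["POSTGRES_PORT"] == "25432"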
# Common Supabase environment - hardcoded defaults to avoid variable substitution
x-supabase-env: &supabase-env
# Core PostgreSQL settings
POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
POSTGRES_HOST: db
POSTGRES_PORT: "5432"
POSTGRES_DB: postgres

# Authentication & Security
JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SERVICE_ROLE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
DASHBOARD_USERNAME: supabase
DASHBOARD_PASSWORD: this_password_is_insecure_and_should_be_updated
SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
VAULT_ENC_KEY: your-encryption-key-32-chars-min

# URLs and Endpoints
SITE_URL: http://localhost:3000
API_EXTERNAL_URL: http://localhost:8000
SUPABASE_PUBLIC_URL: http://localhost:8000
ADDITIONAL_REDIRECT_URLS: ""

# Feature Flags
DISABLE_SIGNUP: "false"
ENABLE_EMAIL_SIGNUP: "true"
ENABLE_EMAIL_AUTOCONFIRM: "false"
ENABLE_ANONYMOUS_USERS: "false"
ENABLE_PHONE_SIGNUP: "true"
ENABLE_PHONE_AUTOCONFIRM: "true"
FUNCTIONS_VERIFY_JWT: "false"
IMGPROXY_ENABLE_WEBP_DETECTION: "true"

# Email/SMTP Configuration
SMTP_ADMIN_EMAIL: admin@example.com
SMTP_HOST: supabase-mail
SMTP_PORT: "2500"
SMTP_USER: fake_mail_user
SMTP_PASS: fake_mail_password
SMTP_SENDER_NAME: fake_sender

# Mailer URLs
MAILER_URLPATHS_CONFIRMATION: /auth/v1/verify
MAILER_URLPATHS_INVITE: /auth/v1/verify
MAILER_URLPATHS_RECOVERY: /auth/v1/verify
MAILER_URLPATHS_EMAIL_CHANGE: /auth/v1/verify

# JWT Settings
JWT_EXPIRY: "3600"

# Database Schemas
PGRST_DB_SCHEMAS: public,storage,graphql_public

# Studio Settings
STUDIO_DEFAULT_ORGANIZATION: Default Organization
STUDIO_DEFAULT_PROJECT: Default Project

# Logging
LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key

# Pooler Settings
POOLER_DEFAULT_POOL_SIZE: "20"
POOLER_MAX_CLIENT_CONN: "100"
POOLER_TENANT_ID: your-tenant-id
POOLER_PROXY_PORT_TRANSACTION: "6543"

# Kong Ports
KONG_HTTP_PORT: "8000"
KONG_HTTPS_PORT: "8443"

# Docker
DOCKER_SOCKET_LOCATION: /var/run/docker.sock

# Google Cloud (if needed)
GOOGLE_PROJECT_ID: GOOGLE_PROJECT_ID
GOOGLE_PROJECT_NUMBER: GOOGLE_PROJECT_NUMBER

services:

studio:
@@ -24,24 +117,24 @@ services:
timeout: 10s
interval: 5s
retries: 3
depends_on:
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
STUDIO_PG_META_URL: http://meta:8080
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password

DEFAULT_ORGANIZATION_NAME: ${STUDIO_DEFAULT_ORGANIZATION}
DEFAULT_PROJECT_NAME: ${STUDIO_DEFAULT_PROJECT}
OPENAI_API_KEY: ${OPENAI_API_KEY:-}
DEFAULT_ORGANIZATION_NAME: Default Organization
DEFAULT_PROJECT_NAME: Default Project
OPENAI_API_KEY: ""

SUPABASE_URL: http://kong:8000
SUPABASE_PUBLIC_URL: ${SUPABASE_PUBLIC_URL}
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
AUTH_JWT_SECRET: ${JWT_SECRET}
SUPABASE_PUBLIC_URL: http://localhost:8000
SUPABASE_ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SUPABASE_SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
AUTH_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long

LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key
LOGFLARE_URL: http://analytics:4000
NEXT_PUBLIC_ENABLE_LOGS: true
# Comment to use Big Query backend for analytics
@@ -54,15 +147,15 @@ services:
image: kong:2.8.1
restart: unless-stopped
ports:
- ${KONG_HTTP_PORT}:8000/tcp
- ${KONG_HTTPS_PORT}:8443/tcp
- 8000:8000/tcp
- 8443:8443/tcp
volumes:
# https://github.com/supabase/supabase/issues/12661
- ./volumes/api/kong.yml:/home/kong/temp.yml:ro
depends_on:
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
KONG_DATABASE: "off"
KONG_DECLARATIVE_CONFIG: /home/kong/kong.yml
# https://github.com/supabase/cli/issues/14
@@ -70,10 +163,10 @@ services:
KONG_PLUGINS: request-transformer,cors,key-auth,acl,basic-auth
KONG_NGINX_PROXY_PROXY_BUFFER_SIZE: 160k
KONG_NGINX_PROXY_PROXY_BUFFERS: 64 160k
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_KEY: ${SERVICE_ROLE_KEY}
DASHBOARD_USERNAME: ${DASHBOARD_USERNAME}
DASHBOARD_PASSWORD: ${DASHBOARD_PASSWORD}
SUPABASE_ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SUPABASE_SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
DASHBOARD_USERNAME: supabase
DASHBOARD_PASSWORD: this_password_is_insecure_and_should_be_updated
# https://unix.stackexchange.com/a/294837
entrypoint: bash -c 'eval "echo \"$$(cat ~/temp.yml)\"" > ~/kong.yml && /docker-entrypoint.sh kong docker-start'

@@ -98,48 +191,49 @@ services:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
GOTRUE_API_HOST: 0.0.0.0
GOTRUE_API_PORT: 9999
API_EXTERNAL_URL: ${API_EXTERNAL_URL}
API_EXTERNAL_URL: http://localhost:8000

GOTRUE_DB_DRIVER: postgres
GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
GOTRUE_DB_DATABASE_URL: postgres://supabase_auth_admin:your-super-secret-and-long-postgres-password@db:5432/postgres

GOTRUE_SITE_URL: ${SITE_URL}
GOTRUE_URI_ALLOW_LIST: ${ADDITIONAL_REDIRECT_URLS}
GOTRUE_DISABLE_SIGNUP: ${DISABLE_SIGNUP}
GOTRUE_SITE_URL: http://localhost:3000
GOTRUE_URI_ALLOW_LIST: ""
GOTRUE_DISABLE_SIGNUP: false

GOTRUE_JWT_ADMIN_ROLES: service_role
GOTRUE_JWT_AUD: authenticated
GOTRUE_JWT_DEFAULT_GROUP_NAME: authenticated
GOTRUE_JWT_EXP: ${JWT_EXPIRY}
GOTRUE_JWT_SECRET: ${JWT_SECRET}
GOTRUE_JWT_EXP: 3600
GOTRUE_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long

GOTRUE_EXTERNAL_EMAIL_ENABLED: ${ENABLE_EMAIL_SIGNUP}
GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: ${ENABLE_ANONYMOUS_USERS}
GOTRUE_MAILER_AUTOCONFIRM: ${ENABLE_EMAIL_AUTOCONFIRM}
GOTRUE_EXTERNAL_EMAIL_ENABLED: true
GOTRUE_EXTERNAL_ANONYMOUS_USERS_ENABLED: false
GOTRUE_MAILER_AUTOCONFIRM: false

# Uncomment to bypass nonce check in ID Token flow. Commonly set to true when using Google Sign In on mobile.
# GOTRUE_EXTERNAL_SKIP_NONCE_CHECK: true

# GOTRUE_MAILER_SECURE_EMAIL_CHANGE_ENABLED: true
# GOTRUE_SMTP_MAX_FREQUENCY: 1s
GOTRUE_SMTP_ADMIN_EMAIL: ${SMTP_ADMIN_EMAIL}
GOTRUE_SMTP_HOST: ${SMTP_HOST}
GOTRUE_SMTP_PORT: ${SMTP_PORT}
GOTRUE_SMTP_USER: ${SMTP_USER}
GOTRUE_SMTP_PASS: ${SMTP_PASS}
GOTRUE_SMTP_SENDER_NAME: ${SMTP_SENDER_NAME}
GOTRUE_MAILER_URLPATHS_INVITE: ${MAILER_URLPATHS_INVITE}
GOTRUE_MAILER_URLPATHS_CONFIRMATION: ${MAILER_URLPATHS_CONFIRMATION}
GOTRUE_MAILER_URLPATHS_RECOVERY: ${MAILER_URLPATHS_RECOVERY}
GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: ${MAILER_URLPATHS_EMAIL_CHANGE}
GOTRUE_SMTP_ADMIN_EMAIL: admin@example.com
GOTRUE_SMTP_HOST: supabase-mail
GOTRUE_SMTP_PORT: 2500
GOTRUE_SMTP_USER: fake_mail_user
GOTRUE_SMTP_PASS: fake_mail_password
GOTRUE_SMTP_SENDER_NAME: fake_sender
GOTRUE_MAILER_URLPATHS_INVITE: /auth/v1/verify
GOTRUE_MAILER_URLPATHS_CONFIRMATION: /auth/v1/verify
GOTRUE_MAILER_URLPATHS_RECOVERY: /auth/v1/verify
GOTRUE_MAILER_URLPATHS_EMAIL_CHANGE: /auth/v1/verify

GOTRUE_EXTERNAL_PHONE_ENABLED: ${ENABLE_PHONE_SIGNUP}
GOTRUE_SMS_AUTOCONFIRM: ${ENABLE_PHONE_AUTOCONFIRM}
GOTRUE_EXTERNAL_PHONE_ENABLED: true
GOTRUE_SMS_AUTOCONFIRM: true
# Uncomment to enable custom access token hook. Please see: https://supabase.com/docs/guides/auth/auth-hooks for full list of hooks and additional details about custom_access_token_hook

# GOTRUE_HOOK_CUSTOM_ACCESS_TOKEN_ENABLED: "true"
@@ -168,16 +262,17 @@ services:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
PGRST_DB_URI: postgres://authenticator:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
PGRST_DB_SCHEMAS: ${PGRST_DB_SCHEMAS}
<<: *supabase-env
# Keep any existing environment variables specific to that service
PGRST_DB_URI: postgres://authenticator:your-super-secret-and-long-postgres-password@db:5432/postgres
PGRST_DB_SCHEMAS: public,storage,graphql_public
PGRST_DB_ANON_ROLE: anon
PGRST_JWT_SECRET: ${JWT_SECRET}
PGRST_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
PGRST_DB_USE_LEGACY_GUCS: "false"
PGRST_APP_SETTINGS_JWT_SECRET: ${JWT_SECRET}
PGRST_APP_SETTINGS_JWT_EXP: ${JWT_EXPIRY}
PGRST_APP_SETTINGS_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
PGRST_APP_SETTINGS_JWT_EXP: 3600
command:
[
"postgrest"
@@ -192,8 +287,6 @@ services:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
healthcheck:
test:
[
@@ -204,23 +297,26 @@ services:
"-o",
"/dev/null",
"-H",
"Authorization: Bearer ${ANON_KEY}",
"Authorization: Bearer eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE",
"http://localhost:4000/api/tenants/realtime-dev/health"
]
timeout: 5s
interval: 5s
retries: 3
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
PORT: 4000
DB_HOST: ${POSTGRES_HOST}
DB_PORT: ${POSTGRES_PORT}
DB_HOST: db
DB_PORT: 5432
DB_USER: supabase_admin
DB_PASSWORD: ${POSTGRES_PASSWORD}
DB_NAME: ${POSTGRES_DB}
DB_PASSWORD: your-super-secret-and-long-postgres-password
DB_NAME: postgres
DB_AFTER_CONNECT_QUERY: 'SET search_path TO _realtime'
DB_ENC_KEY: supabaserealtime
API_JWT_SECRET: ${JWT_SECRET}
SECRET_KEY_BASE: ${SECRET_KEY_BASE}
API_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
ERL_AFLAGS: -proto_dist inet_tcp
DNS_NODES: "''"
RLIMIT_NOFILE: "10000"
@@ -256,12 +352,15 @@ services:
condition: service_started
imgproxy:
condition: service_started
<<: *supabase-env-files
environment:
ANON_KEY: ${ANON_KEY}
SERVICE_KEY: ${SERVICE_ROLE_KEY}
<<: *supabase-env
# Keep any existing environment variables specific to that service
ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SERVICE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
POSTGREST_URL: http://rest:3000
PGRST_JWT_SECRET: ${JWT_SECRET}
DATABASE_URL: postgres://supabase_storage_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
PGRST_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
DATABASE_URL: postgres://supabase_storage_admin:your-super-secret-and-long-postgres-password@db:5432/postgres
FILE_SIZE_LIMIT: 52428800
STORAGE_BACKEND: file
FILE_STORAGE_BACKEND_PATH: /var/lib/storage
@@ -288,11 +387,14 @@ services:
timeout: 5s
interval: 5s
retries: 3
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
IMGPROXY_BIND: ":5001"
IMGPROXY_LOCAL_FILESYSTEM_ROOT: /
IMGPROXY_USE_ETAG: "true"
IMGPROXY_ENABLE_WEBP_DETECTION: ${IMGPROXY_ENABLE_WEBP_DETECTION}
IMGPROXY_ENABLE_WEBP_DETECTION: true

meta:
container_name: supabase-meta
@@ -302,15 +404,16 @@ services:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
PG_META_PORT: 8080
PG_META_DB_HOST: ${POSTGRES_HOST}
PG_META_DB_PORT: ${POSTGRES_PORT}
PG_META_DB_NAME: ${POSTGRES_DB}
PG_META_DB_HOST: db
PG_META_DB_PORT: 5432
PG_META_DB_NAME: postgres
PG_META_DB_USER: supabase_admin
PG_META_DB_PASSWORD: ${POSTGRES_PASSWORD}
PG_META_DB_PASSWORD: your-super-secret-and-long-postgres-password

functions:
container_name: supabase-edge-functions
@@ -318,17 +421,17 @@ services:
restart: unless-stopped
volumes:
- ./volumes/functions:/home/deno/functions:Z
depends_on:
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
JWT_SECRET: ${JWT_SECRET}
<<: *supabase-env
# Keep any existing environment variables specific to that service
JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
SUPABASE_URL: http://kong:8000
SUPABASE_ANON_KEY: ${ANON_KEY}
SUPABASE_SERVICE_ROLE_KEY: ${SERVICE_ROLE_KEY}
SUPABASE_DB_URL: postgresql://postgres:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
SUPABASE_ANON_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
SUPABASE_SERVICE_ROLE_KEY: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
SUPABASE_DB_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres
# TODO: Allow configuring VERIFY_JWT per function. This PR might help: https://github.com/supabase/cli/pull/786
VERIFY_JWT: "${FUNCTIONS_VERIFY_JWT}"
VERIFY_JWT: "false"
command:
[
"start",
@@ -362,26 +465,29 @@ services:
db:
# Disable this if you are using an external Postgres database
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
LOGFLARE_NODE_HOST: 127.0.0.1
DB_USERNAME: supabase_admin
DB_DATABASE: _supabase
DB_HOSTNAME: ${POSTGRES_HOST}
DB_PORT: ${POSTGRES_PORT}
DB_PASSWORD: ${POSTGRES_PASSWORD}
DB_HOSTNAME: db
DB_PORT: 5432
DB_PASSWORD: your-super-secret-and-long-postgres-password
DB_SCHEMA: _analytics
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key
LOGFLARE_SINGLE_TENANT: true
LOGFLARE_SUPABASE_MODE: true
LOGFLARE_MIN_CLUSTER_SIZE: 1

# Comment variables to use Big Query backend for analytics
POSTGRES_BACKEND_URL: postgresql://supabase_admin:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/_supabase
POSTGRES_BACKEND_URL: postgresql://supabase_admin:your-super-secret-and-long-postgres-password@db:5432/_supabase
POSTGRES_BACKEND_SCHEMA: _analytics
LOGFLARE_FEATURE_FLAG_OVERRIDE: multibackend=true
# Uncomment to use Big Query backend for analytics
# GOOGLE_PROJECT_ID: ${GOOGLE_PROJECT_ID}
# GOOGLE_PROJECT_NUMBER: ${GOOGLE_PROJECT_NUMBER}
# GOOGLE_PROJECT_ID: GOOGLE_PROJECT_ID
# GOOGLE_PROJECT_NUMBER: GOOGLE_PROJECT_NUMBER

# Comment out everything below this point if you are using an external Postgres database
db:
@@ -419,19 +525,19 @@ services:
interval: 5s
timeout: 5s
retries: 10
depends_on:
vector:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
POSTGRES_HOST: /var/run/postgresql
PGPORT: ${POSTGRES_PORT}
POSTGRES_PORT: ${POSTGRES_PORT}
PGPASSWORD: ${POSTGRES_PASSWORD}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
PGDATABASE: ${POSTGRES_DB}
POSTGRES_DB: ${POSTGRES_DB}
JWT_SECRET: ${JWT_SECRET}
JWT_EXP: ${JWT_EXPIRY}
PGPORT: 5432
POSTGRES_PORT: 5432
PGPASSWORD: your-super-secret-and-long-postgres-password
POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
PGDATABASE: postgres
POSTGRES_DB: postgres
JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
JWT_EXP: 3600
command:
[
"postgres",
@@ -447,7 +553,7 @@ services:
restart: unless-stopped
volumes:
- ./volumes/logs/vector.yml:/etc/vector/vector.yml:ro
- ${DOCKER_SOCKET_LOCATION}:/var/run/docker.sock:ro
- /var/run/docker.sock:/var/run/docker.sock:ro
healthcheck:
test:
[
@@ -461,8 +567,11 @@ services:
timeout: 5s
interval: 5s
retries: 3
<<: *supabase-env-files
environment:
LOGFLARE_API_KEY: ${LOGFLARE_API_KEY}
<<: *supabase-env
# Keep any existing environment variables specific to that service
LOGFLARE_API_KEY: your-super-secret-and-long-logflare-key
command:
[
"--config",
@@ -475,8 +584,8 @@ services:
image: supabase/supavisor:2.4.12
restart: unless-stopped
ports:
- ${POSTGRES_PORT}:5432
- ${POOLER_PROXY_PORT_TRANSACTION}:6543
- 5432:5432
- 6543:6543
volumes:
- ./volumes/pooler/pooler.exs:/etc/pooler/pooler.exs:ro
healthcheck:
@@ -498,22 +607,25 @@ services:
condition: service_healthy
analytics:
condition: service_healthy
<<: *supabase-env-files
environment:
<<: *supabase-env
# Keep any existing environment variables specific to that service
PORT: 4000
POSTGRES_PORT: ${POSTGRES_PORT}
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
DATABASE_URL: ecto://supabase_admin:${POSTGRES_PASSWORD}@db:${POSTGRES_PORT}/_supabase
POSTGRES_PORT: 5432
POSTGRES_DB: postgres
POSTGRES_PASSWORD: your-super-secret-and-long-postgres-password
DATABASE_URL: ecto://supabase_admin:your-super-secret-and-long-postgres-password@db:5432/_supabase
CLUSTER_POSTGRES: true
SECRET_KEY_BASE: ${SECRET_KEY_BASE}
VAULT_ENC_KEY: ${VAULT_ENC_KEY}
API_JWT_SECRET: ${JWT_SECRET}
METRICS_JWT_SECRET: ${JWT_SECRET}
SECRET_KEY_BASE: UpNVntn3cDxHJpq99YMc1T1AQgQpc8kfYTuRgBiYa15BLrx8etQoXz3gZv1/u2oq
VAULT_ENC_KEY: your-encryption-key-32-chars-min
API_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
METRICS_JWT_SECRET: your-super-secret-jwt-token-with-at-least-32-characters-long
REGION: local
ERL_AFLAGS: -proto_dist inet_tcp
POOLER_TENANT_ID: ${POOLER_TENANT_ID}
POOLER_DEFAULT_POOL_SIZE: ${POOLER_DEFAULT_POOL_SIZE}
POOLER_MAX_CLIENT_CONN: ${POOLER_MAX_CLIENT_CONN}
POOLER_TENANT_ID: your-tenant-id
POOLER_DEFAULT_POOL_SIZE: 20
POOLER_MAX_CLIENT_CONN: 100
POOLER_POOL_MODE: transaction
command:
[

@@ -34,11 +34,11 @@ else
echo "No .env file found. Skipping .env removal step..."
fi

if [ -f ".env.example" ]; then
echo "Copying .env.example to .env..."
cp .env.example .env
if [ -f ".env.default" ]; then
echo "Copying .env.default to .env..."
cp .env.default .env
else
echo ".env.example file not found. Skipping .env reset step..."
echo ".env.default file not found. Skipping .env reset step..."
fi

echo "Cleanup complete!"
@@ -1,9 +1,39 @@
# Environment Variable Loading Order (first → last, later overrides earlier):
# 1. backend/.env.default - Default values for all settings
# 2. backend/.env - User's custom configuration (if exists)
# 3. environment key - Docker-specific overrides defined below
# 4. Shell environment - Variables exported before running docker compose
# 5. CLI arguments - docker compose run -e VAR=value

# Common backend environment - Docker service names
x-backend-env:
&backend-env # Docker internal service hostnames (override localhost defaults)
PYRO_HOST: "0.0.0.0"
AGENTSERVER_HOST: rest_server
SCHEDULER_HOST: scheduler_server
DATABASEMANAGER_HOST: database_manager
EXECUTIONMANAGER_HOST: executor
NOTIFICATIONMANAGER_HOST: notification_server
CLAMAV_SERVICE_HOST: clamav
DB_HOST: db
REDIS_HOST: redis
RABBITMQ_HOST: rabbitmq
# Override Supabase URL for Docker network
SUPABASE_URL: http://kong:8000

# Common env_file configuration for backend services
x-backend-env-files: &backend-env-files
env_file:
- backend/.env.default # Base defaults (always exists)
- path: backend/.env # User overrides (optional)
required: false

services:
migrate:
build:
context: ../
dockerfile: autogpt_platform/backend/Dockerfile
target: server
target: migrate
command: ["sh", "-c", "poetry run prisma migrate deploy"]
develop:
watch:
@@ -20,10 +50,11 @@ services:
- app-network
restart: on-failure
healthcheck:
test: ["CMD", "poetry", "run", "prisma", "migrate", "status"]
interval: 10s
timeout: 5s
retries: 5
test: ["CMD-SHELL", "poetry run prisma migrate status | grep -q 'No pending migrations' || exit 1"]
interval: 30s
timeout: 10s
retries: 3
start_period: 5s

redis:
image: redis:latest
@@ -73,29 +104,12 @@ services:
condition: service_completed_successfully
rabbitmq:
condition: service_healthy
<<: *backend-env-files
environment:
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- DIRECT_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- REDIS_HOST=redis
- REDIS_PORT=6379
- RABBITMQ_HOST=rabbitmq
- RABBITMQ_PORT=5672
- RABBITMQ_DEFAULT_USER=rabbitmq_user_default
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
- REDIS_PASSWORD=password
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- SCHEDULER_HOST=scheduler_server
- EXECUTIONMANAGER_HOST=executor
- NOTIFICATIONMANAGER_HOST=notification_server
- CLAMAV_SERVICE_HOST=clamav
- NEXT_PUBLIC_FRONTEND_BASE_URL=http://localhost:3000
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
- UNSUBSCRIBE_SECRET_KEY=HlP8ivStJjmbf6NKi78m_3FnOogut0t5ckzjsIqeaio= # DO NOT USE IN PRODUCTION!!
<<: *backend-env
# Service-specific overrides
DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
ports:
- "8006:8006"
networks:
@@ -123,26 +137,12 @@ services:
condition: service_completed_successfully
database_manager:
condition: service_started
<<: *backend-env-files
environment:
- DATABASEMANAGER_HOST=database_manager
- SUPABASE_URL=http://kong:8000
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- SUPABASE_SERVICE_ROLE_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJzZXJ2aWNlX3JvbGUiLAogICAgImlzcyI6ICJzdXBhYmFzZS1kZW1vIiwKICAgICJpYXQiOiAxNjQxNzY5MjAwLAogICAgImV4cCI6IDE3OTk1MzU2MDAKfQ.DaYlNEoUrrEn2Ig7tqibS-PHK5vgusbcbo7X36XVt4Q
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- DIRECT_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- RABBITMQ_HOST=rabbitmq
- RABBITMQ_PORT=5672
- RABBITMQ_DEFAULT_USER=rabbitmq_user_default
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- AGENTSERVER_HOST=rest_server
- NOTIFICATIONMANAGER_HOST=notification_server
- CLAMAV_SERVICE_HOST=clamav
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
<<: *backend-env
# Service-specific overrides
DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
ports:
- "8002:8002"
networks:
@@ -168,22 +168,12 @@ services:
condition: service_completed_successfully
database_manager:
condition: service_started
<<: *backend-env-files
environment:
- DATABASEMANAGER_HOST=database_manager
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- DIRECT_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
# - RABBITMQ_HOST=rabbitmq
# - RABBITMQ_PORT=5672
# - RABBITMQ_DEFAULT_USER=rabbitmq_user_default
# - RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]

<<: *backend-env
# Service-specific overrides
DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
ports:
- "8001:8001"
networks:
@@ -205,11 +195,12 @@ services:
condition: service_healthy
migrate:
condition: service_completed_successfully
<<: *backend-env-files
environment:
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- DIRECT_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- PYRO_HOST=0.0.0.0
- ENCRYPTION_KEY=dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw= # DO NOT USE IN PRODUCTION!!
<<: *backend-env
# Service-specific overrides
DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
ports:
- "8005:8005"
networks:
@@ -250,23 +241,12 @@ services:
# interval: 10s
# timeout: 10s
# retries: 5
<<: *backend-env-files
environment:
- DATABASEMANAGER_HOST=database_manager
- NOTIFICATIONMANAGER_HOST=notification_server
- SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
- DATABASE_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- DIRECT_URL=postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- RABBITMQ_HOST=rabbitmq
- RABBITMQ_PORT=5672
- RABBITMQ_DEFAULT_USER=rabbitmq_user_default
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]

<<: *backend-env
# Service-specific overrides
DATABASE_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
DIRECT_URL: postgresql://postgres:your-super-secret-and-long-postgres-password@db:5432/postgres?connect_timeout=60&schema=platform
ports:
- "8003:8003"
networks:
@@ -292,52 +272,39 @@ services:
condition: service_completed_successfully
database_manager:
condition: service_started
<<: *backend-env-files
environment:
- DATABASEMANAGER_HOST=database_manager
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- RABBITMQ_HOST=rabbitmq
- RABBITMQ_PORT=5672
- RABBITMQ_DEFAULT_USER=rabbitmq_user_default
- RABBITMQ_DEFAULT_PASS=k0VMxyIJF9S35f3x2uaw5IWAl6Y536O7
- ENABLE_AUTH=true
- PYRO_HOST=0.0.0.0
- BACKEND_CORS_ALLOW_ORIGINS=["http://localhost:3000"]

<<: *backend-env
ports:
- "8007:8007"
networks:
- app-network

# frontend:
#   build:
#     context: ../
#     dockerfile: autogpt_platform/frontend/Dockerfile
#     target: dev
#   depends_on:
#     db:
#       condition: service_healthy
#     rest_server:
#       condition: service_started
#     websocket_server:
#       condition: service_started
#     migrate:
#       condition: service_completed_successfully
#   environment:
#     - NEXT_PUBLIC_SUPABASE_URL=http://kong:8000
#     - NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
#     - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/postgres?connect_timeout=60&schema=platform
#     - DIRECT_URL=postgresql://agpt_user:pass123@postgres:5432/postgres?connect_timeout=60&schema=platform
#     - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
#     - NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
#     - NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
#     - NEXT_PUBLIC_BEHAVE_AS=LOCAL
#   ports:
#     - "3000:3000"
#   networks:
#     - app-network

frontend:
build:
context: ../
dockerfile: autogpt_platform/frontend/Dockerfile
target: prod
depends_on:
db:
condition: service_healthy
migrate:
condition: service_completed_successfully
ports:
- "3000:3000"
networks:
- app-network
# Load environment variables in order (later overrides earlier)
env_file:
- path: ./frontend/.env.default # Base defaults (always exists)
- path: ./frontend/.env # User overrides (optional)
required: false
environment:
# Server-side environment variables (Docker service names)
# These override the localhost URLs from env files when running in Docker
AUTH_CALLBACK_URL: http://rest_server:8006/auth/callback
SUPABASE_URL: http://kong:8000
AGPT_SERVER_URL: http://rest_server:8006/api
AGPT_WS_SERVER_URL: ws://websocket_server:8001/ws
networks:
app-network:
driver: bridge

@@ -20,6 +20,7 @@ x-supabase-services:
    - app-network
    - shared-network

services:
  # AGPT services
  migrate:
@@ -96,19 +97,13 @@ services:
      timeout: 10s
      retries: 3

  # frontend:
  #   <<: *agpt-services
  #   extends:
  #     file: ./docker-compose.platform.yml
  #     service: frontend

  # Supabase services
  studio:
    <<: *supabase-services
  frontend:
    <<: *agpt-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: studio
      file: ./docker-compose.platform.yml
      service: frontend

  # Supabase services (minimal: auth + db + kong)
  kong:
    <<: *supabase-services
    extends:
@@ -123,61 +118,35 @@ services:
    environment:
      GOTRUE_MAILER_AUTOCONFIRM: true

  rest:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: rest

  realtime:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: realtime

  storage:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: storage

  imgproxy:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: imgproxy

  meta:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: meta

  functions:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: functions

  analytics:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: analytics

  db:
    <<: *supabase-services
    extends:
      file: ./db/docker/docker-compose.yml
      service: db
    ports:
      - ${POSTGRES_PORT}:5432 # We don't use Supavisor locally, so we expose the db directly.
      - 5432:5432 # We don't use Supavisor locally, so we expose the db directly.

  vector:
  # Studio and its dependencies for local development only
  meta:
    <<: *supabase-services
    profiles:
      - local
    extends:
      file: ./db/docker/docker-compose.yml
      service: vector
      service: meta

  studio:
    <<: *supabase-services
    profiles:
      - local
    extends:
      file: ./db/docker/docker-compose.yml
      service: studio
    depends_on:
      meta:
        condition: service_healthy
    # environment:
    #   NEXT_PUBLIC_ENABLE_LOGS: false # Disable analytics/logging features

  deps:
    <<: *supabase-services
@@ -186,13 +155,24 @@ services:
    image: busybox
    command: /bin/true
    depends_on:
      - studio
      - kong
      - auth
      - meta
      - analytics
      - db
      - vector
      - studio
      - redis
      - rabbitmq
      - clamav
      - migrate

  deps_backend:
    <<: *agpt-services
    profiles:
      - local
    image: busybox
    command: /bin/true
    depends_on:
      - deps
      - rest_server
      - executor
      - websocket_server
      - database_manager
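Note that `meta`, `studio`, and the `deps_backend` helper above are gated behind the `local` profile, so they only start when that profile is explicitly requested, e.g.:

```bash
# Bring up the backend dependency set, including the local-only Studio services
docker compose --profile local up deps_backend -d
```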
20
autogpt_platform/frontend/.env.default
Normal file
@@ -0,0 +1,20 @@
NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE

NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_FRONTEND_BASE_URL=http://localhost:3000

NEXT_PUBLIC_APP_ENV=local
NEXT_PUBLIC_BEHAVE_AS=LOCAL

NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=687ab1372f497809b131e06e

NEXT_PUBLIC_SHOW_BILLING_PAGE=false
NEXT_PUBLIC_TURNSTILE=disabled
NEXT_PUBLIC_REACT_QUERY_DEVTOOL=true

NEXT_PUBLIC_GA_MEASUREMENT_ID=G-FH2XK2W4GN
NEXT_PUBLIC_PW_TEST=true
@@ -1,44 +0,0 @@
NEXT_PUBLIC_FRONTEND_BASE_URL=http://localhost:3000

NEXT_PUBLIC_AUTH_CALLBACK_URL=http://localhost:8006/auth/callback
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
NEXT_PUBLIC_LAUNCHDARKLY_ENABLED=false
NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID=687ab1372f497809b131e06e # Local environment on LaunchDarkly
NEXT_PUBLIC_APP_ENV=local

NEXT_PUBLIC_AGPT_SERVER_BASE_URL=http://localhost:8006

## Locale settings

NEXT_PUBLIC_DEFAULT_LOCALE=en
NEXT_PUBLIC_LOCALES=en,es

## Supabase credentials

NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE

## OAuth Callback URL
## This should be {domain}/auth/callback
## Only used if you're using Supabase and OAuth
AUTH_CALLBACK_URL="${NEXT_PUBLIC_FRONTEND_BASE_URL}/auth/callback"
GA_MEASUREMENT_ID=G-FH2XK2W4GN

# When running locally, set NEXT_PUBLIC_BEHAVE_AS=CLOUD to use a locally hosted marketplace (as is typical in development, and in the cloud deployment); otherwise set it to LOCAL to have the marketplace open in a new tab
NEXT_PUBLIC_BEHAVE_AS=LOCAL
NEXT_PUBLIC_SHOW_BILLING_PAGE=false

## Cloudflare Turnstile (CAPTCHA) Configuration
## Get these from the Cloudflare Turnstile dashboard: https://dash.cloudflare.com/?to=/:account/turnstile
## This is the frontend site key
NEXT_PUBLIC_CLOUDFLARE_TURNSTILE_SITE_KEY=
NEXT_PUBLIC_TURNSTILE=disabled

# Devtools
NEXT_PUBLIC_REACT_QUERY_DEVTOOL=true

# In case you are running Playwright locally
# NEXT_PUBLIC_PW_TEST=true
6
autogpt_platform/frontend/.gitignore
vendored
@@ -31,6 +31,7 @@ yarn.lock
package-lock.json

# local env files
.env
.env*.local

# vercel
@@ -53,4 +54,7 @@ storybook-static
*.ignore.*
*.ign.*
!.npmrc
.cursorrules

# Generated API files
src/app/api/__generated__/
@@ -5,18 +5,16 @@ RUN corepack enable
COPY autogpt_platform/frontend/package.json autogpt_platform/frontend/pnpm-lock.yaml ./
RUN --mount=type=cache,target=/root/.local/share/pnpm pnpm install --frozen-lockfile

# Dev stage
FROM base AS dev
ENV NODE_ENV=development
ENV HOSTNAME=0.0.0.0
COPY autogpt_platform/frontend/ .
EXPOSE 3000
CMD ["pnpm", "run", "dev", "--hostname", "0.0.0.0"]

# Build stage for prod
FROM base AS build

COPY autogpt_platform/frontend/ .
ENV SKIP_STORYBOOK_TESTS=true
RUN if [ -f .env ]; then \
      cat .env.default .env > .env.merged && mv .env.merged .env; \
    else \
      cp .env.default .env; \
    fi
RUN pnpm run generate:api
RUN pnpm build

# Prod stage - based on NextJS reference Dockerfile https://github.com/vercel/next.js/blob/64271354533ed16da51be5dce85f0dbd15f17517/examples/with-docker/Dockerfile
@@ -18,31 +18,58 @@ Make sure you have Node.js 16.10+ installed. Corepack is included with Node.js b
>
> Then follow the setup steps below.

### Setup
## Setup

1. **Enable corepack** (run this once on your system):
### 1. **Enable corepack** (run this once on your system):

```bash
corepack enable
```

This enables corepack to automatically manage pnpm based on the `packageManager` field in `package.json`.
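For reference, the field corepack reads is a one-liner in `package.json` (the version shown here is illustrative; the repo pins its own):

```json
{
  "packageManager": "pnpm@9.15.0"
}
```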
2. **Install dependencies**:
### 2. **Install dependencies**:

```bash
pnpm i
```

3. **Start the development server**:

```bash
pnpm dev
```

### 3. **Start the development server**:

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.
#### Running the Front-end & Back-end separately

We recommend this approach if you are doing active development on the project. First spin up the Back-end:

```bash
# on `autogpt_platform`
docker compose --profile local up deps_backend -d
# on `autogpt_platform/backend`
poetry run app
```

Then start the Front-end:

```bash
# on `autogpt_platform/frontend`
pnpm dev
```

Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. If the server starts on `http://localhost:3001`, the Front-end is already running via Docker; kill that container or run `docker compose down`.

You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.

#### Running both the Front-end and Back-end via Docker

If you run:

```bash
# on `autogpt_platform`
docker compose up -d
```

It will spin up the Back-end and Front-end via Docker. The Front-end will start on port `3000`. This might not be what you want when actively contributing to the Front-end, as you won't have direct/easy access to the Next.js dev server.

### Subsequent Runs

For subsequent development sessions, you only need to run:
@@ -60,12 +87,12 @@ Every time a new Front-end dependency is added by you or others, you will need t
- `pnpm start` - Start production server
- `pnpm lint` - Run ESLint and Prettier checks
- `pnpm format` - Format code with Prettier
- `pnpm type-check` - Run TypeScript type checking
- `pnpm types` - Run TypeScript type checking
- `pnpm test` - Run Playwright tests
- `pnpm test-ui` - Run Playwright tests with UI
- `pnpm fetch:openapi` - Fetch OpenAPI spec from backend
- `pnpm generate:api-client` - Generate API client from OpenAPI spec
- `pnpm generate:api-all` - Fetch OpenAPI spec and generate API client
- `pnpm generate:api` - Fetch OpenAPI spec and generate API client

This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.

@@ -88,7 +115,7 @@ This project uses an auto-generated API client powered by [**Orval**](https://or

```bash
# Fetch OpenAPI spec from backend and generate client
pnpm generate:api-all
pnpm generate:api

# Only fetch the OpenAPI spec
pnpm fetch:openapi
@@ -3,13 +3,13 @@
  "version": "0.3.4",
  "private": true,
  "scripts": {
    "dev": "next dev --turbo",
    "build": "cross-env pnpm run generate:api-client && SKIP_STORYBOOK_TESTS=true next build",
    "dev": "pnpm run generate:api:force && next dev --turbo",
    "build": "next build",
    "start": "next start",
    "start:standalone": "cd .next/standalone && node server.js",
    "lint": "next lint && prettier --check .",
    "format": "prettier --write .",
    "type-check": "tsc --noEmit",
    "format": "next lint --fix; prettier --write .",
    "types": "tsc --noEmit",
    "test": "next build --turbo && playwright test",
    "test-ui": "next build --turbo && playwright test --ui",
    "test:no-build": "playwright test",
@@ -18,44 +18,43 @@
    "build-storybook": "storybook build",
    "test-storybook": "test-storybook",
    "test-storybook:ci": "concurrently -k -s first -n \"SB,TEST\" -c \"magenta,blue\" \"pnpm run build-storybook -- --quiet && npx http-server storybook-static --port 6006 --silent\" \"wait-on tcp:6006 && pnpm run test-storybook\"",
    "fetch:openapi": "curl http://localhost:8006/openapi.json > ./src/app/api/openapi.json && prettier --write ./src/app/api/openapi.json",
    "generate:api-client": "orval --config ./orval.config.ts",
    "generate:api-all": "pnpm run fetch:openapi && pnpm run generate:api-client"
    "generate:api": "npx --yes tsx ./scripts/generate-api-queries.ts && orval --config ./orval.config.ts",
    "generate:api:force": "npx --yes tsx ./scripts/generate-api-queries.ts --force && orval --config ./orval.config.ts"
  },
  "browserslist": [
    "defaults"
  ],
  "dependencies": {
    "@faker-js/faker": "9.9.0",
    "@hookform/resolvers": "5.2.0",
    "@next/third-parties": "15.4.4",
    "@hookform/resolvers": "5.2.1",
    "@next/third-parties": "15.4.6",
    "@phosphor-icons/react": "2.1.10",
    "@radix-ui/react-alert-dialog": "1.1.14",
    "@radix-ui/react-alert-dialog": "1.1.15",
    "@radix-ui/react-avatar": "1.1.10",
    "@radix-ui/react-checkbox": "1.3.2",
    "@radix-ui/react-collapsible": "1.1.11",
    "@radix-ui/react-context-menu": "2.2.15",
    "@radix-ui/react-dialog": "1.1.14",
    "@radix-ui/react-dropdown-menu": "2.1.15",
    "@radix-ui/react-checkbox": "1.3.3",
    "@radix-ui/react-collapsible": "1.1.12",
    "@radix-ui/react-context-menu": "2.2.16",
    "@radix-ui/react-dialog": "1.1.15",
    "@radix-ui/react-dropdown-menu": "2.1.16",
    "@radix-ui/react-icons": "1.3.2",
    "@radix-ui/react-label": "2.1.7",
    "@radix-ui/react-popover": "1.1.14",
    "@radix-ui/react-radio-group": "1.3.7",
    "@radix-ui/react-scroll-area": "1.2.9",
    "@radix-ui/react-select": "2.2.5",
    "@radix-ui/react-popover": "1.1.15",
    "@radix-ui/react-radio-group": "1.3.8",
    "@radix-ui/react-scroll-area": "1.2.10",
    "@radix-ui/react-select": "2.2.6",
    "@radix-ui/react-separator": "1.1.7",
    "@radix-ui/react-slot": "1.2.3",
    "@radix-ui/react-switch": "1.2.5",
    "@radix-ui/react-tabs": "1.1.12",
    "@radix-ui/react-toast": "1.2.14",
    "@radix-ui/react-tooltip": "1.2.7",
    "@radix-ui/react-switch": "1.2.6",
    "@radix-ui/react-tabs": "1.1.13",
    "@radix-ui/react-toast": "1.2.15",
    "@radix-ui/react-tooltip": "1.2.8",
    "@sentry/nextjs": "9.42.0",
    "@supabase/ssr": "0.6.1",
    "@supabase/supabase-js": "2.52.1",
    "@tanstack/react-query": "5.83.0",
    "@supabase/supabase-js": "2.55.0",
    "@tanstack/react-query": "5.85.3",
    "@tanstack/react-table": "8.21.3",
    "@types/jaro-winkler": "0.2.4",
    "@xyflow/react": "12.8.2",
    "@xyflow/react": "12.8.3",
    "boring-avatars": "1.11.2",
    "class-variance-authority": "0.7.1",
    "clsx": "2.1.1",
@@ -65,22 +64,22 @@
    "dotenv": "17.2.1",
    "elliptic": "6.6.1",
    "embla-carousel-react": "8.6.0",
    "framer-motion": "12.23.9",
    "framer-motion": "12.23.12",
    "geist": "1.4.2",
    "jaro-winkler": "0.2.8",
    "launchdarkly-react-client-sdk": "3.8.1",
    "lodash": "4.17.21",
    "lucide-react": "0.525.0",
    "lucide-react": "0.539.0",
    "moment": "2.30.1",
    "next": "15.4.4",
    "next": "15.4.6",
    "next-themes": "0.4.6",
    "nuqs": "2.4.3",
    "party-js": "2.2.0",
    "react": "18.3.1",
    "react-day-picker": "9.8.0",
    "react-day-picker": "9.8.1",
    "react-dom": "18.3.1",
    "react-drag-drop-files": "2.4.0",
    "react-hook-form": "7.61.1",
    "react-hook-form": "7.62.0",
    "react-icons": "5.5.0",
    "react-markdown": "9.0.3",
    "react-modal": "3.16.3",
@@ -88,7 +87,7 @@
    "react-window": "1.8.11",
    "recharts": "2.15.3",
    "shepherd.js": "14.5.1",
    "sonner": "2.0.6",
    "sonner": "2.0.7",
    "tailwind-merge": "2.6.0",
    "tailwindcss-animate": "1.0.7",
    "uuid": "11.1.0",
@@ -96,42 +95,42 @@
    "zod": "3.25.76"
  },
  "devDependencies": {
    "@chromatic-com/storybook": "4.0.1",
    "@playwright/test": "1.54.1",
    "@storybook/addon-a11y": "9.0.17",
    "@storybook/addon-docs": "9.0.17",
    "@storybook/addon-links": "9.0.17",
    "@storybook/addon-onboarding": "9.0.17",
    "@storybook/nextjs": "9.0.17",
    "@tanstack/eslint-plugin-query": "5.81.2",
    "@tanstack/react-query-devtools": "5.83.0",
    "@chromatic-com/storybook": "4.1.0",
    "@playwright/test": "1.54.2",
    "@storybook/addon-a11y": "9.1.2",
    "@storybook/addon-docs": "9.1.2",
    "@storybook/addon-links": "9.1.2",
    "@storybook/addon-onboarding": "9.1.2",
    "@storybook/nextjs": "9.1.2",
    "@tanstack/eslint-plugin-query": "5.83.1",
    "@tanstack/react-query-devtools": "5.84.2",
    "@types/canvas-confetti": "1.9.0",
    "@types/lodash": "4.17.20",
    "@types/negotiator": "0.6.4",
    "@types/node": "24.0.15",
    "@types/node": "24.2.1",
    "@types/react": "18.3.17",
    "@types/react-dom": "18.3.5",
    "@types/react-modal": "3.16.3",
    "@types/react-window": "1.8.8",
    "axe-playwright": "2.1.0",
    "chromatic": "13.1.2",
    "chromatic": "13.1.3",
    "concurrently": "9.2.0",
    "cross-env": "7.0.3",
    "eslint": "8.57.1",
    "eslint-config-next": "15.4.2",
    "eslint-plugin-storybook": "9.0.17",
    "eslint-config-next": "15.4.6",
    "eslint-plugin-storybook": "9.1.2",
    "import-in-the-middle": "1.14.2",
    "msw": "2.10.4",
    "msw-storybook-addon": "2.0.5",
    "orval": "7.10.0",
    "orval": "7.11.2",
    "pbkdf2": "3.1.3",
    "postcss": "8.5.6",
    "prettier": "3.6.2",
    "prettier-plugin-tailwindcss": "0.6.14",
    "require-in-the-middle": "7.5.2",
    "storybook": "9.0.17",
    "storybook": "9.1.2",
    "tailwindcss": "3.4.17",
    "typescript": "5.8.3"
    "typescript": "5.9.2"
  },
  "msw": {
    "workerDirectory": [
@@ -45,7 +45,7 @@ export default defineConfig({
  webServer: {
    command: "pnpm start",
    url: "http://localhost:3000",
    reuseExistingServer: !process.env.CI,
    reuseExistingServer: true,
  },

  /* Configure projects for major browsers */
2651
autogpt_platform/frontend/pnpm-lock.yaml
generated
File diff suppressed because it is too large
61
autogpt_platform/frontend/scripts/generate-api-queries.ts
Normal file
@@ -0,0 +1,61 @@
#!/usr/bin/env node

import { getAgptServerBaseUrl } from "@/lib/env-config";
import { execSync } from "child_process";
import * as path from "path";
import * as fs from "fs";

function fetchOpenApiSpec(): void {
  const args = process.argv.slice(2);
  const forceFlag = args.includes("--force");

  const baseUrl = getAgptServerBaseUrl();
  const openApiUrl = `${baseUrl}/openapi.json`;
  const outputPath = path.join(
    __dirname,
    "..",
    "src",
    "app",
    "api",
    "openapi.json",
  );

  console.log(`Output path: ${outputPath}`);
  console.log(`Force flag: ${forceFlag}`);

  // Check if local file exists
  const localFileExists = fs.existsSync(outputPath);

  if (!forceFlag && localFileExists) {
    console.log("✅ Using existing local OpenAPI spec file");
    console.log("💡 Use --force flag to fetch from server");
    return;
  }

  if (!localFileExists) {
    console.log("📄 No local OpenAPI spec found, fetching from server...");
  } else {
    console.log(
      "🔄 Force flag detected, fetching fresh OpenAPI spec from server...",
    );
  }

  console.log(`Fetching OpenAPI spec from: ${openApiUrl}`);

  try {
    // Fetch the OpenAPI spec
    execSync(`curl "${openApiUrl}" > "${outputPath}"`, { stdio: "inherit" });

    // Format with prettier
    execSync(`prettier --write "${outputPath}"`, { stdio: "inherit" });

    console.log("✅ OpenAPI spec fetched and formatted successfully");
  } catch (error) {
    console.error("❌ Failed to fetch OpenAPI spec:", error);
    process.exit(1);
  }
}

if (require.main === module) {
  fetchOpenApiSpec();
}
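The script above is invoked through `tsx` by the `generate:api` scripts added to `package.json` earlier in this diff; without `--force` it reuses a previously fetched `src/app/api/openapi.json`:

```bash
# Reuse the cached OpenAPI spec if present, then regenerate the client
pnpm generate:api

# Re-fetch the spec from the running backend first
pnpm generate:api:force
```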
@@ -14,12 +14,7 @@ export async function addDollars(formData: FormData) {
    comments: formData.get("comments") as string,
  };
  const api = new BackendApi();
  const resp = await api.addUserCredits(
    data.user_id,
    data.amount,
    data.comments,
  );
  console.log(resp);
  await api.addUserCredits(data.user_id, data.amount, data.comments);
  revalidatePath("/admin/spending");
}
@@ -29,6 +29,7 @@ function SpendingDashboard({
      </div>

      <Suspense
        key={`${page}-${status}-${search}`}
        fallback={
          <div className="py-10 text-center">Loading submissions...</div>
        }
@@ -0,0 +1,63 @@
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { Plus } from "lucide-react";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  description?: string;
  ai_name?: string;
}

export const AiBlock: React.FC<Props> = ({
  title,
  description,
  className,
  ai_name,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-[5.625rem] w-full min-w-[7.5rem] items-center justify-start space-x-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 px-[0.875rem] py-[0.625rem] text-start shadow-none",
        "hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:pointer-events-none",
        className,
      )}
      {...rest}
    >
      <div className="flex flex-1 flex-col items-start gap-1.5">
        <div className="space-y-0.5">
          <span
            className={cn(
              "line-clamp-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-700 group-disabled:text-zinc-400",
            )}
          >
            {title}
          </span>
          <span
            className={cn(
              "line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            {description}
          </span>
        </div>

        <span
          className={cn(
            "rounded-[0.75rem] bg-zinc-200 px-[0.5rem] font-sans text-xs leading-[1.25rem] text-zinc-500",
          )}
        >
          Supports {ai_name}
        </span>
      </div>
      <div
        className={cn(
          "flex h-7 w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
        )}
      >
        <Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
      </div>
    </Button>
  );
};
@@ -0,0 +1,77 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { beautifyString, cn } from "@/lib/utils";
import { Plus } from "lucide-react";
import React, { ButtonHTMLAttributes } from "react";
import { highlightText } from "./helpers";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  description?: string;
  highlightedText?: string;
}

interface BlockComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const Block: BlockComponent = ({
  title,
  description,
  highlightedText,
  className,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-16 w-full min-w-[7.5rem] items-center justify-start space-x-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 px-[0.875rem] py-[0.625rem] text-start shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:cursor-not-allowed",
        className,
      )}
      {...rest}
    >
      <div className="flex flex-1 flex-col items-start gap-0.5">
        {title && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-800 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(beautifyString(title), highlightedText)}
          </span>
        )}
        {description && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(description, highlightedText)}
          </span>
        )}
      </div>
      <div
        className={cn(
          "flex h-7 w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
        )}
      >
        <Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
      </div>
    </Button>
  );
};

const BlockSkeleton = () => {
  return (
    <Skeleton className="flex h-16 w-full min-w-[7.5rem] animate-pulse items-center justify-start space-x-3 rounded-[0.75rem] bg-zinc-100 px-[0.875rem] py-[0.625rem]">
      <div className="flex flex-1 flex-col items-start gap-0.5">
        <Skeleton className="h-[1.375rem] w-24 rounded bg-zinc-200" />
        <Skeleton className="h-5 w-32 rounded bg-zinc-200" />
      </div>
      <Skeleton className="h-7 w-7 rounded-[0.5rem] bg-zinc-200" />
    </Skeleton>
  );
};

Block.Skeleton = BlockSkeleton;
@@ -0,0 +1,51 @@
import React from "react";
import {
  Popover,
  PopoverContent,
  PopoverTrigger,
} from "@/components/ui/popover";
import { ToyBrick } from "lucide-react";
import { BlockMenuContent } from "../BlockMenuContent/BlockMenuContent";
import { ControlPanelButton } from "../ControlPanelButton";
import { useBlockMenu } from "./useBlockMenu";

interface BlockMenuProps {
  pinBlocksPopover: boolean;
  blockMenuSelected: "save" | "block" | "";
  setBlockMenuSelected: React.Dispatch<
    React.SetStateAction<"" | "save" | "block">
  >;
}

export const BlockMenu: React.FC<BlockMenuProps> = ({
  pinBlocksPopover,
  blockMenuSelected,
  setBlockMenuSelected,
}) => {
  const { open, onOpen } = useBlockMenu({
    pinBlocksPopover,
    setBlockMenuSelected,
  });

  return (
    <Popover open={pinBlocksPopover ? true : open} onOpenChange={onOpen}>
      <PopoverTrigger className="hover:cursor-pointer">
        <ControlPanelButton
          data-id="blocks-control-popover-trigger"
          data-testid="blocks-control-blocks-button"
          selected={blockMenuSelected === "block"}
          className="rounded-none"
        >
          {/* Need to find phosphor icon alternative for this lucide icon */}
          <ToyBrick className="h-5 w-6" strokeWidth={2} />
        </ControlPanelButton>
      </PopoverTrigger>

      <PopoverContent
        side="right"
        align="start"
        sideOffset={16}
        className="absolute h-[75vh] w-[46.625rem] overflow-hidden rounded-[1rem] border-none p-0 shadow-[0_2px_6px_0_rgba(0,0,0,0.05)]"
        data-id="blocks-control-popover-content"
      >
        <BlockMenuContent />
      </PopoverContent>
    </Popover>
  );
};
@@ -0,0 +1,23 @@
import React, { useState } from "react";

interface useBlockMenuProps {
  pinBlocksPopover: boolean;
  setBlockMenuSelected: React.Dispatch<
    React.SetStateAction<"" | "save" | "block">
  >;
}

export const useBlockMenu = ({
  pinBlocksPopover,
  setBlockMenuSelected,
}: useBlockMenuProps) => {
  const [open, setOpen] = useState(false);
  const onOpen = (newOpen: boolean) => {
    if (!pinBlocksPopover) {
      setOpen(newOpen);
      setBlockMenuSelected(newOpen ? "block" : "");
    }
  };

  return {
    open,
    onOpen,
  };
};
@@ -0,0 +1,10 @@
"use client";
import React from "react";

export const BlockMenuContent = () => {
  return (
    <div className="flex h-full w-full flex-col items-center justify-center">
      This is the block menu content
    </div>
  );
};
@@ -0,0 +1,35 @@
// BLOCK MENU TODO: We need a disabled state in this; currently it's not in the design.

import { cn } from "@/lib/utils";
import React from "react";

interface Props extends React.HTMLAttributes<HTMLDivElement> {
  selected?: boolean;
  children?: React.ReactNode; // For icon purpose
  disabled?: boolean;
}

export const ControlPanelButton: React.FC<Props> = ({
  selected = false,
  children,
  disabled,
  className,
  ...rest
}) => {
  return (
    // Using a div instead of a button because it's only for design purposes; we use it to style the PopoverTrigger.
    <div
      role="button"
      className={cn(
        "flex h-[4.25rem] w-[4.25rem] items-center justify-center whitespace-normal bg-white p-[1.38rem] text-zinc-800 shadow-none hover:cursor-pointer hover:bg-zinc-100 hover:text-zinc-950 focus:ring-0",
        selected &&
          "bg-violet-50 text-violet-700 hover:cursor-default hover:bg-violet-50 hover:text-violet-700 active:bg-violet-50 active:text-violet-700",
        disabled && "cursor-not-allowed",
        className,
      )}
      {...rest}
    >
      {children}
    </div>
  );
};
@@ -0,0 +1,54 @@
import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import { X } from "lucide-react";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  selected?: boolean;
  number?: number;
  name?: string;
}

export const FilterChip: React.FC<Props> = ({
  selected = false,
  number,
  name,
  className,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group w-fit space-x-1 rounded-[1.5rem] border border-zinc-300 bg-transparent px-[0.625rem] py-[0.375rem] shadow-none transition-transform duration-300 ease-in-out",
        "hover:border-violet-500 hover:bg-transparent focus:ring-0 disabled:cursor-not-allowed",
        selected && "border-0 bg-violet-700 hover:border",
        className,
      )}
      {...rest}
    >
      <span
        className={cn(
          "font-sans text-sm font-medium leading-[1.375rem] text-zinc-600 group-hover:text-zinc-600 group-disabled:text-zinc-400",
          selected && "text-zinc-50",
        )}
      >
        {name}
      </span>
      {selected && (
        <>
          <span className="flex h-4 w-4 items-center justify-center rounded-full bg-zinc-50 transition-all duration-300 ease-in-out group-hover:hidden">
            <X
              className="h-3 w-3 rounded-full text-violet-700"
              strokeWidth={2}
            />
          </span>
          {number !== undefined && (
            <span className="hidden h-[1.375rem] items-center rounded-[1.25rem] bg-violet-700 p-[0.375rem] text-zinc-50 transition-all duration-300 ease-in-out animate-in fade-in zoom-in group-hover:flex">
              {number > 100 ? "100+" : number}
            </span>
          )}
        </>
      )}
    </Button>
  );
};
@@ -0,0 +1,88 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { beautifyString, cn } from "@/lib/utils";
import Image from "next/image";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  description?: string;
  icon_url?: string;
  number_of_blocks?: number;
}

interface IntegrationComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const Integration: IntegrationComponent = ({
  title,
  icon_url,
  description,
  className,
  number_of_blocks,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-16 w-full min-w-[7.5rem] items-center justify-start space-x-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 px-[0.875rem] py-[0.625rem] text-start shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-50 active:ring-1 active:ring-zinc-300 disabled:pointer-events-none",
        className,
      )}
      {...rest}
    >
      <div className="relative h-[2.625rem] w-[2.625rem] overflow-hidden rounded-[0.5rem] bg-white">
        {icon_url && (
          <Image
            src={icon_url}
            alt="integration-icon"
            fill
            sizes="2.25rem"
            className="w-full rounded-[0.5rem] object-contain group-disabled:opacity-50"
          />
        )}
      </div>

      <div className="w-full">
        <div className="flex items-center justify-between gap-2">
          {title && (
            <p className="line-clamp-1 flex-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-700 group-disabled:text-zinc-400">
              {beautifyString(title)}
            </p>
          )}
          <span className="flex h-[1.375rem] w-[1.6875rem] items-center justify-center rounded-[1.25rem] bg-[#f0f0f0] p-1.5 font-sans text-sm leading-[1.375rem] text-zinc-500 group-disabled:text-zinc-400">
            {number_of_blocks}
          </span>
        </div>
        <span className="line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400">
          {description}
        </span>
      </div>
    </Button>
  );
};

const IntegrationSkeleton: React.FC<{ className?: string }> = ({
  className,
}) => {
  return (
    <Skeleton
      className={cn(
        "flex h-16 w-full min-w-[7.5rem] animate-pulse items-center justify-start space-x-3 rounded-[0.75rem] bg-zinc-100 px-[0.875rem] py-[0.625rem]",
        className,
      )}
    >
      <Skeleton className="h-[2.625rem] w-[2.625rem] rounded-[0.5rem] bg-zinc-200" />
      <div className="flex flex-1 flex-col items-start gap-0.5">
        <div className="flex w-full items-center justify-between">
          <Skeleton className="h-[1.375rem] w-24 rounded bg-zinc-200" />
          <Skeleton className="h-[1.375rem] w-[1.6875rem] rounded-[1.25rem] bg-zinc-200" />
        </div>
        <Skeleton className="h-5 w-[80%] rounded bg-zinc-200" />
      </div>
    </Skeleton>
  );
};

Integration.Skeleton = IntegrationSkeleton;
@@ -0,0 +1,60 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { beautifyString, cn } from "@/lib/utils";
import Image from "next/image";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  name?: string;
  icon_url?: string;
}

interface IntegrationChipComponent extends React.FC<Props> {
  Skeleton: React.FC;
}

export const IntegrationChip: IntegrationChipComponent = ({
  icon_url,
  name,
  className,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "flex h-[3.25rem] w-full min-w-[7.5rem] justify-start gap-2 whitespace-normal rounded-[0.5rem] bg-zinc-50 p-2 pr-3 shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300",
        className,
      )}
      {...rest}
    >
      <div className="relative h-9 w-9 rounded-[0.5rem] bg-transparent">
        {icon_url && (
          <Image
            src={icon_url}
            alt="integration-icon"
            fill
            sizes="2.25rem"
            className="w-full object-contain"
          />
        )}
      </div>
      {name && (
        <span className="truncate font-sans text-sm font-normal leading-[1.375rem] text-zinc-800">
          {beautifyString(name)}
        </span>
      )}
    </Button>
  );
};

const IntegrationChipSkeleton: React.FC = () => {
  return (
    <Skeleton className="flex h-[3.25rem] w-full min-w-[7.5rem] gap-2 rounded-[0.5rem] bg-zinc-100 p-2 pr-3">
      <Skeleton className="h-9 w-12 rounded-[0.5rem] bg-zinc-200" />
      <Skeleton className="h-5 w-24 self-center rounded-sm bg-zinc-200" />
    </Skeleton>
  );
};

IntegrationChip.Skeleton = IntegrationChipSkeleton;
@@ -0,0 +1,99 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { beautifyString, cn } from "@/lib/utils";
import { Plus } from "lucide-react";
import Image from "next/image";
import React, { ButtonHTMLAttributes } from "react";
import { highlightText } from "./helpers";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  description?: string;
  icon_url?: string;
  highlightedText?: string;
}

interface IntegrationBlockComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const IntegrationBlock: IntegrationBlockComponent = ({
  title,
  icon_url,
  description,
  className,
  highlightedText,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-16 w-full min-w-[7.5rem] items-center justify-start gap-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 px-[0.875rem] py-[0.625rem] text-start shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:cursor-not-allowed",
        className,
      )}
      {...rest}
    >
      <div className="relative h-[2.625rem] w-[2.625rem] rounded-[0.5rem] bg-white">
        {icon_url && (
          <Image
            src={icon_url}
            alt="integration-icon"
            fill
            sizes="2.25rem"
            className="w-full object-contain group-disabled:opacity-50"
          />
        )}
      </div>
      <div className="flex flex-1 flex-col items-start gap-0.5">
        {title && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-800 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(beautifyString(title), highlightedText)}
          </span>
        )}
        {description && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(description, highlightedText)}
          </span>
        )}
      </div>
      <div
        className={cn(
          "flex h-7 w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
        )}
      >
        <Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
      </div>
    </Button>
  );
};

const IntegrationBlockSkeleton = ({ className }: { className?: string }) => {
  return (
    <Skeleton
      className={cn(
        "flex h-16 w-full min-w-[7.5rem] animate-pulse items-center justify-start gap-3 rounded-[0.75rem] bg-zinc-100 px-[0.875rem] py-[0.625rem]",
        className,
      )}
    >
      <Skeleton className="h-[2.625rem] w-[2.625rem] rounded-[0.5rem] bg-zinc-200" />
      <div className="flex flex-1 flex-col items-start gap-0.5">
        <Skeleton className="h-[1.375rem] w-24 rounded bg-zinc-200" />
        <Skeleton className="h-5 w-32 rounded bg-zinc-200" />
      </div>
      <Skeleton className="h-7 w-7 rounded-[0.5rem] bg-zinc-200" />
    </Skeleton>
  );
};

IntegrationBlock.Skeleton = IntegrationBlockSkeleton;
@@ -0,0 +1,135 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { cn } from "@/lib/utils";
import { ExternalLink, Loader2, Plus } from "lucide-react";
import Image from "next/image";
import React, { ButtonHTMLAttributes } from "react";
import Link from "next/link";
import { highlightText } from "./helpers";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  creator_name?: string;
  number_of_runs?: number;
  image_url?: string;
  highlightedText?: string;
  slug: string;
  loading: boolean;
}

interface MarketplaceAgentBlockComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const MarketplaceAgentBlock: MarketplaceAgentBlockComponent = ({
  title,
  image_url,
  creator_name,
  number_of_runs,
  className,
  loading,
  highlightedText,
  slug,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-[4.375rem] w-full min-w-[7.5rem] items-center justify-start gap-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 p-[0.625rem] pr-[0.875rem] text-start shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:pointer-events-none",
        className,
      )}
      {...rest}
    >
      <div className="relative h-[3.125rem] w-[5.625rem] overflow-hidden rounded-[0.375rem] bg-white">
        {image_url && (
          <Image
            src={image_url}
            alt="integration-icon"
            fill
            sizes="5.625rem"
            className="w-full object-contain group-disabled:opacity-50"
          />
        )}
      </div>
      <div className="flex flex-1 flex-col items-start gap-0.5">
        {title && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-800 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(title, highlightedText)}
          </span>
        )}
        <div className="flex items-center space-x-2.5">
          <span
            className={cn(
              "truncate font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            By {creator_name}
          </span>

          <span className="font-sans text-zinc-400">•</span>

          <span
            className={cn(
              "truncate font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            {number_of_runs} runs
          </span>
          <span className="font-sans text-zinc-400">•</span>
          <Link
            href={`/marketplace/agent/${creator_name}/${slug}`}
            className="flex gap-0.5 truncate"
            onClick={(e) => e.stopPropagation()}
          >
            <span className="font-sans text-xs leading-5 text-blue-700 underline">
              Agent page
            </span>
            <ExternalLink className="h-4 w-4 text-blue-700" strokeWidth={1} />
          </Link>
        </div>
      </div>
      <div
        className={cn(
          "flex h-7 min-w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
        )}
      >
        {!loading ? (
          <Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
        ) : (
          <Loader2 className="h-5 w-5 animate-spin" />
        )}
      </div>
    </Button>
  );
};

const MarketplaceAgentBlockSkeleton: React.FC<{ className?: string }> = ({
  className,
}) => {
  return (
    <Skeleton
      className={cn(
        "flex h-[4.375rem] w-full min-w-[7.5rem] animate-pulse items-center justify-start gap-3 rounded-[0.75rem] bg-zinc-100 p-[0.625rem] pr-[0.875rem]",
        className,
      )}
    >
      <Skeleton className="h-[3.125rem] w-[5.625rem] rounded-[0.375rem] bg-zinc-200" />
      <div className="flex flex-1 flex-col items-start gap-0.5">
        <Skeleton className="h-[1.375rem] w-24 rounded bg-zinc-200" />
        <div className="flex items-center gap-1">
          <Skeleton className="h-5 w-16 rounded bg-zinc-200" />
          <Skeleton className="h-5 w-16 rounded bg-zinc-200" />
        </div>
      </div>
      <Skeleton className="h-7 w-7 rounded-[0.5rem] bg-zinc-200" />
    </Skeleton>
  );
};

MarketplaceAgentBlock.Skeleton = MarketplaceAgentBlockSkeleton;
@@ -0,0 +1,40 @@
// BLOCK MENU TODO: We need to add a better hover state to it; currently it's not in the design either.

import { Button } from "@/components/ui/button";
import { cn } from "@/lib/utils";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  selected?: boolean;
  number?: number;
  name?: string;
}

export const MenuItem: React.FC<Props> = ({
  selected = false,
  number,
  name,
  className,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "flex h-[2.375rem] w-[12.875rem] justify-between whitespace-normal rounded-[0.5rem] bg-transparent p-2 pl-3 shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0",
        selected && "bg-zinc-100",
        className,
      )}
      {...rest}
    >
      <span className="truncate font-sans text-sm font-medium leading-[1.375rem] text-zinc-800">
        {name}
      </span>
      {number && (
        <span className="font-sans text-sm font-normal leading-[1.375rem] text-zinc-600">
          {number > 100 ? "100+" : number}
        </span>
      )}
    </Button>
  );
};
@@ -0,0 +1,110 @@
import { Separator } from "@/components/ui/separator";
import { cn } from "@/lib/utils";
import React, { useMemo } from "react";
import { BlockMenu } from "../BlockMenu/BlockMenu";
import { useNewControlPanel } from "./useNewControlPanel";
import { NewSaveControl } from "../SaveControl/NewSaveControl";
import { GraphExecutionID } from "@/lib/autogpt-server-api";
import { history } from "@/components/history";
import { ControlPanelButton } from "../ControlPanelButton";
import { ArrowUUpLeftIcon, ArrowUUpRightIcon } from "@phosphor-icons/react";

export type Control = {
  icon: React.ReactNode;
  label: string;
  disabled?: boolean;
  onClick: () => void;
};

interface ControlPanelProps {
  className?: string;
  flowExecutionID: GraphExecutionID | undefined;
  visualizeBeads: "no" | "static" | "animate";
  pinSavePopover: boolean;
  pinBlocksPopover: boolean;
}

export const NewControlPanel = ({
  flowExecutionID,
  visualizeBeads,
  pinSavePopover,
  pinBlocksPopover,
  className,
}: ControlPanelProps) => {
  const {
    blockMenuSelected,
    setBlockMenuSelected,
    agentDescription,
    setAgentDescription,
    saveAgent,
    agentName,
    setAgentName,
    savedAgent,
    isSaving,
    isRunning,
    isStopping,
  } = useNewControlPanel({ flowExecutionID, visualizeBeads });

  const controls: Control[] = useMemo(
    () => [
      {
        label: "Undo",
        icon: <ArrowUUpLeftIcon size={20} weight="bold" />,
        onClick: history.undo,
        disabled: !history.canUndo(),
      },
      {
        label: "Redo",
        icon: <ArrowUUpRightIcon size={20} weight="bold" />,
        onClick: history.redo,
        disabled: !history.canRedo(),
      },
    ],
    [],
  );

  return (
    <section
      className={cn(
        "absolute left-4 top-24 z-10 w-[4.25rem] overflow-hidden rounded-[1rem] border-none bg-white p-0 shadow-[0_1px_5px_0_rgba(0,0,0,0.1)]",
        className,
      )}
    >
      <div className="flex flex-col items-center justify-center rounded-[1rem] p-0">
        <BlockMenu
          pinBlocksPopover={pinBlocksPopover}
          blockMenuSelected={blockMenuSelected}
          setBlockMenuSelected={setBlockMenuSelected}
        />
        <Separator className="text-[#E1E1E1]" />
        {controls.map((control, index) => (
          <ControlPanelButton
            key={index}
            onClick={() => control.onClick()}
            data-id={`control-button-${index}`}
            data-testid={`blocks-control-${control.label.toLowerCase()}-button`}
            disabled={control.disabled || false}
            className="rounded-none"
          >
            {control.icon}
          </ControlPanelButton>
        ))}
        <Separator className="text-[#E1E1E1]" />
        <NewSaveControl
          agentMeta={savedAgent}
          canSave={!isSaving && !isRunning && !isStopping}
          onSave={saveAgent}
          agentDescription={agentDescription}
          onDescriptionChange={setAgentDescription}
          agentName={agentName}
          onNameChange={setAgentName}
          pinSavePopover={pinSavePopover}
          blockMenuSelected={blockMenuSelected}
          setBlockMenuSelected={setBlockMenuSelected}
        />
      </div>
    </section>
  );
};

export default NewControlPanel;
@@ -0,0 +1,35 @@
import useAgentGraph from "@/hooks/useAgentGraph";
import { GraphExecutionID, GraphID } from "@/lib/autogpt-server-api";
import { useSearchParams } from "next/navigation";
import { useState } from "react";

export interface NewControlPanelProps {
  flowExecutionID: GraphExecutionID | undefined;
  visualizeBeads: "no" | "static" | "animate";
}

export const useNewControlPanel = ({
  flowExecutionID,
  visualizeBeads,
}: NewControlPanelProps) => {
  const [blockMenuSelected, setBlockMenuSelected] = useState<
    "save" | "block" | ""
  >("");
  const query = useSearchParams();
  const _graphVersion = query.get("flowVersion");
  const graphVersion = _graphVersion ? parseInt(_graphVersion) : undefined;

  const flowID = (query.get("flowID") as GraphID | null) ?? undefined;
  const {
    agentDescription,
    setAgentDescription,
    saveAgent,
    agentName,
    setAgentName,
    savedAgent,
    isSaving,
    isRunning,
    isStopping,
  } = useAgentGraph(flowID, graphVersion, flowExecutionID, visualizeBeads !== "no");

  return {
    blockMenuSelected,
    setBlockMenuSelected,
    agentDescription,
    setAgentDescription,
    saveAgent,
    agentName,
    setAgentName,
    savedAgent,
    isSaving,
    isRunning,
    isStopping,
  };
};
@@ -0,0 +1,17 @@
import { SmileySadIcon } from "@phosphor-icons/react";

export const NoSearchResult = () => {
  return (
    <div className="flex h-full w-full flex-col items-center justify-center text-center">
      <SmileySadIcon size={64} className="mb-10 text-zinc-400" />
      <div className="space-y-1">
        <p className="font-sans text-sm font-medium leading-[1.375rem] text-zinc-800">
          No match found
        </p>
        <p className="font-sans text-sm font-normal leading-[1.375rem] text-zinc-600">
          Try adjusting your search terms
        </p>
      </div>
    </div>
  );
};
@@ -0,0 +1,158 @@
|
||||
import React, { useCallback, useEffect } from "react";
|
||||
import {
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverTrigger,
|
||||
} from "@/components/ui/popover";
|
||||
import { Card, CardContent, CardFooter } from "@/components/ui/card";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { GraphMeta } from "@/lib/autogpt-server-api";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { IconSave } from "@/components/ui/icons";
|
||||
import { useToast } from "@/components/molecules/Toast/use-toast";
|
||||
import { ControlPanelButton } from "../ControlPanelButton";
|
||||
|
||||
interface SaveControlProps {
|
||||
agentMeta: GraphMeta | null;
|
||||
agentName: string;
|
||||
agentDescription: string;
|
||||
canSave: boolean;
|
||||
onSave: () => void;
|
||||
onNameChange: (name: string) => void;
|
||||
onDescriptionChange: (description: string) => void;
|
||||
pinSavePopover: boolean;
|
||||
|
||||
blockMenuSelected: "save" | "block" | "";
|
||||
  setBlockMenuSelected: React.Dispatch<
    React.SetStateAction<"" | "save" | "block">
  >;
}

export const NewSaveControl = ({
  agentMeta,
  canSave,
  onSave,
  agentName,
  onNameChange,
  agentDescription,
  onDescriptionChange,
  blockMenuSelected,
  setBlockMenuSelected,
  pinSavePopover,
}: SaveControlProps) => {

  const handleSave = useCallback(() => {
    onSave();
  }, [onSave]);

  const { toast } = useToast();

  useEffect(() => {
    const handleKeyDown = (event: KeyboardEvent) => {
      if ((event.ctrlKey || event.metaKey) && event.key === "s") {
        event.preventDefault();
        handleSave();
        toast({
          duration: 2000,
          title: "All changes saved successfully!",
        });
      }
    };

    window.addEventListener("keydown", handleKeyDown);

    return () => {
      window.removeEventListener("keydown", handleKeyDown);
    };
  }, [handleSave, toast]);

  return (
    <Popover
      open={pinSavePopover ? true : undefined}
      onOpenChange={(open) => open || setBlockMenuSelected("")}
    >
      <PopoverTrigger>
        <ControlPanelButton
          data-id="save-control-popover-trigger"
          data-testid="blocks-control-save-button"
          selected={blockMenuSelected === "save"}
          onClick={() => {
            setBlockMenuSelected("save");
          }}
          className="rounded-none"
        >
          {/* Need to find phosphor icon alternative for this lucide icon */}
          <IconSave className="h-5 w-5" strokeWidth={2} />
        </ControlPanelButton>
      </PopoverTrigger>

      <PopoverContent
        side="right"
        sideOffset={16}
        align="start"
        className="w-[17rem] rounded-xl border-none p-0 shadow-none md:w-[30rem]"
        data-id="save-control-popover-content"
      >
        <Card className="border-none shadow-none dark:bg-slate-900">
          <CardContent className="p-4">
            <div className="grid gap-3">
              <Label htmlFor="name" className="dark:text-gray-300">
                Name
              </Label>
              <Input
                id="name"
                placeholder="Enter your agent name"
                className="col-span-3"
                value={agentName}
                onChange={(e) => onNameChange(e.target.value)}
                data-id="save-control-name-input"
                data-testid="save-control-name-input"
                maxLength={100}
              />
              <Label htmlFor="description" className="dark:text-gray-300">
                Description
              </Label>
              <Input
                id="description"
                placeholder="Your agent description"
                className="col-span-3"
                value={agentDescription}
                onChange={(e) => onDescriptionChange(e.target.value)}
                data-id="save-control-description-input"
                data-testid="save-control-description-input"
                maxLength={500}
              />
              {agentMeta?.version && (
                <>
                  <Label htmlFor="version" className="dark:text-gray-300">
                    Version
                  </Label>
                  <Input
                    id="version"
                    placeholder="Version"
                    className="col-span-3"
                    value={agentMeta?.version || "-"}
                    disabled
                    data-testid="save-control-version-output"
                  />
                </>
              )}
            </div>
          </CardContent>
          <CardFooter className="flex flex-col items-stretch gap-2">
            <Button
              className="w-full dark:bg-slate-700 dark:text-slate-100 dark:hover:bg-slate-800"
              onClick={handleSave}
              data-id="save-control-save-agent"
              data-testid="save-control-save-agent-button"
              disabled={!canSave}
            >
              Save Agent
            </Button>
          </CardFooter>
        </Card>
      </PopoverContent>
    </Popover>
  );
};
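
Note on the save-shortcut effect above: the Ctrl/Cmd+S listener generalizes into a small reusable hook. A minimal sketch, assuming only React; the hook name useSaveShortcut is illustrative and not part of this codebase:

    import { useEffect } from "react";

    function useSaveShortcut(onSave: () => void) {
      useEffect(() => {
        const handler = (event: KeyboardEvent) => {
          // Intercept Ctrl+S (Windows/Linux) and Cmd+S (macOS)
          if ((event.ctrlKey || event.metaKey) && event.key === "s") {
            event.preventDefault(); // suppress the browser's own save dialog
            onSave();
          }
        };
        window.addEventListener("keydown", handler);
        // Detach on unmount so stale closures don't keep firing
        return () => window.removeEventListener("keydown", handler);
      }, [onSave]);
    }

Callers should pass a stable (useCallback-wrapped) onSave, as NewSaveControl does, so the listener is not re-registered on every render.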
@@ -0,0 +1,47 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { cn } from "@/lib/utils";
import { ArrowUpRight } from "lucide-react";
import React, { ButtonHTMLAttributes } from "react";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  content?: string;
}

interface SearchHistoryChipComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const SearchHistoryChip: SearchHistoryChipComponent = ({
  content,
  className,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "my-[1px] h-[2.25rem] space-x-1 rounded-[1.5rem] bg-zinc-50 p-[0.375rem] pr-[0.625rem] shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300",
        className,
      )}
      {...rest}
    >
      <ArrowUpRight className="h-6 w-6 text-zinc-500" strokeWidth={1.25} />
      <span className="font-sans text-sm font-normal leading-[1.375rem] text-zinc-800">
        {content}
      </span>
    </Button>
  );
};

const SearchHistoryChipSkeleton: React.FC<{ className?: string }> = ({
  className,
}) => {
  return (
    <Skeleton
      className={cn("h-[2.25rem] w-32 rounded-[1.5rem] bg-zinc-100", className)}
    />
  );
};

SearchHistoryChip.Skeleton = SearchHistoryChipSkeleton;
@@ -0,0 +1,117 @@
import { Button } from "@/components/ui/button";
import { Skeleton } from "@/components/ui/skeleton";
import { cn } from "@/lib/utils";
import { Plus } from "lucide-react";
import Image from "next/image";
import React, { ButtonHTMLAttributes } from "react";
import { highlightText } from "./helpers";
import { formatTimeAgo } from "@/lib/utils/time";

interface Props extends ButtonHTMLAttributes<HTMLButtonElement> {
  title?: string;
  edited_time?: Date;
  version?: number;
  image_url?: string;
  highlightedText?: string;
}

interface UGCAgentBlockComponent extends React.FC<Props> {
  Skeleton: React.FC<{ className?: string }>;
}

export const UGCAgentBlock: UGCAgentBlockComponent = ({
  title,
  image_url,
  edited_time = new Date(),
  version,
  className,
  highlightedText,
  ...rest
}) => {
  return (
    <Button
      className={cn(
        "group flex h-[4.375rem] w-full min-w-[7.5rem] items-center justify-start gap-3 whitespace-normal rounded-[0.75rem] bg-zinc-50 p-[0.625rem] pr-[0.875rem] text-start shadow-none",
        "hover:cursor-default hover:bg-zinc-100 focus:ring-0 active:bg-zinc-100 active:ring-1 active:ring-zinc-300 disabled:cursor-not-allowed",
        className,
      )}
      {...rest}
    >
      {image_url && (
        <div className="relative h-[3.125rem] w-[5.625rem] overflow-hidden rounded-[0.375rem] bg-white">
          <Image
            src={image_url}
            alt="integration-icon"
            fill
            sizes="5.625rem"
            className="w-full object-contain group-disabled:opacity-50"
          />
        </div>
      )}
      <div className="flex flex-1 flex-col items-start gap-0.5">
        {title && (
          <span
            className={cn(
              "line-clamp-1 font-sans text-sm font-medium leading-[1.375rem] text-zinc-800 group-disabled:text-zinc-400",
            )}
          >
            {highlightText(title, highlightedText)}
          </span>
        )}
        <div className="flex items-center space-x-1.5">
          {edited_time && (
            <span
              className={cn(
                "line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
              )}
            >
              Edited {formatTimeAgo(edited_time.toISOString())}
            </span>
          )}

          <span className="font-sans text-zinc-400">•</span>

          <span
            className={cn(
              "line-clamp-1 font-sans text-xs font-normal leading-5 text-zinc-500 group-disabled:text-zinc-400",
            )}
          >
            Version {version}
          </span>
        </div>
      </div>
      <div
        className={cn(
          "flex h-7 w-7 items-center justify-center rounded-[0.5rem] bg-zinc-700 group-disabled:bg-zinc-400",
        )}
      >
        <Plus className="h-5 w-5 text-zinc-50" strokeWidth={2} />
      </div>
    </Button>
  );
};

const UGCAgentBlockSkeleton: React.FC<{ className?: string }> = ({
  className,
}) => {
  return (
    <Skeleton
      className={cn(
        "flex h-[4.375rem] w-full min-w-[7.5rem] animate-pulse items-center justify-start gap-3 rounded-[0.75rem] bg-zinc-100 p-[0.625rem] pr-[0.875rem]",
        className,
      )}
    >
      <Skeleton className="h-[3.125rem] w-[5.625rem] rounded-[0.375rem] bg-zinc-200" />
      <div className="flex flex-1 flex-col items-start gap-0.5">
        <Skeleton className="h-[1.375rem] w-24 rounded bg-zinc-200" />
        <div className="flex items-center gap-1">
          <Skeleton className="h-5 w-16 rounded bg-zinc-200" />
          <Skeleton className="h-5 w-16 rounded bg-zinc-200" />
        </div>
      </div>
      <Skeleton className="h-7 w-7 rounded-[0.5rem] bg-zinc-200" />
    </Skeleton>
  );
};

UGCAgentBlock.Skeleton = UGCAgentBlockSkeleton;
@@ -0,0 +1,22 @@
export const highlightText = (
  text: string | undefined,
  highlight: string | undefined,
) => {
  if (!text || !highlight) return text;

  function escapeRegExp(s: string) {
    return s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  }

  const escaped = escapeRegExp(highlight);
  const parts = text.split(new RegExp(`(${escaped})`, "gi"));
  return parts.map((part, i) =>
    part.toLowerCase() === highlight?.toLowerCase() ? (
      <mark key={i} className="bg-transparent font-bold">
        {part}
      </mark>
    ) : (
      part
    ),
  );
};
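
For reference, the split-and-wrap behavior of highlightText on a concrete input; matching is case-insensitive and non-matching parts stay plain strings:

    // highlightText("AutoGPT Agent", "agent") splits on /(agent)/gi into
    // ["AutoGPT ", "Agent", ""] and returns:
    //   ["AutoGPT ", <mark className="bg-transparent font-bold">Agent</mark>, ""]
    // React renders the plain strings as text nodes, so only the match is bolded.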
@@ -0,0 +1,74 @@
"use client";

import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { useAgentRunsView } from "./useAgentRunsView";
import { AgentRunsLoading } from "./components/AgentRunsLoading";
import { Button } from "@/components/atoms/Button/Button";
import { Plus } from "@phosphor-icons/react";

export function AgentRunsView() {
  const { response, ready, error, agentId } = useAgentRunsView();

  // Handle loading state
  if (!ready) {
    return <AgentRunsLoading />;
  }

  // Handle errors - check for query error first, then response errors
  if (error || (response && response.status !== 200)) {
    return (
      <ErrorCard
        isSuccess={false}
        responseError={error || undefined}
        httpError={
          response?.status !== 200
            ? {
                status: response?.status,
                statusText: "Request failed",
              }
            : undefined
        }
        context="agent"
        onRetry={() => window.location.reload()}
      />
    );
  }

  // Handle missing data
  if (!response?.data) {
    return (
      <ErrorCard
        isSuccess={false}
        responseError={{ message: "No agent data found" }}
        context="agent"
        onRetry={() => window.location.reload()}
      />
    );
  }

  const agent = response.data;

  return (
    <div className="grid h-screen grid-cols-[30%_70%] gap-4 pt-8">
      {/* Left Sidebar - 30% */}
      <div className="bg-gray-50 p-4">
        <Button variant="primary" size="large" className="w-full">
          <Plus size={20} /> New Run
        </Button>
      </div>

      {/* Main Content - 70% */}
      <div className="p-4">
        <Breadcrumbs
          items={[
            { name: "My Library", link: "/library" },
            { name: agent.name, link: `/library/agents/${agentId}` },
          ]}
        />
        {/* Main content will go here */}
        <div className="mt-4 text-gray-600">Main content area</div>
      </div>
    </div>
  );
}
@@ -0,0 +1,25 @@
import React from "react";
import { Skeleton } from "@/components/ui/skeleton";

export function AgentRunsLoading() {
  return (
    <div className="px-6 py-6">
      <div className="flex h-screen w-full gap-4">
        {/* Left Sidebar */}
        <div className="w-80 space-y-4">
          <Skeleton className="h-12 w-full" />
          <Skeleton className="h-32 w-full" />
          <Skeleton className="h-24 w-full" />
        </div>

        {/* Main Content */}
        <div className="flex-1 space-y-4">
          <Skeleton className="h-8 w-full" />
          <Skeleton className="h-12 w-full" />
          <Skeleton className="h-64 w-full" />
          <Skeleton className="h-32 w-full" />
        </div>
      </div>
    </div>
  );
}
@@ -0,0 +1,15 @@
|
||||
import { useGetV2GetLibraryAgent } from "@/app/api/__generated__/endpoints/library/library";
|
||||
import { useParams } from "next/navigation";
|
||||
|
||||
export function useAgentRunsView() {
|
||||
const { id } = useParams();
|
||||
const agentId = id as string;
|
||||
const { data: response, isSuccess, error } = useGetV2GetLibraryAgent(agentId);
|
||||
|
||||
return {
|
||||
agentId: id,
|
||||
ready: isSuccess,
|
||||
error,
|
||||
response,
|
||||
};
|
||||
}
|
||||
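
One nuance in the hook above: in the Next.js app router, useParams() can return string | string[] for a segment, which is what the `as string` cast assumes away. A slightly more defensive variant, illustrative only and not in the codebase:

    const { id } = useParams();
    // A catch-all segment yields an array; normalize to a single string
    const agentId = Array.isArray(id) ? id[0] : (id ?? "");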
@@ -0,0 +1,622 @@
"use client";
import { useParams, useRouter } from "next/navigation";
import { useQueryState } from "nuqs";
import React, {
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from "react";

import {
  Graph,
  GraphExecution,
  GraphExecutionID,
  GraphExecutionMeta,
  GraphID,
  LibraryAgent,
  LibraryAgentID,
  LibraryAgentPreset,
  LibraryAgentPresetID,
  Schedule,
  ScheduleID,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { exportAsJSONFile } from "@/lib/utils";

import DeleteConfirmDialog from "@/components/agptui/delete-confirm-dialog";
import type { ButtonAction } from "@/components/agptui/types";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import { Button } from "@/components/ui/button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import LoadingBox, { LoadingSpinner } from "@/components/ui/loading";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { AgentRunDetailsView } from "./components/agent-run-details-view";
import { AgentRunDraftView } from "./components/agent-run-draft-view";
import { AgentRunsSelectorList } from "./components/agent-runs-selector-list";
import { AgentScheduleDetailsView } from "./components/agent-schedule-details-view";

export function OldAgentLibraryView() {
  const { id: agentID }: { id: LibraryAgentID } = useParams();
  const [executionId, setExecutionId] = useQueryState("executionId");
  const { toast } = useToast();
  const router = useRouter();
  const api = useBackendAPI();

  // ============================ STATE =============================

  const [graph, setGraph] = useState<Graph | null>(null); // Graph version corresponding to LibraryAgent
  const [agent, setAgent] = useState<LibraryAgent | null>(null);
  const [agentRuns, setAgentRuns] = useState<GraphExecutionMeta[]>([]);
  const [agentPresets, setAgentPresets] = useState<LibraryAgentPreset[]>([]);
  const [schedules, setSchedules] = useState<Schedule[]>([]);
  const [selectedView, selectView] = useState<
    | { type: "run"; id?: GraphExecutionID }
    | { type: "preset"; id: LibraryAgentPresetID }
    | { type: "schedule"; id: ScheduleID }
  >({ type: "run" });
  const [selectedRun, setSelectedRun] = useState<
    GraphExecution | GraphExecutionMeta | null
  >(null);
  const selectedSchedule =
    selectedView.type == "schedule"
      ? schedules.find((s) => s.id == selectedView.id)
      : null;
  const [isFirstLoad, setIsFirstLoad] = useState<boolean>(true);
  const [agentDeleteDialogOpen, setAgentDeleteDialogOpen] =
    useState<boolean>(false);
  const [confirmingDeleteAgentRun, setConfirmingDeleteAgentRun] =
    useState<GraphExecutionMeta | null>(null);
  const [confirmingDeleteAgentPreset, setConfirmingDeleteAgentPreset] =
    useState<LibraryAgentPresetID | null>(null);
  const {
    state: onboardingState,
    updateState: updateOnboardingState,
    incrementRuns,
  } = useOnboarding();
  const [copyAgentDialogOpen, setCopyAgentDialogOpen] = useState(false);

  // Set page title with agent name
  useEffect(() => {
    if (agent) {
      document.title = `${agent.name} - Library - AutoGPT Platform`;
    }
  }, [agent]);

  const openRunDraftView = useCallback(() => {
    selectView({ type: "run" });
  }, []);

  const selectRun = useCallback((id: GraphExecutionID) => {
    selectView({ type: "run", id });
  }, []);

  const selectPreset = useCallback((id: LibraryAgentPresetID) => {
    selectView({ type: "preset", id });
  }, []);

  const selectSchedule = useCallback((id: ScheduleID) => {
    selectView({ type: "schedule", id });
  }, []);

  const graphVersions = useRef<Record<number, Graph>>({});
  const loadingGraphVersions = useRef<Record<number, Promise<Graph>>>({});
  const getGraphVersion = useCallback(
    async (graphID: GraphID, version: number) => {
      if (version in graphVersions.current)
        return graphVersions.current[version];
      if (version in loadingGraphVersions.current)
        return loadingGraphVersions.current[version];

      const pendingGraph = api.getGraph(graphID, version).then((graph) => {
        graphVersions.current[version] = graph;
        return graph;
      });
      // Cache promise as well to avoid duplicate requests
      loadingGraphVersions.current[version] = pendingGraph;
      return pendingGraph;
    },
    [api, graphVersions, loadingGraphVersions],
  );

  // Reward user for viewing results of their onboarding agent
  useEffect(() => {
    if (
      !onboardingState ||
      !selectedRun ||
      onboardingState.completedSteps.includes("GET_RESULTS")
    )
      return;

    if (selectedRun.id === onboardingState.onboardingAgentExecutionId) {
      updateOnboardingState({
        completedSteps: [...onboardingState.completedSteps, "GET_RESULTS"],
      });
    }
  }, [selectedRun, onboardingState, updateOnboardingState]);

  const lastRefresh = useRef<number>(0);
  const refreshPageData = useCallback(() => {
    if (Date.now() - lastRefresh.current < 2e3) return; // 2 second debounce
    lastRefresh.current = Date.now();

    api.getLibraryAgent(agentID).then((agent) => {
      setAgent(agent);

      getGraphVersion(agent.graph_id, agent.graph_version).then(
        (_graph) =>
          (graph && graph.version == _graph.version) || setGraph(_graph),
      );
      Promise.all([
        api.getGraphExecutions(agent.graph_id),
        api.listLibraryAgentPresets({
          graph_id: agent.graph_id,
          page_size: 100,
        }),
      ]).then(([runs, presets]) => {
        setAgentRuns(runs);
        setAgentPresets(presets.presets);

        // Preload the corresponding graph versions for the latest 10 runs
        new Set(runs.slice(0, 10).map((run) => run.graph_version)).forEach(
          (version) => getGraphVersion(agent.graph_id, version),
        );
      });
    });
  }, [api, agentID, getGraphVersion, graph]);

  // On first load: select the latest run
  useEffect(() => {
    // Only for first load or first execution
    if (selectedView.id || !isFirstLoad) return;
    if (agentRuns.length == 0 && agentPresets.length == 0) return;

    setIsFirstLoad(false);
    if (agentRuns.length > 0) {
      // select latest run
      const latestRun = agentRuns.reduce((latest, current) => {
        if (latest.started_at && !current.started_at) return current;
        else if (!latest.started_at) return latest;
        return latest.started_at > current.started_at ? latest : current;
      }, agentRuns[0]);
      selectRun(latestRun.id);
    } else {
      // select top preset
      const latestPreset = agentPresets.toSorted(
        (a, b) => b.updated_at.getTime() - a.updated_at.getTime(),
      )[0];
      selectPreset(latestPreset.id);
    }
  }, [
    isFirstLoad,
    selectedView.id,
    agentRuns,
    agentPresets,
    selectRun,
    selectPreset,
  ]);

  useEffect(() => {
    if (executionId) {
      selectRun(executionId as GraphExecutionID);
      setExecutionId(null);
    }
  }, [executionId, selectRun, setExecutionId]);

  // Initial load
  useEffect(() => {
    refreshPageData();

    // Show a toast when the WebSocket connection disconnects
    let connectionToast: ReturnType<typeof toast> | null = null;
    const cancelDisconnectHandler = api.onWebSocketDisconnect(() => {
      connectionToast ??= toast({
        title: "Connection to server was lost",
        variant: "destructive",
        description: (
          <div className="flex items-center">
            Trying to reconnect...
            <LoadingSpinner className="ml-1.5 size-3.5" />
          </div>
        ),
        duration: Infinity,
        dismissable: true,
      });
    });
    const cancelConnectHandler = api.onWebSocketConnect(() => {
      if (connectionToast)
        connectionToast.update({
          id: connectionToast.id,
          title: "✅ Connection re-established",
          variant: "default",
          description: (
            <div className="flex items-center">
              Refreshing data...
              <LoadingSpinner className="ml-1.5 size-3.5" />
            </div>
          ),
          duration: 2000,
          dismissable: true,
        });
      connectionToast = null;
    });
    return () => {
      cancelDisconnectHandler();
      cancelConnectHandler();
    };
  }, []);

  // Subscribe to WebSocket updates for agent runs
  useEffect(() => {
    if (!agent?.graph_id) return;

    return api.onWebSocketConnect(() => {
      refreshPageData(); // Sync up on (re)connect

      // Subscribe to all executions for this agent
      api.subscribeToGraphExecutions(agent.graph_id);
    });
  }, [api, agent?.graph_id, refreshPageData]);

  // Handle execution updates
  useEffect(() => {
    const detachExecUpdateHandler = api.onWebSocketMessage(
      "graph_execution_event",
      (data) => {
        if (data.graph_id != agent?.graph_id) return;

        if (data.status == "COMPLETED") {
          incrementRuns();
        }

        setAgentRuns((prev) => {
          const index = prev.findIndex((run) => run.id === data.id);
          if (index === -1) {
            return [...prev, data];
          }
          const newRuns = [...prev];
          newRuns[index] = { ...newRuns[index], ...data };
          return newRuns;
        });
        if (data.id === selectedView.id) {
          setSelectedRun((prev) => ({ ...prev, ...data }));
        }
      },
    );

    return () => {
      detachExecUpdateHandler();
    };
  }, [api, agent?.graph_id, selectedView.id, incrementRuns]);

  // Pre-load selectedRun based on selectedView
  useEffect(() => {
    if (selectedView.type != "run" || !selectedView.id) return;

    const newSelectedRun = agentRuns.find((run) => run.id == selectedView.id);
    if (selectedView.id !== selectedRun?.id) {
      // Pull partial data from "cache" while waiting for the rest to load
      setSelectedRun(newSelectedRun ?? null);
    }
  }, [api, selectedView, agentRuns, selectedRun?.id]);

  // Load selectedRun based on selectedView; refresh on agent refresh
  useEffect(() => {
    if (selectedView.type != "run" || !selectedView.id || !agent) return;

    api
      .getGraphExecutionInfo(agent.graph_id, selectedView.id)
      .then(async (run) => {
        // Ensure corresponding graph version is available before rendering I/O
        await getGraphVersion(run.graph_id, run.graph_version);
        setSelectedRun(run);
      });
  }, [api, selectedView, agent, getGraphVersion]);

  const fetchSchedules = useCallback(async () => {
    if (!agent) return;

    setSchedules(await api.listGraphExecutionSchedules(agent.graph_id));
  }, [api, agent?.graph_id]);

  useEffect(() => {
    fetchSchedules();
  }, [fetchSchedules]);

  // =========================== ACTIONS ============================

  const deleteRun = useCallback(
    async (run: GraphExecutionMeta) => {
      if (run.status == "RUNNING" || run.status == "QUEUED") {
        await api.stopGraphExecution(run.graph_id, run.id);
      }
      await api.deleteGraphExecution(run.id);

      setConfirmingDeleteAgentRun(null);
      if (selectedView.type == "run" && selectedView.id == run.id) {
        openRunDraftView();
      }
      setAgentRuns((runs) => runs.filter((r) => r.id !== run.id));
    },
    [api, selectedView, openRunDraftView],
  );

  const deletePreset = useCallback(
    async (presetID: LibraryAgentPresetID) => {
      await api.deleteLibraryAgentPreset(presetID);

      setConfirmingDeleteAgentPreset(null);
      if (selectedView.type == "preset" && selectedView.id == presetID) {
        openRunDraftView();
      }
      setAgentPresets((presets) => presets.filter((p) => p.id !== presetID));
    },
    [api, selectedView, openRunDraftView],
  );

  const deleteSchedule = useCallback(
    async (scheduleID: ScheduleID) => {
      const removedSchedule =
        await api.deleteGraphExecutionSchedule(scheduleID);

      setSchedules((schedules) => {
        const newSchedules = schedules.filter(
          (s) => s.id !== removedSchedule.id,
        );
        if (
          selectedView.type == "schedule" &&
          selectedView.id == removedSchedule.id
        ) {
          if (newSchedules.length > 0) {
            // Select next schedule if available
            selectSchedule(newSchedules[0].id);
          } else {
            // Reset to draft view if current schedule was deleted
            openRunDraftView();
          }
        }
        return newSchedules;
      });
      openRunDraftView();
    },
    [schedules, api],
  );

  const downloadGraph = useCallback(
    async () =>
      agent &&
      // Export sanitized graph from backend
      api
        .getGraph(agent.graph_id, agent.graph_version, true)
        .then((graph) =>
          exportAsJSONFile(graph, `${graph.name}_v${graph.version}.json`),
        ),
    [api, agent],
  );

  const copyAgent = useCallback(async () => {
    setCopyAgentDialogOpen(false);
    api
      .forkLibraryAgent(agentID)
      .then((newAgent) => {
        router.push(`/library/agents/${newAgent.id}`);
      })
      .catch((error) => {
        console.error("Error copying agent:", error);
        toast({
          title: "Error copying agent",
          description: `An error occurred while copying the agent: ${error.message}`,
          variant: "destructive",
        });
      });
  }, [agentID, api, router, toast]);

  const agentActions: ButtonAction[] = useMemo(
    () => [
      {
        label: "Customize agent",
        href: `/build?flowID=${agent?.graph_id}&flowVersion=${agent?.graph_version}`,
        disabled: !agent?.can_access_graph,
      },
      { label: "Export agent to file", callback: downloadGraph },
      ...(!agent?.can_access_graph
        ? [
            {
              label: "Edit a copy",
              callback: () => setCopyAgentDialogOpen(true),
            },
          ]
        : []),
      {
        label: "Delete agent",
        callback: () => setAgentDeleteDialogOpen(true),
      },
    ],
    [agent, downloadGraph],
  );

  const runGraph =
    graphVersions.current[selectedRun?.graph_version ?? 0] ?? graph;

  const onCreateSchedule = useCallback(
    (schedule: Schedule) => {
      setSchedules((prev) => [...prev, schedule]);
      selectSchedule(schedule.id);
    },
    [selectView],
  );

  const onCreatePreset = useCallback(
    (preset: LibraryAgentPreset) => {
      setAgentPresets((prev) => [...prev, preset]);
      selectPreset(preset.id);
    },
    [selectPreset],
  );

  const onUpdatePreset = useCallback(
    (updated: LibraryAgentPreset) => {
      setAgentPresets((prev) =>
        prev.map((p) => (p.id === updated.id ? updated : p)),
      );
      selectPreset(updated.id);
    },
    [selectPreset],
  );

  if (!agent || !graph) {
    return <LoadingBox className="h-[90vh]" />;
  }

  return (
    <div className="container justify-stretch p-0 pt-16 lg:flex">
      {/* Sidebar w/ list of runs */}
      {/* TODO: render this below header in sm and md layouts */}
      <AgentRunsSelectorList
        className="agpt-div w-full border-b lg:w-auto lg:border-b-0 lg:border-r"
        agent={agent}
        agentRuns={agentRuns}
        agentPresets={agentPresets}
        schedules={schedules}
        selectedView={selectedView}
        onSelectRun={selectRun}
        onSelectPreset={selectPreset}
        onSelectSchedule={selectSchedule}
        onSelectDraftNewRun={openRunDraftView}
        doDeleteRun={setConfirmingDeleteAgentRun}
        doDeletePreset={setConfirmingDeleteAgentPreset}
        doDeleteSchedule={deleteSchedule}
      />

      <div className="flex-1">
        {/* Header */}
        <div className="agpt-div w-full border-b">
          <h1
            data-testid="agent-title"
            className="font-poppins text-3xl font-medium"
          >
            {
              agent.name /* TODO: use dynamic/custom run title - https://github.com/Significant-Gravitas/AutoGPT/issues/9184 */
            }
          </h1>
        </div>

        {/* Run / Schedule views */}
        {(selectedView.type == "run" && selectedView.id ? (
          selectedRun && runGraph ? (
            <AgentRunDetailsView
              agent={agent}
              graph={runGraph}
              run={selectedRun}
              agentActions={agentActions}
              onRun={selectRun}
              deleteRun={() => setConfirmingDeleteAgentRun(selectedRun)}
            />
          ) : null
        ) : selectedView.type == "run" ? (
          /* Draft new runs / Create new presets */
          <AgentRunDraftView
            graph={graph}
            triggerSetupInfo={agent.trigger_setup_info}
            onRun={selectRun}
            onCreateSchedule={onCreateSchedule}
            onCreatePreset={onCreatePreset}
            agentActions={agentActions}
          />
        ) : selectedView.type == "preset" ? (
          /* Edit & update presets */
          <AgentRunDraftView
            graph={graph}
            triggerSetupInfo={agent.trigger_setup_info}
            agentPreset={
              agentPresets.find((preset) => preset.id == selectedView.id)!
            }
            onRun={selectRun}
            onCreateSchedule={onCreateSchedule}
            onUpdatePreset={onUpdatePreset}
            doDeletePreset={setConfirmingDeleteAgentPreset}
            agentActions={agentActions}
          />
        ) : selectedView.type == "schedule" ? (
          selectedSchedule &&
          graph && (
            <AgentScheduleDetailsView
              graph={graph}
              schedule={selectedSchedule}
              // agent={agent}
              agentActions={agentActions}
              onForcedRun={selectRun}
              doDeleteSchedule={deleteSchedule}
            />
          )
        ) : null) || <LoadingBox className="h-[70vh]" />}

        <DeleteConfirmDialog
          entityType="agent"
          open={agentDeleteDialogOpen}
          onOpenChange={setAgentDeleteDialogOpen}
          onDoDelete={() =>
            agent &&
            api.deleteLibraryAgent(agent.id).then(() => router.push("/library"))
          }
        />

        <DeleteConfirmDialog
          entityType="agent run"
          open={!!confirmingDeleteAgentRun}
          onOpenChange={(open) => !open && setConfirmingDeleteAgentRun(null)}
          onDoDelete={() =>
            confirmingDeleteAgentRun && deleteRun(confirmingDeleteAgentRun)
          }
        />
        <DeleteConfirmDialog
          entityType={agent.has_external_trigger ? "trigger" : "agent preset"}
          open={!!confirmingDeleteAgentPreset}
          onOpenChange={(open) => !open && setConfirmingDeleteAgentPreset(null)}
          onDoDelete={() =>
            confirmingDeleteAgentPreset &&
            deletePreset(confirmingDeleteAgentPreset)
          }
        />
        {/* Copy agent confirmation dialog */}
        <Dialog
          onOpenChange={setCopyAgentDialogOpen}
          open={copyAgentDialogOpen}
        >
          <DialogContent>
            <DialogHeader>
              <DialogTitle>You're making an editable copy</DialogTitle>
              <DialogDescription className="pt-2">
                The original Marketplace agent stays the same and cannot be
                edited. We'll save a new version of this agent to your
                Library. From there, you can customize it however you'd
                like by clicking "Customize agent" — this will open
                the builder where you can see and modify the inner workings.
              </DialogDescription>
            </DialogHeader>
            <DialogFooter className="justify-end">
              <Button
                type="button"
                variant="outline"
                onClick={() => setCopyAgentDialogOpen(false)}
              >
                Cancel
              </Button>
              <Button type="button" onClick={copyAgent}>
                Continue
              </Button>
            </DialogFooter>
          </DialogContent>
        </Dialog>
      </div>
    </div>
  );
}
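
Aside on getGraphVersion above: it layers two caches, resolved graphs and in-flight promises, so concurrent callers for the same version share one request. The dedup pattern in isolation, as a generic sketch rather than project code:

    const inFlight = new Map<string, Promise<Graph>>();

    function getGraphOnce(graphID: GraphID, version: number): Promise<Graph> {
      const key = `${graphID}:${version}`;
      // First caller creates the request; later callers reuse the same promise
      let pending = inFlight.get(key);
      if (!pending) {
        pending = api.getGraph(graphID, version);
        inFlight.set(key, pending);
      }
      return pending;
    }

Note that caching the promise also caches a rejection; evicting the entry in a .catch handler would let failed fetches retry.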
@@ -36,7 +36,7 @@ import {
} from "@/components/agents/agent-run-status-chip";
import useCredits from "@/hooks/useCredits";

export default function AgentRunDetailsView({
export function AgentRunDetailsView({
  agent,
  graph,
  run,

@@ -30,7 +30,7 @@ import {
  useToastOnFail,
} from "@/components/molecules/Toast/use-toast";

export default function AgentRunDraftView({
export function AgentRunDraftView({
  graph,
  agentPreset,
  triggerSetupInfo,

@@ -19,7 +19,7 @@ import { Separator } from "@/components/ui/separator";

import { agentRunStatusMap } from "@/components/agents/agent-run-status-chip";
import AgentRunSummaryCard from "@/components/agents/agent-run-summary-card";
import { Button } from "../atoms/Button/Button";
import { Button } from "../../../../../../../../components/atoms/Button/Button";

interface AgentRunsSelectorListProps {
  agent: LibraryAgent;

@@ -38,7 +38,7 @@ interface AgentRunsSelectorListProps {
  className?: string;
}

export default function AgentRunsSelectorList({
export function AgentRunsSelectorList({
  agent,
  agentRuns,
  agentPresets,

@@ -20,7 +20,7 @@ import { useToastOnFail } from "@/components/molecules/Toast/use-toast";
import { humanizeCronExpression } from "@/lib/cron-expression-utils";
import { PlayIcon } from "lucide-react";

export default function AgentScheduleDetailsView({
export function AgentScheduleDetailsView({
  graph,
  schedule,
  agentActions,

@@ -1,21 +1,3 @@
import AgentFlowListSkeleton from "@/components/monitor/skeletons/AgentFlowListSkeleton";
import React from "react";
import FlowRunsListSkeleton from "@/components/monitor/skeletons/FlowRunsListSkeleton";
import FlowRunsStatusSkeleton from "@/components/monitor/skeletons/FlowRunsStatusSkeleton";
import { AgentRunsLoading } from "./components/AgentRunsView/components/AgentRunsLoading";

export default function MonitorLoadingSkeleton() {
  return (
    <div className="space-y-4 p-4">
      <div className="grid grid-cols-1 gap-4 md:grid-cols-3">
        {/* Agents Section */}
        <AgentFlowListSkeleton />

        {/* Runs Section */}
        <FlowRunsListSkeleton />

        {/* Stats Section */}
        <FlowRunsStatusSkeleton />
      </div>
    </div>
  );
}
export default AgentRunsLoading;

@@ -1,622 +1,16 @@
"use client";
import { useParams, useRouter } from "next/navigation";
import { useQueryState } from "nuqs";
import React, {
  useCallback,
  useEffect,
  useMemo,
  useRef,
  useState,
} from "react";

import {
  Graph,
  GraphExecution,
  GraphExecutionID,
  GraphExecutionMeta,
  GraphID,
  LibraryAgent,
  LibraryAgentID,
  LibraryAgentPreset,
  LibraryAgentPresetID,
  Schedule,
  ScheduleID,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { exportAsJSONFile } from "@/lib/utils";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";

import AgentRunDetailsView from "@/components/agents/agent-run-details-view";
import AgentRunDraftView from "@/components/agents/agent-run-draft-view";
import AgentRunsSelectorList from "@/components/agents/agent-runs-selector-list";
import AgentScheduleDetailsView from "@/components/agents/agent-schedule-details-view";
import DeleteConfirmDialog from "@/components/agptui/delete-confirm-dialog";
import type { ButtonAction } from "@/components/agptui/types";
import { useOnboarding } from "@/components/onboarding/onboarding-provider";
import { Button } from "@/components/ui/button";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
} from "@/components/ui/dialog";
import LoadingBox, { LoadingSpinner } from "@/components/ui/loading";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { OldAgentLibraryView } from "./components/OldAgentLibraryView/OldAgentLibraryView";
import { AgentRunsView } from "./components/AgentRunsView/AgentRunsView";

export default function AgentRunsPage(): React.ReactElement {
  const { id: agentID }: { id: LibraryAgentID } = useParams();
  const [executionId, setExecutionId] = useQueryState("executionId");
  const { toast } = useToast();
  const router = useRouter();
  const api = useBackendAPI();
export default function AgentLibraryPage() {
  const isNewAgentRunsEnabled = useGetFlag(Flag.NEW_AGENT_RUNS);

  // ============================ STATE =============================

  const [graph, setGraph] = useState<Graph | null>(null); // Graph version corresponding to LibraryAgent
  const [agent, setAgent] = useState<LibraryAgent | null>(null);
  const [agentRuns, setAgentRuns] = useState<GraphExecutionMeta[]>([]);
  const [agentPresets, setAgentPresets] = useState<LibraryAgentPreset[]>([]);
  const [schedules, setSchedules] = useState<Schedule[]>([]);
  const [selectedView, selectView] = useState<
    | { type: "run"; id?: GraphExecutionID }
    | { type: "preset"; id: LibraryAgentPresetID }
    | { type: "schedule"; id: ScheduleID }
  >({ type: "run" });
  const [selectedRun, setSelectedRun] = useState<
    GraphExecution | GraphExecutionMeta | null
  >(null);
  const selectedSchedule =
    selectedView.type == "schedule"
      ? schedules.find((s) => s.id == selectedView.id)
      : null;
  const [isFirstLoad, setIsFirstLoad] = useState<boolean>(true);
  const [agentDeleteDialogOpen, setAgentDeleteDialogOpen] =
    useState<boolean>(false);
  const [confirmingDeleteAgentRun, setConfirmingDeleteAgentRun] =
    useState<GraphExecutionMeta | null>(null);
  const [confirmingDeleteAgentPreset, setConfirmingDeleteAgentPreset] =
    useState<LibraryAgentPresetID | null>(null);
  const {
    state: onboardingState,
    updateState: updateOnboardingState,
    incrementRuns,
  } = useOnboarding();
  const [copyAgentDialogOpen, setCopyAgentDialogOpen] = useState(false);

  // Set page title with agent name
  useEffect(() => {
    if (agent) {
      document.title = `${agent.name} - Library - AutoGPT Platform`;
    }
  }, [agent]);

  const openRunDraftView = useCallback(() => {
    selectView({ type: "run" });
  }, []);

  const selectRun = useCallback((id: GraphExecutionID) => {
    selectView({ type: "run", id });
  }, []);

  const selectPreset = useCallback((id: LibraryAgentPresetID) => {
    selectView({ type: "preset", id });
  }, []);

  const selectSchedule = useCallback((id: ScheduleID) => {
    selectView({ type: "schedule", id });
  }, []);

  const graphVersions = useRef<Record<number, Graph>>({});
  const loadingGraphVersions = useRef<Record<number, Promise<Graph>>>({});
  const getGraphVersion = useCallback(
    async (graphID: GraphID, version: number) => {
      if (version in graphVersions.current)
        return graphVersions.current[version];
      if (version in loadingGraphVersions.current)
        return loadingGraphVersions.current[version];

      const pendingGraph = api.getGraph(graphID, version).then((graph) => {
        graphVersions.current[version] = graph;
        return graph;
      });
      // Cache promise as well to avoid duplicate requests
      loadingGraphVersions.current[version] = pendingGraph;
      return pendingGraph;
    },
    [api, graphVersions, loadingGraphVersions],
  );

  // Reward user for viewing results of their onboarding agent
  useEffect(() => {
    if (
      !onboardingState ||
      !selectedRun ||
      onboardingState.completedSteps.includes("GET_RESULTS")
    )
      return;

    if (selectedRun.id === onboardingState.onboardingAgentExecutionId) {
      updateOnboardingState({
        completedSteps: [...onboardingState.completedSteps, "GET_RESULTS"],
      });
    }
  }, [selectedRun, onboardingState, updateOnboardingState]);

  const lastRefresh = useRef<number>(0);
  const refreshPageData = useCallback(() => {
    if (Date.now() - lastRefresh.current < 2e3) return; // 2 second debounce
    lastRefresh.current = Date.now();

    api.getLibraryAgent(agentID).then((agent) => {
      setAgent(agent);

      getGraphVersion(agent.graph_id, agent.graph_version).then(
        (_graph) =>
          (graph && graph.version == _graph.version) || setGraph(_graph),
      );
      Promise.all([
        api.getGraphExecutions(agent.graph_id),
        api.listLibraryAgentPresets({
          graph_id: agent.graph_id,
          page_size: 100,
        }),
      ]).then(([runs, presets]) => {
        setAgentRuns(runs);
        setAgentPresets(presets.presets);

        // Preload the corresponding graph versions for the latest 10 runs
        new Set(runs.slice(0, 10).map((run) => run.graph_version)).forEach(
          (version) => getGraphVersion(agent.graph_id, version),
        );
      });
    });
  }, [api, agentID, getGraphVersion, graph]);

  // On first load: select the latest run
  useEffect(() => {
    // Only for first load or first execution
    if (selectedView.id || !isFirstLoad) return;
    if (agentRuns.length == 0 && agentPresets.length == 0) return;

    setIsFirstLoad(false);
    if (agentRuns.length > 0) {
      // select latest run
      const latestRun = agentRuns.reduce((latest, current) => {
        if (latest.started_at && !current.started_at) return current;
        else if (!latest.started_at) return latest;
        return latest.started_at > current.started_at ? latest : current;
      }, agentRuns[0]);
      selectRun(latestRun.id);
    } else {
      // select top preset
      const latestPreset = agentPresets.toSorted(
        (a, b) => b.updated_at.getTime() - a.updated_at.getTime(),
      )[0];
      selectPreset(latestPreset.id);
    }
  }, [
    isFirstLoad,
    selectedView.id,
    agentRuns,
    agentPresets,
    selectRun,
    selectPreset,
  ]);

  useEffect(() => {
    if (executionId) {
      selectRun(executionId as GraphExecutionID);
      setExecutionId(null);
    }
  }, [executionId, selectRun, setExecutionId]);

  // Initial load
  useEffect(() => {
    refreshPageData();

    // Show a toast when the WebSocket connection disconnects
    let connectionToast: ReturnType<typeof toast> | null = null;
    const cancelDisconnectHandler = api.onWebSocketDisconnect(() => {
      connectionToast ??= toast({
        title: "Connection to server was lost",
        variant: "destructive",
        description: (
          <div className="flex items-center">
            Trying to reconnect...
            <LoadingSpinner className="ml-1.5 size-3.5" />
          </div>
        ),
        duration: Infinity,
        dismissable: true,
      });
    });
    const cancelConnectHandler = api.onWebSocketConnect(() => {
      if (connectionToast)
        connectionToast.update({
          id: connectionToast.id,
          title: "✅ Connection re-established",
          variant: "default",
          description: (
            <div className="flex items-center">
              Refreshing data...
              <LoadingSpinner className="ml-1.5 size-3.5" />
            </div>
          ),
          duration: 2000,
          dismissable: true,
        });
      connectionToast = null;
    });
    return () => {
      cancelDisconnectHandler();
      cancelConnectHandler();
    };
  }, []);

  // Subscribe to WebSocket updates for agent runs
  useEffect(() => {
    if (!agent?.graph_id) return;

    return api.onWebSocketConnect(() => {
      refreshPageData(); // Sync up on (re)connect

      // Subscribe to all executions for this agent
      api.subscribeToGraphExecutions(agent.graph_id);
    });
  }, [api, agent?.graph_id, refreshPageData]);

  // Handle execution updates
  useEffect(() => {
    const detachExecUpdateHandler = api.onWebSocketMessage(
      "graph_execution_event",
      (data) => {
        if (data.graph_id != agent?.graph_id) return;

        if (data.status == "COMPLETED") {
          incrementRuns();
        }

        setAgentRuns((prev) => {
          const index = prev.findIndex((run) => run.id === data.id);
          if (index === -1) {
            return [...prev, data];
          }
          const newRuns = [...prev];
          newRuns[index] = { ...newRuns[index], ...data };
          return newRuns;
        });
        if (data.id === selectedView.id) {
          setSelectedRun((prev) => ({ ...prev, ...data }));
        }
      },
    );

    return () => {
      detachExecUpdateHandler();
    };
  }, [api, agent?.graph_id, selectedView.id, incrementRuns]);

  // Pre-load selectedRun based on selectedView
  useEffect(() => {
    if (selectedView.type != "run" || !selectedView.id) return;

    const newSelectedRun = agentRuns.find((run) => run.id == selectedView.id);
    if (selectedView.id !== selectedRun?.id) {
      // Pull partial data from "cache" while waiting for the rest to load
      setSelectedRun(newSelectedRun ?? null);
    }
  }, [api, selectedView, agentRuns, selectedRun?.id]);

  // Load selectedRun based on selectedView; refresh on agent refresh
  useEffect(() => {
    if (selectedView.type != "run" || !selectedView.id || !agent) return;

    api
      .getGraphExecutionInfo(agent.graph_id, selectedView.id)
      .then(async (run) => {
        // Ensure corresponding graph version is available before rendering I/O
        await getGraphVersion(run.graph_id, run.graph_version);
        setSelectedRun(run);
      });
  }, [api, selectedView, agent, getGraphVersion]);

  const fetchSchedules = useCallback(async () => {
    if (!agent) return;

    setSchedules(await api.listGraphExecutionSchedules(agent.graph_id));
  }, [api, agent?.graph_id]);

  useEffect(() => {
    fetchSchedules();
  }, [fetchSchedules]);

  // =========================== ACTIONS ============================

  const deleteRun = useCallback(
    async (run: GraphExecutionMeta) => {
      if (run.status == "RUNNING" || run.status == "QUEUED") {
        await api.stopGraphExecution(run.graph_id, run.id);
      }
      await api.deleteGraphExecution(run.id);

      setConfirmingDeleteAgentRun(null);
      if (selectedView.type == "run" && selectedView.id == run.id) {
        openRunDraftView();
      }
      setAgentRuns((runs) => runs.filter((r) => r.id !== run.id));
    },
    [api, selectedView, openRunDraftView],
  );

  const deletePreset = useCallback(
    async (presetID: LibraryAgentPresetID) => {
      await api.deleteLibraryAgentPreset(presetID);

      setConfirmingDeleteAgentPreset(null);
      if (selectedView.type == "preset" && selectedView.id == presetID) {
        openRunDraftView();
      }
      setAgentPresets((presets) => presets.filter((p) => p.id !== presetID));
    },
    [api, selectedView, openRunDraftView],
  );

  const deleteSchedule = useCallback(
    async (scheduleID: ScheduleID) => {
      const removedSchedule =
        await api.deleteGraphExecutionSchedule(scheduleID);

      setSchedules((schedules) => {
        const newSchedules = schedules.filter(
          (s) => s.id !== removedSchedule.id,
        );
        if (
          selectedView.type == "schedule" &&
          selectedView.id == removedSchedule.id
        ) {
          if (newSchedules.length > 0) {
            // Select next schedule if available
            selectSchedule(newSchedules[0].id);
          } else {
            // Reset to draft view if current schedule was deleted
            openRunDraftView();
          }
        }
        return newSchedules;
      });
      openRunDraftView();
    },
    [schedules, api],
  );

  const downloadGraph = useCallback(
    async () =>
      agent &&
      // Export sanitized graph from backend
      api
        .getGraph(agent.graph_id, agent.graph_version, true)
        .then((graph) =>
          exportAsJSONFile(graph, `${graph.name}_v${graph.version}.json`),
        ),
    [api, agent],
  );

  const copyAgent = useCallback(async () => {
    setCopyAgentDialogOpen(false);
    api
      .forkLibraryAgent(agentID)
      .then((newAgent) => {
        router.push(`/library/agents/${newAgent.id}`);
      })
      .catch((error) => {
        console.error("Error copying agent:", error);
        toast({
          title: "Error copying agent",
          description: `An error occurred while copying the agent: ${error.message}`,
          variant: "destructive",
        });
      });
  }, [agentID, api, router, toast]);

  const agentActions: ButtonAction[] = useMemo(
    () => [
      {
        label: "Customize agent",
        href: `/build?flowID=${agent?.graph_id}&flowVersion=${agent?.graph_version}`,
        disabled: !agent?.can_access_graph,
      },
      { label: "Export agent to file", callback: downloadGraph },
      ...(!agent?.can_access_graph
        ? [
            {
              label: "Edit a copy",
              callback: () => setCopyAgentDialogOpen(true),
            },
          ]
        : []),
      {
        label: "Delete agent",
        callback: () => setAgentDeleteDialogOpen(true),
      },
    ],
    [agent, downloadGraph],
  );

  const runGraph =
    graphVersions.current[selectedRun?.graph_version ?? 0] ?? graph;

  const onCreateSchedule = useCallback(
    (schedule: Schedule) => {
      setSchedules((prev) => [...prev, schedule]);
      selectSchedule(schedule.id);
    },
    [selectView],
  );

  const onCreatePreset = useCallback(
    (preset: LibraryAgentPreset) => {
      setAgentPresets((prev) => [...prev, preset]);
      selectPreset(preset.id);
    },
    [selectPreset],
  );

  const onUpdatePreset = useCallback(
    (updated: LibraryAgentPreset) => {
      setAgentPresets((prev) =>
        prev.map((p) => (p.id === updated.id ? updated : p)),
      );
      selectPreset(updated.id);
    },
    [selectPreset],
  );

  if (!agent || !graph) {
    return <LoadingBox className="h-[90vh]" />;
  if (isNewAgentRunsEnabled) {
    return <AgentRunsView />;
  }

  return (
    <div className="container justify-stretch p-0 pt-16 lg:flex">
      {/* Sidebar w/ list of runs */}
      {/* TODO: render this below header in sm and md layouts */}
      <AgentRunsSelectorList
        className="agpt-div w-full border-b lg:w-auto lg:border-b-0 lg:border-r"
        agent={agent}
        agentRuns={agentRuns}
        agentPresets={agentPresets}
        schedules={schedules}
        selectedView={selectedView}
        onSelectRun={selectRun}
        onSelectPreset={selectPreset}
        onSelectSchedule={selectSchedule}
        onSelectDraftNewRun={openRunDraftView}
        doDeleteRun={setConfirmingDeleteAgentRun}
        doDeletePreset={setConfirmingDeleteAgentPreset}
        doDeleteSchedule={deleteSchedule}
      />

      <div className="flex-1">
        {/* Header */}
        <div className="agpt-div w-full border-b">
          <h1
            data-testid="agent-title"
            className="font-poppins text-3xl font-medium"
          >
            {
              agent.name /* TODO: use dynamic/custom run title - https://github.com/Significant-Gravitas/AutoGPT/issues/9184 */
            }
          </h1>
        </div>

        {/* Run / Schedule views */}
        {(selectedView.type == "run" && selectedView.id ? (
          selectedRun && runGraph ? (
            <AgentRunDetailsView
              agent={agent}
              graph={runGraph}
              run={selectedRun}
              agentActions={agentActions}
              onRun={selectRun}
              deleteRun={() => setConfirmingDeleteAgentRun(selectedRun)}
            />
          ) : null
        ) : selectedView.type == "run" ? (
          /* Draft new runs / Create new presets */
          <AgentRunDraftView
            graph={graph}
            triggerSetupInfo={agent.trigger_setup_info}
            onRun={selectRun}
            onCreateSchedule={onCreateSchedule}
            onCreatePreset={onCreatePreset}
            agentActions={agentActions}
          />
        ) : selectedView.type == "preset" ? (
          /* Edit & update presets */
          <AgentRunDraftView
            graph={graph}
            triggerSetupInfo={agent.trigger_setup_info}
            agentPreset={
              agentPresets.find((preset) => preset.id == selectedView.id)!
            }
            onRun={selectRun}
            onCreateSchedule={onCreateSchedule}
            onUpdatePreset={onUpdatePreset}
            doDeletePreset={setConfirmingDeleteAgentPreset}
            agentActions={agentActions}
          />
        ) : selectedView.type == "schedule" ? (
          selectedSchedule &&
          graph && (
            <AgentScheduleDetailsView
              graph={graph}
              schedule={selectedSchedule}
              // agent={agent}
              agentActions={agentActions}
              onForcedRun={selectRun}
              doDeleteSchedule={deleteSchedule}
            />
          )
        ) : null) || <LoadingBox className="h-[70vh]" />}

        <DeleteConfirmDialog
          entityType="agent"
          open={agentDeleteDialogOpen}
          onOpenChange={setAgentDeleteDialogOpen}
          onDoDelete={() =>
            agent &&
            api.deleteLibraryAgent(agent.id).then(() => router.push("/library"))
          }
        />

        <DeleteConfirmDialog
          entityType="agent run"
          open={!!confirmingDeleteAgentRun}
          onOpenChange={(open) => !open && setConfirmingDeleteAgentRun(null)}
          onDoDelete={() =>
            confirmingDeleteAgentRun && deleteRun(confirmingDeleteAgentRun)
          }
        />
        <DeleteConfirmDialog
          entityType={agent.has_external_trigger ? "trigger" : "agent preset"}
          open={!!confirmingDeleteAgentPreset}
          onOpenChange={(open) => !open && setConfirmingDeleteAgentPreset(null)}
          onDoDelete={() =>
            confirmingDeleteAgentPreset &&
            deletePreset(confirmingDeleteAgentPreset)
          }
        />
        {/* Copy agent confirmation dialog */}
        <Dialog
          onOpenChange={setCopyAgentDialogOpen}
          open={copyAgentDialogOpen}
        >
          <DialogContent>
            <DialogHeader>
              <DialogTitle>You're making an editable copy</DialogTitle>
              <DialogDescription className="pt-2">
                The original Marketplace agent stays the same and cannot be
                edited. We'll save a new version of this agent to your
                Library. From there, you can customize it however you'd
                like by clicking "Customize agent" — this will open
                the builder where you can see and modify the inner workings.
              </DialogDescription>
            </DialogHeader>
            <DialogFooter className="justify-end">
              <Button
                type="button"
                variant="outline"
                onClick={() => setCopyAgentDialogOpen(false)}
              >
                Cancel
              </Button>
              <Button type="button" onClick={copyAgent}>
                Continue
              </Button>
            </DialogFooter>
          </DialogContent>
        </Dialog>
      </div>
    </div>
  );
  return <OldAgentLibraryView />;
}

@@ -1,5 +1,5 @@
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
import { BreadCrumbs } from "@/components/agptui/BreadCrumbs";
|
||||
import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
|
||||
import { AgentInfo } from "@/components/agptui/AgentInfo";
|
||||
import { AgentImages } from "@/components/agptui/AgentImages";
|
||||
import { AgentsSection } from "@/components/agptui/composite/AgentsSection";
|
||||
@@ -73,7 +73,7 @@ export default async function MarketplaceAgentPage({
|
||||
return (
|
||||
<div className="mx-auto w-screen max-w-[1360px]">
|
||||
<main className="mt-5 px-4">
|
||||
<BreadCrumbs items={breadcrumbs} />
|
||||
<Breadcrumbs items={breadcrumbs} />
|
||||
|
||||
<div className="mt-4 flex flex-col items-start gap-4 sm:mt-6 sm:gap-6 md:mt-8 md:flex-row md:gap-8">
|
||||
<div className="w-full md:w-auto md:shrink-0">
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import BackendAPI from "@/lib/autogpt-server-api";
|
||||
import { AgentsSection } from "@/components/agptui/composite/AgentsSection";
|
||||
import { BreadCrumbs } from "@/components/agptui/BreadCrumbs";
|
||||
import { Breadcrumbs } from "@/components/molecules/Breadcrumbs/Breadcrumbs";
|
||||
import { Metadata } from "next";
|
||||
import { CreatorInfoCard } from "@/components/agptui/CreatorInfoCard";
|
||||
import { CreatorLinks } from "@/components/agptui/CreatorLinks";
|
||||
@@ -49,7 +49,7 @@ export default async function Page({
|
||||
return (
|
||||
<div className="mx-auto w-screen max-w-[1360px]">
|
||||
<main className="mt-5 px-4">
|
||||
<BreadCrumbs
|
||||
<Breadcrumbs
|
||||
items={[
|
||||
{ name: "Store", link: "/marketplace" },
|
||||
{ name: creator.name, link: "#" },
|
||||
|
@@ -44,9 +44,9 @@ export function APIKeysSection() {
  </TableHeader>
  <TableBody>
    {apiKeys.map((key) => (
      <TableRow key={key.id}>
      <TableRow key={key.id} data-testid="api-key-row">
        <TableCell>{key.name}</TableCell>
        <TableCell>
        <TableCell data-testid="api-key-id">
          <div className="rounded-md border p-1 px-2 text-xs">
            {`${key.prefix}******************${key.postfix}`}
          </div>
@@ -76,7 +76,11 @@ export function APIKeysSection() {
  <TableCell>
    <DropdownMenu>
      <DropdownMenuTrigger asChild>
        <Button variant="ghost" size="sm">
        <Button
          data-testid="api-key-actions"
          variant="ghost"
          size="sm"
        >
          <MoreVertical className="h-4 w-4" />
        </Button>
      </DropdownMenuTrigger>

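The data-testid attributes added to the API keys table give end-to-end tests stable hooks that survive styling and copy changes. A hypothetical Playwright check against them; the route and the exact assertions are assumptions for illustration, not tests from this change:

    import { test, expect } from "@playwright/test";

    test("API keys table exposes row, id, and actions hooks", async ({ page }) => {
      await page.goto("/profile/settings"); // placeholder route, assumed for illustration
      const firstRow = page.getByTestId("api-key-row").first();
      await expect(firstRow).toBeVisible();
      // The id cell renders a masked value (prefix + asterisks + postfix)
      await expect(firstRow.getByTestId("api-key-id")).toContainText("*");
      await firstRow.getByTestId("api-key-actions").click(); // opens the row's dropdown
    });
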
@@ -7,6 +7,7 @@ import {
  AgentTableRow,
  AgentTableRowProps,
} from "../AgentTableRow/AgentTableRow";
import { StoreSubmissionEditRequest } from "@/app/api/__generated__/models/storeSubmissionEditRequest";

export interface AgentTableProps {
  agents: Omit<
@@ -15,15 +16,23 @@ export interface AgentTableProps {
    | "selectedAgents"
    | "onViewSubmission"
    | "onDeleteSubmission"
    | "onEditSubmission"
  >[];
  onViewSubmission: (submission: StoreSubmission) => void;
  onDeleteSubmission: (submission_id: string) => void;
  onEditSubmission: (
    submission: StoreSubmissionEditRequest & {
      store_listing_version_id: string | undefined;
      agent_id: string;
    },
  ) => void;
}

export const AgentTable: React.FC<AgentTableProps> = ({
  agents,
  onViewSubmission,
  onDeleteSubmission,
  onEditSubmission,
}) => {
  return (
    <div className="w-full" data-testid="agent-table">
@@ -62,6 +71,7 @@ export const AgentTable: React.FC<AgentTableProps> = ({
        {...agent}
        onViewSubmission={onViewSubmission}
        onDeleteSubmission={onDeleteSubmission}
        onEditSubmission={onEditSubmission}
      />
      <div className="block md:hidden">
        <AgentTableCard

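The new onEditSubmission callback widens the generated StoreSubmissionEditRequest with the two identifiers needed to route the edit. A sketch of a conforming handler; the early-return behaviour is an assumption for illustration, not the handler this change wires up:

    import { StoreSubmissionEditRequest } from "@/app/api/__generated__/models/storeSubmissionEditRequest";

    type EditSubmissionPayload = StoreSubmissionEditRequest & {
      store_listing_version_id: string | undefined;
      agent_id: string;
    };

    function handleEditSubmission(submission: EditSubmissionPayload) {
      // A submission that has never been published has no listing version to edit.
      if (!submission.store_listing_version_id) return;
      console.log("editing", submission.agent_id, submission.store_listing_version_id);
    }
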
@@ -3,8 +3,8 @@
import Image from "next/image";
import { IconStarFilled, IconMore } from "@/components/ui/icons";
import { StoreSubmission } from "@/app/api/__generated__/models/storeSubmission";
import { Status, StatusType } from "@/components/agptui/Status";
import { SubmissionStatus } from "@/app/api/__generated__/models/submissionStatus";
import { Status } from "@/components/agptui/Status";

export interface AgentTableCardProps {
  agent_id: string;
@@ -14,7 +14,7 @@ export interface AgentTableCardProps {
  description: string;
  imageSrc: string[];
  dateSubmitted: string;
  status: StatusType;
  status: SubmissionStatus;
  runs: number;
  rating: number;
  id: number;
@@ -44,8 +44,7 @@ export const AgentTableCard = ({
    description,
    image_urls: imageSrc,
    date_submitted: dateSubmitted,
    // SafeCast: status is a string from the API...
    status: status.toUpperCase() as SubmissionStatus,
    status: status,
    runs,
    rating,
  });

@@ -4,7 +4,7 @@ import Image from "next/image";
import { Text } from "@/components/atoms/Text/Text";

import * as DropdownMenu from "@radix-ui/react-dropdown-menu";
import { Status, StatusType } from "@/components/agptui/Status";
import { Status } from "@/components/agptui/Status";
import { useAgentTableRow } from "./useAgentTableRow";
import { StoreSubmission } from "@/app/api/__generated__/models/storeSubmission";
import {
@@ -13,7 +13,10 @@ import {
  ImageBroken,
  Star,
  Trash,
  PencilSimple,
} from "@phosphor-icons/react/dist/ssr";
import { SubmissionStatus } from "@/app/api/__generated__/models/submissionStatus";
import { StoreSubmissionEditRequest } from "@/app/api/__generated__/models/storeSubmissionEditRequest";

export interface AgentTableRowProps {
  agent_id: string;
@@ -23,13 +26,22 @@ export interface AgentTableRowProps {
  description: string;
  imageSrc: string[];
  date_submitted: string;
  status: StatusType;
  status: SubmissionStatus;
  runs: number;
  rating: number;
  dateSubmitted: string;
  id: number;
  video_url?: string;
  categories?: string[];
  store_listing_version_id?: string;
  onViewSubmission: (submission: StoreSubmission) => void;
  onDeleteSubmission: (submission_id: string) => void;
  onEditSubmission: (
    submission: StoreSubmissionEditRequest & {
      store_listing_version_id: string | undefined;
      agent_id: string;
    },
  ) => void;
}

export const AgentTableRow = ({
@@ -44,13 +56,18 @@ export const AgentTableRow = ({
  runs,
  rating,
  id,
  video_url,
  categories,
  store_listing_version_id,
  onViewSubmission,
  onDeleteSubmission,
  onEditSubmission,
}: AgentTableRowProps) => {
  const { handleView, handleDelete } = useAgentTableRow({
  const { handleView, handleDelete, handleEdit } = useAgentTableRow({
    id,
    onViewSubmission,
    onDeleteSubmission,
    onEditSubmission,
    agent_id,
    agent_version,
    agentName,
@@ -58,15 +75,23 @@ export const AgentTableRow = ({
    description,
    imageSrc,
    dateSubmitted,
    status: status.toUpperCase(),
    status,
    runs,
    rating,
    video_url,
    categories,
    store_listing_version_id,
  });

  // Determine if we should show Edit or View button
  const canEdit =
    status === SubmissionStatus.APPROVED || status === SubmissionStatus.PENDING;

  return (
    <div
      data-testid="agent-table-row"
      data-agent-name={agentName}
      data-agent-id={agent_id}
      data-submission-id={store_listing_version_id}
      className="hidden items-center border-b border-neutral-300 px-4 py-4 hover:bg-neutral-50 dark:border-neutral-700 dark:hover:bg-neutral-800 md:flex"
    >
      <div className="grid w-full grid-cols-[minmax(400px,1fr),180px,140px,100px,100px,40px] items-center gap-4">
@@ -139,13 +164,23 @@ export const AgentTableRow = ({
        <DotsThreeVerticalIcon className="h-5 w-5 text-neutral-800" />
      </DropdownMenu.Trigger>
      <DropdownMenu.Content className="z-10 rounded-xl border bg-white p-1 shadow-md dark:bg-gray-800">
        <DropdownMenu.Item
          onSelect={handleView}
          className="flex cursor-pointer items-center rounded-md px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"
        >
          <Eye className="mr-2 h-4 w-4 dark:text-gray-100" />
          <span className="dark:text-gray-100">View</span>
        </DropdownMenu.Item>
        {canEdit ? (
          <DropdownMenu.Item
            onSelect={handleEdit}
            className="flex cursor-pointer items-center rounded-md px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"
          >
            <PencilSimple className="mr-2 h-4 w-4 dark:text-gray-100" />
            <span className="dark:text-gray-100">Edit</span>
          </DropdownMenu.Item>
        ) : (
          <DropdownMenu.Item
            onSelect={handleView}
            className="flex cursor-pointer items-center rounded-md px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"
          >
            <Eye className="mr-2 h-4 w-4 dark:text-gray-100" />
            <span className="dark:text-gray-100">View</span>
          </DropdownMenu.Item>
        )}
        <DropdownMenu.Separator className="my-1 h-px bg-gray-300 dark:bg-gray-600" />
        <DropdownMenu.Item
          onSelect={handleDelete}

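With status now typed as the generated SubmissionStatus enum, the edit gate compares enum members instead of upper-casing strings. The same logic isolated as a helper, purely for illustration:

    import { SubmissionStatus } from "@/app/api/__generated__/models/submissionStatus";

    // Mirrors the canEdit check above: rejected submissions stay view-only.
    function canEditSubmission(status: SubmissionStatus): boolean {
      return status === SubmissionStatus.APPROVED || status === SubmissionStatus.PENDING;
    }
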
@@ -1,10 +1,17 @@
import { StoreSubmission } from "@/app/api/__generated__/models/storeSubmission";
import { StoreSubmissionEditRequest } from "@/app/api/__generated__/models/storeSubmissionEditRequest";
import { SubmissionStatus } from "@/app/api/__generated__/models/submissionStatus";

interface useAgentTableRowProps {
  id: number;
  onViewSubmission: (submission: StoreSubmission) => void;
  onDeleteSubmission: (submission_id: string) => void;
  onEditSubmission: (
    submission: StoreSubmissionEditRequest & {
      store_listing_version_id: string | undefined;
      agent_id: string;
    },
  ) => void;
  agent_id: string;
  agent_version: number;
  agentName: string;
@@ -12,14 +19,18 @@ interface useAgentTableRowProps {
  description: string;
  imageSrc: string[];
  dateSubmitted: string;
  status: string;
  status: SubmissionStatus;
  runs: number;
  rating: number;
  video_url?: string;
  categories?: string[];
  store_listing_version_id?: string;
}

export const useAgentTableRow = ({
  onViewSubmission,
  onDeleteSubmission,
  onEditSubmission,
  agent_id,
  agent_version,
  agentName,
@@ -30,6 +41,9 @@ export const useAgentTableRow = ({
  status,
  runs,
  rating,
  video_url,
  categories,
  store_listing_version_id,
}: useAgentTableRowProps) => {
  const handleView = () => {
    onViewSubmission({
@@ -41,16 +55,32 @@ export const useAgentTableRow = ({
      description,
      image_urls: imageSrc,
      date_submitted: dateSubmitted,
      // SafeCast: status is a string from the API...
      status: status.toUpperCase() as SubmissionStatus,
      status: status,
      runs,
      rating,
      video_url,
      categories,
      store_listing_version_id,
    } satisfies StoreSubmission);
  };

  const handleEdit = () => {
    onEditSubmission({
      name: agentName,
      sub_heading,
      description,
      image_urls: imageSrc,
      video_url,
      categories,
      changes_summary: "Update Submission",
      store_listing_version_id,
      agent_id,
    });
  };

  const handleDelete = () => {
    onDeleteSubmission(agent_id);
  };

  return { handleView, handleDelete };
  return { handleView, handleDelete, handleEdit };
};

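The satisfies operator in handleView is what lets the old upper-casing cast be dropped safely: it checks the object literal against StoreSubmission at compile time without widening its inferred type, so a mistyped status is now a build error rather than a silently wrong cast. A self-contained illustration, with a local type standing in for the real generated StoreSubmission:

    // Local stand-in type; the real code checks against the generated StoreSubmission.
    type Submission = { name: string; status: "PENDING" | "APPROVED" };

    // `satisfies` type-checks the literal without changing its inferred type.
    export const ok = { name: "agent", status: "PENDING" } satisfies Submission;

    // A lowercase status would fail to compile, unlike with an `as` cast:
    // const bad = { name: "agent", status: "pending" } satisfies Submission;
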