Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-02-13 08:14:58 -05:00)
Compare commits: 26 commits, `abhi/folde…` … `ci/claude-…`

| SHA1 |
|---|
| 34968ca3c0 |
| 43b25b5e2f |
| ab0b537cc7 |
| 9a8c6ad609 |
| f1681a06e0 |
| 3252c87cc7 |
| 7e8093ac22 |
| 8b903cecfc |
| 201e4b0c8f |
| 32987649e0 |
| f67cf0591b |
| 6271c81a21 |
| 7e855806e3 |
| adc46ec2e8 |
| 9d12807294 |
| 4ac40f11e5 |
| f5cdb02a38 |
| bd9ff05eaa |
| 66be27f6da |
| 39b821da94 |
| 393d6aa5ac |
| f753058e8f |
| 7cdbbdd65e |
| 6191ac0b1e |
| b51e87bc53 |
| 71f764f3d0 |
.dockerignore

@@ -5,42 +5,13 @@
 !docs/
 
 # Platform - Libs
-!autogpt_platform/autogpt_libs/autogpt_libs/
-!autogpt_platform/autogpt_libs/pyproject.toml
-!autogpt_platform/autogpt_libs/poetry.lock
-!autogpt_platform/autogpt_libs/README.md
+!autogpt_platform/autogpt_libs/
 
 # Platform - Backend
-!autogpt_platform/backend/backend/
-!autogpt_platform/backend/test/e2e_test_data.py
-!autogpt_platform/backend/migrations/
-!autogpt_platform/backend/schema.prisma
-!autogpt_platform/backend/pyproject.toml
-!autogpt_platform/backend/poetry.lock
-!autogpt_platform/backend/README.md
-!autogpt_platform/backend/.env
-!autogpt_platform/backend/gen_prisma_types_stub.py
-
-# Platform - Market
-!autogpt_platform/market/market/
-!autogpt_platform/market/scripts.py
-!autogpt_platform/market/schema.prisma
-!autogpt_platform/market/pyproject.toml
-!autogpt_platform/market/poetry.lock
-!autogpt_platform/market/README.md
+!autogpt_platform/backend/
 
 # Platform - Frontend
-!autogpt_platform/frontend/src/
-!autogpt_platform/frontend/public/
-!autogpt_platform/frontend/scripts/
-!autogpt_platform/frontend/package.json
-!autogpt_platform/frontend/pnpm-lock.yaml
-!autogpt_platform/frontend/tsconfig.json
-!autogpt_platform/frontend/README.md
-## config
-!autogpt_platform/frontend/*.config.*
-!autogpt_platform/frontend/.env.*
-!autogpt_platform/frontend/.env
+!autogpt_platform/frontend/
 
 # Classic - AutoGPT
 !classic/original_autogpt/autogpt/

@@ -64,6 +35,38 @@
 # Classic - Frontend
 !classic/frontend/build/web/
 
-# Explicitly re-ignore some folders
-.*
-**/__pycache__
+# Explicitly re-ignore unwanted files from whitelisted directories
+# Note: These patterns MUST come after the whitelist rules to take effect
+
+# Hidden files and directories (but keep frontend .env files needed for build)
+**/.*
+!autogpt_platform/frontend/.env
+!autogpt_platform/frontend/.env.default
+!autogpt_platform/frontend/.env.production
+
+# Python artifacts
+**/__pycache__/
+**/*.pyc
+**/*.pyo
+**/.venv/
+**/.ruff_cache/
+**/.pytest_cache/
+**/.coverage
+**/htmlcov/
+
+# Node artifacts
+**/node_modules/
+**/.next/
+**/storybook-static/
+**/playwright-report/
+**/test-results/
+
+# Build artifacts
+**/dist/
+**/build/
+!autogpt_platform/frontend/src/**/build/
+**/target/
+
+# Logs and temp files
+**/*.log
+**/*.tmp
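The "MUST come after the whitelist rules" note above works because ignore files are evaluated top to bottom and the last matching rule wins. A toy Python model of that semantics (illustrative only: real `.dockerignore` matching uses Go's `filepath.Match` per path segment plus `**`, and the example paths here are made up):

```python
# Last-match-wins evaluation, as in .dockerignore: later rules override earlier ones.
from fnmatch import fnmatch

# (pattern, ignore?) pairs in file order; True = ignore, False = whitelist ("!")
RULES = [
    ("*", True),                               # ignore everything by default
    ("autogpt_platform/frontend/*", False),    # !autogpt_platform/frontend/ ...
    ("autogpt_platform/frontend/.*", True),    # ... then re-ignore hidden files
    ("autogpt_platform/frontend/.env", False), # ... but keep .env for the build
]

def is_ignored(path: str) -> bool:
    ignored = False
    for pattern, ignore in RULES:
        if fnmatch(path, pattern):
            ignored = ignore  # the last matching rule decides
    return ignored

assert not is_ignored("autogpt_platform/frontend/package.json")
assert is_ignored("autogpt_platform/frontend/.npmrc")
assert not is_ignored("autogpt_platform/frontend/.env")
```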
42 .github/workflows/claude-ci-failure-auto-fix.yml (vendored)

@@ -40,6 +40,48 @@ jobs:
           git checkout -b "$BRANCH_NAME"
           echo "branch_name=$BRANCH_NAME" >> $GITHUB_OUTPUT
 
+      # Backend Python/Poetry setup (so Claude can run linting/tests)
+      - name: Set up Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.11"
+
+      - name: Set up Python dependency cache
+        uses: actions/cache@v5
+        with:
+          path: ~/.cache/pypoetry
+          key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }}
+
+      - name: Install Poetry
+        run: |
+          cd autogpt_platform/backend
+          HEAD_POETRY_VERSION=$(python3 ../../.github/workflows/scripts/get_package_version_from_lockfile.py poetry)
+          curl -sSL https://install.python-poetry.org | POETRY_VERSION=$HEAD_POETRY_VERSION python3 -
+          echo "$HOME/.local/bin" >> $GITHUB_PATH
+
+      - name: Install Python dependencies
+        working-directory: autogpt_platform/backend
+        run: poetry install
+
+      - name: Generate Prisma Client
+        working-directory: autogpt_platform/backend
+        run: poetry run prisma generate && poetry run gen-prisma-stub
+
+      # Frontend Node.js/pnpm setup (so Claude can run linting/tests)
+      - name: Enable corepack
+        run: corepack enable
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v6
+        with:
+          node-version: "22"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
+
+      - name: Install JavaScript dependencies
+        working-directory: autogpt_platform/frontend
+        run: pnpm install --frozen-lockfile
+
       - name: Get CI failure details
         id: failure_details
         uses: actions/github-script@v8
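The `Install Poetry` step pins the installer to the version recorded in the lockfile via `get_package_version_from_lockfile.py`, which is referenced here but not part of this diff. A minimal sketch of such a lookup, assuming a TOML lockfile with `[[package]]` entries (the real script may resolve the version from different lockfile metadata):

```python
#!/usr/bin/env python3
"""Hypothetical sketch of a lockfile version lookup: print the locked version
of the package named on the command line, reading poetry.lock as TOML."""
import sys
import tomllib  # stdlib since Python 3.11, which the workflow sets up


def get_locked_version(lockfile: str, package: str) -> str | None:
    with open(lockfile, "rb") as f:
        lock = tomllib.load(f)
    for pkg in lock.get("package", []):  # each [[package]] table
        if pkg.get("name") == package:
            return pkg.get("version")
    return None


if __name__ == "__main__":
    version = get_locked_version("poetry.lock", sys.argv[1])
    if version is None:
        sys.exit(f"{sys.argv[1]} not found in poetry.lock")
    print(version)
```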
22 .github/workflows/claude-dependabot.yml (vendored)

@@ -77,27 +77,15 @@ jobs:
         run: poetry run prisma generate && poetry run gen-prisma-stub
 
       # Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
+      - name: Enable corepack
+        run: corepack enable
+
       - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22"
-
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set pnpm store directory
-        run: |
-          pnpm config set store-dir ~/.pnpm-store
-          echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV
-
-      - name: Cache frontend dependencies
-        uses: actions/cache@v5
-        with:
-          path: ~/.pnpm-store
-          key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
       - name: Install JavaScript dependencies
         working-directory: autogpt_platform/frontend
22 .github/workflows/claude.yml (vendored)

@@ -93,27 +93,15 @@ jobs:
         run: poetry run prisma generate && poetry run gen-prisma-stub
 
       # Frontend Node.js/pnpm setup (mirrors platform-frontend-ci.yml)
+      - name: Enable corepack
+        run: corepack enable
+
       - name: Set up Node.js
         uses: actions/setup-node@v6
         with:
           node-version: "22"
-
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Set pnpm store directory
-        run: |
-          pnpm config set store-dir ~/.pnpm-store
-          echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV
-
-      - name: Cache frontend dependencies
-        uses: actions/cache@v5
-        with:
-          path: ~/.pnpm-store
-          key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
       - name: Install JavaScript dependencies
         working-directory: autogpt_platform/frontend
241 .github/workflows/platform-frontend-ci.yml (vendored)

@@ -26,7 +26,6 @@ jobs:
   setup:
     runs-on: ubuntu-latest
     outputs:
-      cache-key: ${{ steps.cache-key.outputs.key }}
       components-changed: ${{ steps.filter.outputs.components }}
 
     steps:

@@ -41,28 +40,17 @@
             components:
               - 'autogpt_platform/frontend/src/components/**'
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable
 
-      - name: Generate cache key
-        id: cache-key
-        run: echo "key=${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}" >> $GITHUB_OUTPUT
-
-      - name: Cache dependencies
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ steps.cache-key.outputs.key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
-      - name: Install dependencies
+      - name: Install dependencies to populate cache
         run: pnpm install --frozen-lockfile
 
   lint:

@@ -73,22 +61,15 @@
       - name: Checkout repository
        uses: actions/checkout@v6
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable
 
-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile

@@ -111,22 +92,15 @@
         with:
           fetch-depth: 0
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable
 
-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile

@@ -141,10 +115,8 @@
           exitOnceUploaded: true
 
   e2e_test:
-    name: end-to-end tests
     runs-on: big-boi
-    needs: setup
     strategy:
       fail-fast: false
 
     steps:
       - name: Checkout repository

@@ -152,19 +124,11 @@
         with:
           submodules: recursive
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
-      - name: Enable corepack
-        run: corepack enable
-
-      - name: Copy default supabase .env
+      - name: Set up Platform - Copy default supabase .env
         run: |
           cp ../.env.default ../.env
 
-      - name: Copy backend .env and set OpenAI API key
+      - name: Set up Platform - Copy backend .env and set OpenAI API key
         run: |
           cp ../backend/.env.default ../backend/.env
           echo "OPENAI_INTERNAL_API_KEY=${{ secrets.OPENAI_API_KEY }}" >> ../backend/.env

@@ -172,77 +136,125 @@
           # Used by E2E test data script to generate embeddings for approved store agents
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
 
-      - name: Set up Docker Buildx
+      - name: Set up Platform - Set up Docker Buildx
         uses: docker/setup-buildx-action@v3
         with:
           driver: docker-container
           driver-opts: network=host
 
-      - name: Cache Docker layers
+      - name: Set up Platform - Expose GHA cache to docker buildx CLI
+        uses: crazy-max/ghaction-github-runtime@v3
+
+      - name: Set up Platform - Build Docker images (with cache)
+        working-directory: autogpt_platform
+        run: |
+          pip install pyyaml
+
+          # Resolve extends and generate a flat compose file that bake can understand
+          docker compose -f docker-compose.yml config > docker-compose.resolved.yml
+
+          # Add cache configuration to the resolved compose file
+          python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
+            --source docker-compose.resolved.yml \
+            --cache-from "type=gha" \
+            --cache-to "type=gha,mode=max" \
+            --backend-hash "${{ hashFiles('autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/poetry.lock', 'autogpt_platform/backend/backend') }}" \
+            --frontend-hash "${{ hashFiles('autogpt_platform/frontend/Dockerfile', 'autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/src') }}" \
+            --git-ref "${{ github.ref }}"
+
+          # Build with bake using the resolved compose file (now includes cache config)
+          docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
+        env:
+          NEXT_PUBLIC_PW_TEST: true
+
+      - name: Set up tests - Cache E2E test data
+        id: e2e-data-cache
         uses: actions/cache@v5
         with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-frontend-test-${{ hashFiles('autogpt_platform/docker-compose.yml', 'autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/pyproject.toml', 'autogpt_platform/backend/poetry.lock') }}
-          restore-keys: |
-            ${{ runner.os }}-buildx-frontend-test-
+          path: /tmp/e2e_test_data.sql
+          key: e2e-test-data-${{ hashFiles('autogpt_platform/backend/test/e2e_test_data.py', 'autogpt_platform/backend/migrations/**', '.github/workflows/platform-frontend-ci.yml') }}
 
-      - name: Run docker compose
+      - name: Set up Platform - Start Supabase DB + Auth
         run: |
-          NEXT_PUBLIC_PW_TEST=true docker compose -f ../docker-compose.yml up -d
+          docker compose -f ../docker-compose.resolved.yml up -d db auth --no-build
+          echo "Waiting for database to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done'
+          echo "Waiting for auth service to be ready..."
+          timeout 60 sh -c 'until docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -c "SELECT 1 FROM auth.users LIMIT 1" 2>/dev/null; do sleep 2; done' || echo "Auth schema check timeout, continuing..."
+
+      - name: Set up Platform - Run migrations
+        run: |
+          echo "Running migrations..."
+          docker compose -f ../docker-compose.resolved.yml run --rm migrate
+          echo "✅ Migrations completed"
-        env:
-          DOCKER_BUILDKIT: 1
-          BUILDX_CACHE_FROM: type=local,src=/tmp/.buildx-cache
-          BUILDX_CACHE_TO: type=local,dest=/tmp/.buildx-cache-new,mode=max
-          NEXT_PUBLIC_PW_TEST: true
 
-      - name: Move cache
+      - name: Set up tests - Load cached E2E test data
+        if: steps.e2e-data-cache.outputs.cache-hit == 'true'
         run: |
-          rm -rf /tmp/.buildx-cache
-          if [ -d "/tmp/.buildx-cache-new" ]; then
-            mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-          fi
+          echo "✅ Found cached E2E test data, restoring..."
+          {
+            echo "SET session_replication_role = 'replica';"
+            cat /tmp/e2e_test_data.sql
+            echo "SET session_replication_role = 'origin';"
+          } | docker compose -f ../docker-compose.resolved.yml exec -T db psql -U postgres -d postgres -b
+          # Refresh materialized views after restore
+          docker compose -f ../docker-compose.resolved.yml exec -T db \
+            psql -U postgres -d postgres -b -c "SET search_path TO platform; SELECT refresh_store_materialized_views();" || true
+          echo "✅ E2E test data restored from cache"
 
-      - name: Wait for services to be ready
+      - name: Set up Platform - Start (all other services)
         run: |
+          docker compose -f ../docker-compose.resolved.yml up -d --no-build
           echo "Waiting for rest_server to be ready..."
           timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
-          echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
         env:
           NEXT_PUBLIC_PW_TEST: true
 
-      - name: Create E2E test data
+      - name: Set up tests - Create E2E test data
+        if: steps.e2e-data-cache.outputs.cache-hit != 'true'
         run: |
           echo "Creating E2E test data..."
-          # First try to run the script from inside the container
-          if docker compose -f ../docker-compose.yml exec -T rest_server test -f /app/autogpt_platform/backend/test/e2e_test_data.py; then
-            echo "✅ Found e2e_test_data.py in container, running it..."
-            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python backend/test/e2e_test_data.py" || {
-              echo "❌ E2E test data creation failed!"
-              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
-              exit 1
-            }
-          else
-            echo "⚠️ e2e_test_data.py not found in container, copying and running..."
-            # Copy the script into the container and run it
-            docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.yml ps -q rest_server):/tmp/e2e_test_data.py || {
-              echo "❌ Failed to copy script to container"
-              exit 1
-            }
-            docker compose -f ../docker-compose.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
-              echo "❌ E2E test data creation failed!"
-              docker compose -f ../docker-compose.yml logs --tail=50 rest_server
-              exit 1
-            }
-          fi
+          docker cp ../backend/test/e2e_test_data.py $(docker compose -f ../docker-compose.resolved.yml ps -q rest_server):/tmp/e2e_test_data.py
+          docker compose -f ../docker-compose.resolved.yml exec -T rest_server sh -c "cd /app/autogpt_platform && python /tmp/e2e_test_data.py" || {
+            echo "❌ E2E test data creation failed!"
+            docker compose -f ../docker-compose.resolved.yml logs --tail=50 rest_server
+            exit 1
+          }
 
-      - name: Restore dependencies cache
-        uses: actions/cache@v5
-        with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          # Dump auth.users + platform schema for cache (two separate dumps)
+          echo "Dumping database for cache..."
+          {
+            docker compose -f ../docker-compose.resolved.yml exec -T db \
+              pg_dump -U postgres --data-only --column-inserts \
+              --table='auth.users' postgres
+            docker compose -f ../docker-compose.resolved.yml exec -T db \
+              pg_dump -U postgres --data-only --column-inserts \
+              --schema=platform \
+              --exclude-table='platform._prisma_migrations' \
+              --exclude-table='platform.apscheduler_jobs' \
+              --exclude-table='platform.apscheduler_jobs_batched_notifications' \
+              postgres
+          } > /tmp/e2e_test_data.sql
+
+          echo "✅ Database dump created for caching ($(wc -l < /tmp/e2e_test_data.sql) lines)"
+
+      - name: Set up tests - Enable corepack
+        run: corepack enable
+
+      - name: Set up tests - Set up Node
+        uses: actions/setup-node@v6
+        with:
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
-      - name: Install dependencies
+      - name: Set up tests - Install dependencies
         run: pnpm install --frozen-lockfile
 
-      - name: Install Browser 'chromium'
+      - name: Set up tests - Install browser 'chromium'
         run: pnpm playwright install --with-deps chromium
 
       - name: Run Playwright tests

@@ -269,7 +281,7 @@
       - name: Print Final Docker Compose logs
         if: always()
-        run: docker compose -f ../docker-compose.yml logs
+        run: docker compose -f ../docker-compose.resolved.yml logs
 
   integration_test:
     runs-on: ubuntu-latest

@@ -281,22 +293,15 @@
         with:
           submodules: recursive
 
-      - name: Set up Node.js
-        uses: actions/setup-node@v6
-        with:
-          node-version: "22.18.0"
-
       - name: Enable corepack
         run: corepack enable
 
-      - name: Restore dependencies cache
-        uses: actions/cache@v5
+      - name: Set up Node
+        uses: actions/setup-node@v6
         with:
-          path: ~/.pnpm-store
-          key: ${{ needs.setup.outputs.cache-key }}
-          restore-keys: |
-            ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
-            ${{ runner.os }}-pnpm-
+          node-version: "22.18.0"
+          cache: "pnpm"
+          cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
 
       - name: Install dependencies
         run: pnpm install --frozen-lockfile
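One detail in the E2E data steps above is worth spelling out: the cached dump is data-only INSERTs replayed in whatever order pg_dump emitted them, so the restore wraps it in `session_replication_role = 'replica'`, which stops PostgreSQL from firing triggers (including the triggers that enforce foreign keys) until the toggle back to `'origin'`. A hedged Python equivalent of that shell pipeline, shown only for illustration, using the same compose file and dump path as the workflow:

```python
# Replay a data-only dump with FK triggers disabled, mirroring the shell step above.
import subprocess


def restore_dump(dump_path: str = "/tmp/e2e_test_data.sql") -> None:
    with open(dump_path) as f:
        dump_sql = f.read()
    # 'replica' suppresses user triggers and FK constraint triggers during the
    # inserts; 'origin' restores normal firing afterwards.
    sql = (
        "SET session_replication_role = 'replica';\n"
        f"{dump_sql}\n"
        "SET session_replication_role = 'origin';\n"
    )
    subprocess.run(
        ["docker", "compose", "-f", "../docker-compose.resolved.yml",
         "exec", "-T", "db",
         "psql", "-U", "postgres", "-d", "postgres", "-b"],
        input=sql.encode(),
        check=True,
    )


if __name__ == "__main__":
    restore_dump()
```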
195 .github/workflows/scripts/docker-ci-fix-compose-build-cache.py (vendored, new file)

@@ -0,0 +1,195 @@

#!/usr/bin/env python3
"""
Add cache configuration to a resolved docker-compose file for all services
that have a build key, and ensure image names match what docker compose expects.
"""

import argparse

import yaml

DEFAULT_BRANCH = "dev"
CACHE_BUILDS_FOR_COMPONENTS = ["backend", "frontend"]


def main():
    parser = argparse.ArgumentParser(
        description="Add cache config to a resolved compose file"
    )
    parser.add_argument(
        "--source",
        required=True,
        help="Source compose file to read (should be output of `docker compose config`)",
    )
    parser.add_argument(
        "--cache-from",
        default="type=gha",
        help="Cache source configuration",
    )
    parser.add_argument(
        "--cache-to",
        default="type=gha,mode=max",
        help="Cache destination configuration",
    )
    for component in CACHE_BUILDS_FOR_COMPONENTS:
        parser.add_argument(
            f"--{component}-hash",
            default="",
            help=f"Hash for {component} cache scope (e.g., from hashFiles())",
        )
    parser.add_argument(
        "--git-ref",
        default="",
        help="Git ref for branch-based cache scope (e.g., refs/heads/master)",
    )
    args = parser.parse_args()

    # Normalize git ref to a safe scope name (e.g., refs/heads/master -> master)
    git_ref_scope = ""
    if args.git_ref:
        git_ref_scope = args.git_ref.replace("refs/heads/", "").replace("/", "-")

    with open(args.source, "r") as f:
        compose = yaml.safe_load(f)

    # Get project name from compose file or default
    project_name = compose.get("name", "autogpt_platform")

    def get_image_name(dockerfile: str, target: str) -> str:
        """Generate image name based on Dockerfile folder and build target."""
        dockerfile_parts = dockerfile.replace("\\", "/").split("/")
        if len(dockerfile_parts) >= 2:
            folder_name = dockerfile_parts[-2]  # e.g., "backend" or "frontend"
        else:
            folder_name = "app"
        return f"{project_name}-{folder_name}:{target}"

    def get_build_key(dockerfile: str, target: str) -> str:
        """Generate a unique key for a Dockerfile+target combination."""
        return f"{dockerfile}:{target}"

    def get_component(dockerfile: str) -> str | None:
        """Get component name (frontend/backend) from dockerfile path."""
        for component in CACHE_BUILDS_FOR_COMPONENTS:
            if component in dockerfile:
                return component
        return None

    # First pass: collect all services with build configs and identify duplicates
    # Track which (dockerfile, target) combinations we've seen
    build_key_to_first_service: dict[str, str] = {}
    services_to_build: list[str] = []
    services_to_dedupe: list[str] = []

    for service_name, service_config in compose.get("services", {}).items():
        if "build" not in service_config:
            continue

        build_config = service_config["build"]
        dockerfile = build_config.get("dockerfile", "Dockerfile")
        target = build_config.get("target", "default")
        build_key = get_build_key(dockerfile, target)

        if build_key not in build_key_to_first_service:
            # First service with this build config - it will do the actual build
            build_key_to_first_service[build_key] = service_name
            services_to_build.append(service_name)
        else:
            # Duplicate - will just use the image from the first service
            services_to_dedupe.append(service_name)

    # Second pass: configure builds and deduplicate
    modified_services = []
    for service_name, service_config in compose.get("services", {}).items():
        if "build" not in service_config:
            continue

        build_config = service_config["build"]
        dockerfile = build_config.get("dockerfile", "Dockerfile")
        target = build_config.get("target", "latest")
        image_name = get_image_name(dockerfile, target)

        # Set image name for all services (needed for both builders and deduped)
        service_config["image"] = image_name

        if service_name in services_to_dedupe:
            # Remove build config - this service will use the pre-built image
            del service_config["build"]
            continue

        # This service will do the actual build - add cache config
        cache_from_list = []
        cache_to_list = []

        component = get_component(dockerfile)
        if not component:
            # Skip services that don't clearly match frontend/backend
            continue

        # Get the hash for this component
        component_hash = getattr(args, f"{component}_hash")

        # Scope format: platform-{component}-{target}-{hash|ref}
        # Example: platform-backend-server-abc123

        if "type=gha" in args.cache_from:
            # 1. Primary: exact hash match (most specific)
            if component_hash:
                hash_scope = f"platform-{component}-{target}-{component_hash}"
                cache_from_list.append(f"{args.cache_from},scope={hash_scope}")

            # 2. Fallback: branch-based cache
            if git_ref_scope:
                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
                cache_from_list.append(f"{args.cache_from},scope={ref_scope}")

            # 3. Fallback: dev branch cache (for PRs/feature branches)
            if git_ref_scope and git_ref_scope != DEFAULT_BRANCH:
                master_scope = f"platform-{component}-{target}-{DEFAULT_BRANCH}"
                cache_from_list.append(f"{args.cache_from},scope={master_scope}")

        if "type=gha" in args.cache_to:
            # Write to both hash-based and branch-based scopes
            if component_hash:
                hash_scope = f"platform-{component}-{target}-{component_hash}"
                cache_to_list.append(f"{args.cache_to},scope={hash_scope}")

            if git_ref_scope:
                ref_scope = f"platform-{component}-{target}-{git_ref_scope}"
                cache_to_list.append(f"{args.cache_to},scope={ref_scope}")

        # Ensure we have at least one cache source/target
        if not cache_from_list:
            cache_from_list.append(args.cache_from)
        if not cache_to_list:
            cache_to_list.append(args.cache_to)

        build_config["cache_from"] = cache_from_list
        build_config["cache_to"] = cache_to_list
        modified_services.append(service_name)

    # Write back to the same file
    with open(args.source, "w") as f:
        yaml.dump(compose, f, default_flow_style=False, sort_keys=False)

    print(f"Added cache config to {len(modified_services)} services in {args.source}:")
    for svc in modified_services:
        svc_config = compose["services"][svc]
        build_cfg = svc_config.get("build", {})
        cache_from_list = build_cfg.get("cache_from", ["none"])
        cache_to_list = build_cfg.get("cache_to", ["none"])
        print(f"  - {svc}")
        print(f"      image: {svc_config.get('image', 'N/A')}")
        print(f"      cache_from: {cache_from_list}")
        print(f"      cache_to: {cache_to_list}")
    if services_to_dedupe:
        print(
            f"Deduplicated {len(services_to_dedupe)} services (will use pre-built images):"
        )
        for svc in services_to_dedupe:
            print(f"  - {svc} -> {compose['services'][svc].get('image', 'N/A')}")


if __name__ == "__main__":
    main()
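As an illustration of the script's two passes (cache injection plus build deduplication), here is a hypothetical local run against a toy resolved compose file. The two service names are made up, and it assumes PyYAML is installed and the script path matches this repo's layout:

```python
# Exercise docker-ci-fix-compose-build-cache.py on a minimal fake compose file.
import subprocess
import tempfile

import yaml

compose = {
    "name": "autogpt_platform",
    "services": {
        # Both services build the same Dockerfile+target, so only the first
        # keeps its build config; the second is deduped onto the shared image.
        "rest_server": {"build": {"dockerfile": "backend/Dockerfile", "target": "server"}},
        "executor": {"build": {"dockerfile": "backend/Dockerfile", "target": "server"}},
    },
}
with tempfile.NamedTemporaryFile("w", suffix=".yml", delete=False) as f:
    yaml.dump(compose, f)
    source = f.name

subprocess.run(
    ["python", ".github/workflows/scripts/docker-ci-fix-compose-build-cache.py",
     "--source", source,
     "--backend-hash", "abc123",
     "--git-ref", "refs/heads/my-feature"],
    check=True,
)

with open(source) as fh:
    result = yaml.safe_load(fh)
# rest_server: image autogpt_platform-backend:server, cache_from with three GHA
# scopes (hash, branch, dev fallback); executor: build removed, image shared.
print(yaml.dump(result["services"], sort_keys=False))
```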
@@ -45,6 +45,11 @@ AutoGPT Platform is a monorepo containing:
 - Backend/Frontend services use YAML anchors for consistent configuration
 - Supabase services (`db/docker/docker-compose.yml`) follow the same pattern
 
+### Branching Strategy
+
+- **`dev`** is the main development branch. All PRs should target `dev`.
+- **`master`** is the production branch. Only used for production releases.
+
 ### Creating Pull Requests
 
 - Create the PR against the `dev` branch of the repository.
169 autogpt_platform/autogpt_libs/poetry.lock (generated)

@@ -448,61 +448,61 @@ toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
 
 [[package]]
 name = "cryptography"
-version = "46.0.4"
+version = "46.0.5"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = "!=3.9.0,!=3.9.1,>=3.8"
 groups = ["main"]
 files = [
-    {file = "cryptography-46.0.4-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:281526e865ed4166009e235afadf3a4c4cba6056f99336a99efba65336fd5485"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f14fba5bf6f4390d7ff8f086c566454bff0411f6d8aa7af79c88b6f9267aecc"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47bcd19517e6389132f76e2d5303ded6cf3f78903da2158a671be8de024f4cd0"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:01df4f50f314fbe7009f54046e908d1754f19d0c6d3070df1e6268c5a4af09fa"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5aa3e463596b0087b3da0dbe2b2487e9fc261d25da85754e30e3b40637d61f81"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0a9ad24359fee86f131836a9ac3bffc9329e956624a2d379b613f8f8abaf5255"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:dc1272e25ef673efe72f2096e92ae39dea1a1a450dd44918b15351f72c5a168e"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:de0f5f4ec8711ebc555f54735d4c673fc34b65c44283895f1a08c2b49d2fd99c"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:eeeb2e33d8dbcccc34d64651f00a98cb41b2dc69cef866771a5717e6734dfa32"},
-    {file = "cryptography-46.0.4-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:3d425eacbc9aceafd2cb429e42f4e5d5633c6f873f5e567077043ef1b9bbf616"},
-    {file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91627ebf691d1ea3976a031b61fb7bac1ccd745afa03602275dda443e11c8de0"},
-    {file = "cryptography-46.0.4-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2d08bc22efd73e8854b0b7caff402d735b354862f1145d7be3b9c0f740fef6a0"},
-    {file = "cryptography-46.0.4-cp311-abi3-win32.whl", hash = "sha256:82a62483daf20b8134f6e92898da70d04d0ef9a75829d732ea1018678185f4f5"},
-    {file = "cryptography-46.0.4-cp311-abi3-win_amd64.whl", hash = "sha256:6225d3ebe26a55dbc8ead5ad1265c0403552a63336499564675b29eb3184c09b"},
-    {file = "cryptography-46.0.4-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:485e2b65d25ec0d901bca7bcae0f53b00133bf3173916d8e421f6fddde103908"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:078e5f06bd2fa5aea5a324f2a09f914b1484f1d0c2a4d6a8a28c74e72f65f2da"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dce1e4f068f03008da7fa51cc7abc6ddc5e5de3e3d1550334eaf8393982a5829"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:2067461c80271f422ee7bdbe79b9b4be54a5162e90345f86a23445a0cf3fd8a2"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:c92010b58a51196a5f41c3795190203ac52edfd5dc3ff99149b4659eba9d2085"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:829c2b12bbc5428ab02d6b7f7e9bbfd53e33efd6672d21341f2177470171ad8b"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:62217ba44bf81b30abaeda1488686a04a702a261e26f87db51ff61d9d3510abd"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:9c2da296c8d3415b93e6053f5a728649a87a48ce084a9aaf51d6e46c87c7f2d2"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:9b34d8ba84454641a6bf4d6762d15847ecbd85c1316c0a7984e6e4e9f748ec2e"},
-    {file = "cryptography-46.0.4-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:df4a817fa7138dd0c96c8c8c20f04b8aaa1fac3bbf610913dcad8ea82e1bfd3f"},
-    {file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b1de0ebf7587f28f9190b9cb526e901bf448c9e6a99655d2b07fff60e8212a82"},
-    {file = "cryptography-46.0.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9b4d17bc7bd7cdd98e3af40b441feaea4c68225e2eb2341026c84511ad246c0c"},
-    {file = "cryptography-46.0.4-cp314-cp314t-win32.whl", hash = "sha256:c411f16275b0dea722d76544a61d6421e2cc829ad76eec79280dbdc9ddf50061"},
-    {file = "cryptography-46.0.4-cp314-cp314t-win_amd64.whl", hash = "sha256:728fedc529efc1439eb6107b677f7f7558adab4553ef8669f0d02d42d7b959a7"},
-    {file = "cryptography-46.0.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a9556ba711f7c23f77b151d5798f3ac44a13455cc68db7697a1096e6d0563cab"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8bf75b0259e87fa70bddc0b8b4078b76e7fd512fd9afae6c1193bcf440a4dbef"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3c268a3490df22270955966ba236d6bc4a8f9b6e4ffddb78aac535f1a5ea471d"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:812815182f6a0c1d49a37893a303b44eaac827d7f0d582cecfc81b6427f22973"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:a90e43e3ef65e6dcf969dfe3bb40cbf5aef0d523dff95bfa24256be172a845f4"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a05177ff6296644ef2876fce50518dffb5bcdf903c85250974fc8bc85d54c0af"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:daa392191f626d50f1b136c9b4cf08af69ca8279d110ea24f5c2700054d2e263"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e07ea39c5b048e085f15923511d8121e4a9dc45cee4e3b970ca4f0d338f23095"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:d5a45ddc256f492ce42a4e35879c5e5528c09cd9ad12420828c972951d8e016b"},
-    {file = "cryptography-46.0.4-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:6bb5157bf6a350e5b28aee23beb2d84ae6f5be390b2f8ee7ea179cda077e1019"},
-    {file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:dd5aba870a2c40f87a3af043e0dee7d9eb02d4aff88a797b48f2b43eff8c3ab4"},
-    {file = "cryptography-46.0.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:93d8291da8d71024379ab2cb0b5c57915300155ad42e07f76bea6ad838d7e59b"},
-    {file = "cryptography-46.0.4-cp38-abi3-win32.whl", hash = "sha256:0563655cb3c6d05fb2afe693340bc050c30f9f34e15763361cf08e94749401fc"},
-    {file = "cryptography-46.0.4-cp38-abi3-win_amd64.whl", hash = "sha256:fa0900b9ef9c49728887d1576fd8d9e7e3ea872fa9b25ef9b64888adc434e976"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:766330cce7416c92b5e90c3bb71b1b79521760cdcfc3a6a1a182d4c9fab23d2b"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c236a44acfb610e70f6b3e1c3ca20ff24459659231ef2f8c48e879e2d32b73da"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8a15fb869670efa8f83cbffbc8753c1abf236883225aed74cd179b720ac9ec80"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:fdc3daab53b212472f1524d070735b2f0c214239df131903bae1d598016fa822"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:44cc0675b27cadb71bdbb96099cca1fa051cd11d2ade09e5cd3a2edb929ed947"},
-    {file = "cryptography-46.0.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be8c01a7d5a55f9a47d1888162b76c8f49d62b234d88f0ff91a9fbebe32ffbc3"},
-    {file = "cryptography-46.0.4.tar.gz", hash = "sha256:bfd019f60f8abc2ed1b9be4ddc21cfef059c841d86d710bb69909a688cbb8f59"},
+    {file = "cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0"},
+    {file = "cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731"},
+    {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82"},
+    {file = "cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1"},
+    {file = "cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48"},
+    {file = "cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4"},
+    {file = "cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0"},
+    {file = "cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663"},
+    {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826"},
+    {file = "cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d"},
+    {file = "cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a"},
+    {file = "cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4"},
+    {file = "cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d"},
+    {file = "cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c"},
+    {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4"},
+    {file = "cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9"},
+    {file = "cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72"},
+    {file = "cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257"},
+    {file = "cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7"},
+    {file = "cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d"},
 ]
 
 [package.dependencies]

@@ -516,7 +516,7 @@ nox = ["nox[uv] (>=2024.4.15)"]
 pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.14)", "ruff (>=0.11.11)"]
 sdist = ["build (>=1.0.0)"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi (>=2024)", "cryptography-vectors (==46.0.4)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==46.0.5)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]
 
 [[package]]

@@ -570,24 +570,25 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"]
 
 [[package]]
 name = "fastapi"
-version = "0.128.0"
+version = "0.128.7"
 description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d"},
-    {file = "fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a"},
+    {file = "fastapi-0.128.7-py3-none-any.whl", hash = "sha256:6bd9bd31cb7047465f2d3fa3ba3f33b0870b17d4eaf7cdb36d1576ab060ad662"},
+    {file = "fastapi-0.128.7.tar.gz", hash = "sha256:783c273416995486c155ad2c0e2b45905dedfaf20b9ef8d9f6a9124670639a24"},
 ]
 
 [package.dependencies]
+annotated-doc = ">=0.0.2"
 pydantic = ">=2.7.0"
-starlette = ">=0.40.0,<0.51.0"
+starlette = ">=0.40.0,<1.0.0"
 typing-extensions = ">=4.8.0"
 typing-inspection = ">=0.4.2"
 
 [package.extras]
-all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"]
+all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.9.3)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=5.8.0)", "uvicorn[standard] (>=0.12.0)"]
 standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]
 standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[standard-no-fastapi-cloud-cli] (>=0.0.8)", "httpx (>=0.23.0,<1.0.0)", "jinja2 (>=3.1.5)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"]

@@ -1062,14 +1063,14 @@ urllib3 = ">=1.26.0,<3"
 
 [[package]]
 name = "launchdarkly-server-sdk"
-version = "9.14.1"
+version = "9.15.0"
 description = "LaunchDarkly SDK for Python"
 optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
 groups = ["main"]
 files = [
-    {file = "launchdarkly_server_sdk-9.14.1-py3-none-any.whl", hash = "sha256:a9e2bd9ecdef845cd631ae0d4334a1115e5b44257c42eb2349492be4bac7815c"},
-    {file = "launchdarkly_server_sdk-9.14.1.tar.gz", hash = "sha256:1df44baf0a0efa74d8c1dad7a00592b98bce7d19edded7f770da8dbc49922213"},
+    {file = "launchdarkly_server_sdk-9.15.0-py3-none-any.whl", hash = "sha256:c267e29bfa3fb5e2a06a208448ada6ed5557a2924979b8d79c970b45d227c668"},
+    {file = "launchdarkly_server_sdk-9.15.0.tar.gz", hash = "sha256:f31441b74bc1a69c381db57c33116509e407a2612628ad6dff0a7dbb39d5020b"},
 ]
 
 [package.dependencies]

@@ -1478,14 +1479,14 @@ testing = ["coverage", "pytest", "pytest-benchmark"]
 
 [[package]]
 name = "postgrest"
-version = "2.27.2"
+version = "2.28.0"
 description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "postgrest-2.27.2-py3-none-any.whl", hash = "sha256:1666fef3de05ca097a314433dd5ae2f2d71c613cb7b233d0f468c4ffe37277da"},
-    {file = "postgrest-2.27.2.tar.gz", hash = "sha256:55407d530b5af3d64e883a71fec1f345d369958f723ce4a8ab0b7d169e313242"},
+    {file = "postgrest-2.28.0-py3-none-any.whl", hash = "sha256:7bca2f24dd1a1bf8a3d586c7482aba6cd41662da6733045fad585b63b7f7df75"},
+    {file = "postgrest-2.28.0.tar.gz", hash = "sha256:c36b38646d25ea4255321d3d924ce70f8d20ec7799cb42c1221d6a818d4f6515"},
 ]
 
 [package.dependencies]

@@ -2248,14 +2249,14 @@ cli = ["click (>=5.0)"]
 
 [[package]]
 name = "realtime"
-version = "2.27.2"
+version = "2.28.0"
 description = ""
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "realtime-2.27.2-py3-none-any.whl", hash = "sha256:34a9cbb26a274e707e8fc9e3ee0a66de944beac0fe604dc336d1e985db2c830f"},
-    {file = "realtime-2.27.2.tar.gz", hash = "sha256:b960a90294d2cea1b3f1275ecb89204304728e08fff1c393cc1b3150739556b3"},
+    {file = "realtime-2.28.0-py3-none-any.whl", hash = "sha256:db1bd59bab9b1fcc9f9d3b1a073bed35bf4994d720e6751f10031a58d57a3836"},
+    {file = "realtime-2.28.0.tar.gz", hash = "sha256:d18cedcebd6a8f22fcd509bc767f639761eb218b7b2b6f14fc4205b6259b50fc"},
 ]
 
 [package.dependencies]

@@ -2436,14 +2437,14 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart
 
 [[package]]
 name = "storage3"
-version = "2.27.2"
+version = "2.28.0"
 description = "Supabase Storage client for Python."
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "storage3-2.27.2-py3-none-any.whl", hash = "sha256:e6f16e7a260729e7b1f46e9bf61746805a02e30f5e419ee1291007c432e3ec63"},
-    {file = "storage3-2.27.2.tar.gz", hash = "sha256:cb4807b7f86b4bb1272ac6fdd2f3cfd8ba577297046fa5f88557425200275af5"},
+    {file = "storage3-2.28.0-py3-none-any.whl", hash = "sha256:ecb50efd2ac71dabbdf97e99ad346eafa630c4c627a8e5a138ceb5fbbadae716"},
+    {file = "storage3-2.28.0.tar.gz", hash = "sha256:bc1d008aff67de7a0f2bd867baee7aadbcdb6f78f5a310b4f7a38e8c13c19865"},
 ]
 
 [package.dependencies]

@@ -2487,35 +2488,35 @@ python-dateutil = ">=2.6.0"
 
 [[package]]
 name = "supabase"
-version = "2.27.2"
+version = "2.28.0"
 description = "Supabase client for Python."
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "supabase-2.27.2-py3-none-any.whl", hash = "sha256:d4dce00b3a418ee578017ec577c0e5be47a9a636355009c76f20ed2faa15bc54"},
-    {file = "supabase-2.27.2.tar.gz", hash = "sha256:2aed40e4f3454438822442a1e94a47be6694c2c70392e7ae99b51a226d4293f7"},
+    {file = "supabase-2.28.0-py3-none-any.whl", hash = "sha256:42776971c7d0ccca16034df1ab96a31c50228eb1eb19da4249ad2f756fc20272"},
+    {file = "supabase-2.28.0.tar.gz", hash = "sha256:aea299aaab2a2eed3c57e0be7fc035c6807214194cce795a3575add20268ece1"},
 ]
 
 [package.dependencies]
 httpx = ">=0.26,<0.29"
-postgrest = "2.27.2"
-realtime = "2.27.2"
-storage3 = "2.27.2"
-supabase-auth = "2.27.2"
-supabase-functions = "2.27.2"
+postgrest = "2.28.0"
+realtime = "2.28.0"
+storage3 = "2.28.0"
+supabase-auth = "2.28.0"
+supabase-functions = "2.28.0"
 yarl = ">=1.22.0"
 
 [[package]]
 name = "supabase-auth"
-version = "2.27.2"
+version = "2.28.0"
 description = "Python Client Library for Supabase Auth"
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "supabase_auth-2.27.2-py3-none-any.whl", hash = "sha256:78ec25b11314d0a9527a7205f3b1c72560dccdc11b38392f80297ef98664ee91"},
-    {file = "supabase_auth-2.27.2.tar.gz", hash = "sha256:0f5bcc79b3677cb42e9d321f3c559070cfa40d6a29a67672cc8382fb7dc2fe97"},
+    {file = "supabase_auth-2.28.0-py3-none-any.whl", hash = "sha256:2ac85026cc285054c7fa6d41924f3a333e9ec298c013e5b5e1754039ba7caec9"},
+    {file = "supabase_auth-2.28.0.tar.gz", hash = "sha256:2bb8f18ff39934e44b28f10918db965659f3735cd6fbfcc022fe0b82dbf8233e"},
 ]
 
 [package.dependencies]

@@ -2525,14 +2526,14 @@ pyjwt = {version = ">=2.10.1", extras = ["crypto"]}
 
 [[package]]
 name = "supabase-functions"
-version = "2.27.2"
+version = "2.28.0"
 description = "Library for Supabase Functions"
 optional = false
 python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "supabase_functions-2.27.2-py3-none-any.whl", hash = "sha256:db480efc669d0bca07605b9b6f167312af43121adcc842a111f79bea416ef754"},
-    {file = "supabase_functions-2.27.2.tar.gz", hash = "sha256:d0c8266207a94371cb3fd35ad3c7f025b78a97cf026861e04ccd35ac1775f80b"},
+    {file = "supabase_functions-2.28.0-py3-none-any.whl", hash = "sha256:30bf2d586f8df285faf0621bb5d5bb3ec3157234fc820553ca156f009475e4ae"},
+    {file = "supabase_functions-2.28.0.tar.gz", hash = "sha256:db3dddfc37aca5858819eb461130968473bd8c75bd284581013958526dac718b"},
 ]
 
 [package.dependencies]

@@ -2911,4 +2912,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.10,<4.0"
-content-hash = "40eae94995dc0a388fa832ed4af9b6137f28d5b5ced3aaea70d5f91d4d9a179d"
+content-hash = "9619cae908ad38fa2c48016a58bcf4241f6f5793aa0e6cc140276e91c433cbbb"
autogpt_platform/autogpt_libs/pyproject.toml

@@ -11,14 +11,14 @@ python = ">=3.10,<4.0"
 colorama = "^0.4.6"
 cryptography = "^46.0"
 expiringdict = "^1.2.2"
-fastapi = "^0.128.0"
+fastapi = "^0.128.7"
 google-cloud-logging = "^3.13.0"
-launchdarkly-server-sdk = "^9.14.1"
+launchdarkly-server-sdk = "^9.15.0"
 pydantic = "^2.12.5"
 pydantic-settings = "^2.12.0"
 pyjwt = { version = "^2.11.0", extras = ["crypto"] }
 redis = "^6.2.0"
-supabase = "^2.27.2"
+supabase = "^2.28.0"
 uvicorn = "^0.40.0"
 
 [tool.poetry.group.dev.dependencies]
@@ -1,3 +1,5 @@
# ============================ DEPENDENCY BUILDER ============================ #

FROM debian:13-slim AS builder

# Set environment variables
@@ -51,7 +53,9 @@ COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/parti
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
RUN poetry run prisma generate && poetry run gen-prisma-stub

FROM debian:13-slim AS server_dependencies
# ============================== BACKEND SERVER ============================== #

FROM debian:13-slim AS server

WORKDIR /app

@@ -63,15 +67,14 @@ ENV POETRY_HOME=/opt/poetry \
ENV PATH=/opt/poetry/bin:$PATH

# Install Python, FFmpeg, and ImageMagick (required for video processing blocks)
RUN apt-get update && apt-get install -y \
# Using --no-install-recommends saves ~650MB by skipping unnecessary deps like llvm, mesa, etc.
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.13 \
    python3-pip \
    ffmpeg \
    imagemagick \
    && rm -rf /var/lib/apt/lists/*

# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3* /usr/local/lib/python3*
COPY --from=builder /usr/local/bin/poetry /usr/local/bin/poetry
# Copy Node.js installation for Prisma
@@ -81,30 +84,54 @@ COPY --from=builder /usr/bin/npm /usr/bin/npm
COPY --from=builder /usr/bin/npx /usr/bin/npx
COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"

RUN mkdir -p /app/autogpt_platform/autogpt_libs
RUN mkdir -p /app/autogpt_platform/backend

COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs

COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/

WORKDIR /app/autogpt_platform/backend

FROM server_dependencies AS migrate
# Copy only the .venv from builder (not the entire /app directory)
# The .venv includes the generated Prisma client
COPY --from=builder /app/autogpt_platform/backend/.venv ./.venv
ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"

# Migration stage only needs schema and migrations - much lighter than full backend
COPY autogpt_platform/backend/schema.prisma /app/autogpt_platform/backend/
COPY autogpt_platform/backend/backend/data/partial_types.py /app/autogpt_platform/backend/backend/data/partial_types.py
COPY autogpt_platform/backend/migrations /app/autogpt_platform/backend/migrations
# Copy dependency files + autogpt_libs (path dependency)
COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml ./

FROM server_dependencies AS server

COPY autogpt_platform/backend /app/autogpt_platform/backend
# Copy backend code + docs (for Copilot docs search)
COPY autogpt_platform/backend ./
COPY docs /app/docs
RUN poetry install --no-ansi --only-root

ENV PORT=8000

CMD ["poetry", "run", "rest"]

# =============================== DB MIGRATOR =============================== #

# Lightweight migrate stage - only needs Prisma CLI, not full Python environment
FROM debian:13-slim AS migrate

WORKDIR /app/autogpt_platform/backend

ENV DEBIAN_FRONTEND=noninteractive

# Install only what's needed for prisma migrate: Node.js and minimal Python for prisma-python
RUN apt-get update && apt-get install -y --no-install-recommends \
    python3.13 \
    python3-pip \
    ca-certificates \
    && rm -rf /var/lib/apt/lists/*

# Copy Node.js from builder (needed for Prisma CLI)
COPY --from=builder /usr/bin/node /usr/bin/node
COPY --from=builder /usr/lib/node_modules /usr/lib/node_modules
COPY --from=builder /usr/bin/npm /usr/bin/npm

# Copy Prisma binaries
COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

# Install prisma-client-py directly (much smaller than copying full venv)
# Note: the version spec must be quoted, otherwise the shell treats ">=0.15.0" as a redirection
RUN pip3 install "prisma>=0.15.0" --break-system-packages

COPY autogpt_platform/backend/schema.prisma ./
COPY autogpt_platform/backend/backend/data/partial_types.py ./backend/data/partial_types.py
COPY autogpt_platform/backend/gen_prisma_types_stub.py ./
COPY autogpt_platform/backend/migrations ./migrations

@@ -24,6 +24,7 @@ from .tools.models import (
    AgentPreviewResponse,
    AgentSavedResponse,
    AgentsFoundResponse,
    BlockDetailsResponse,
    BlockListResponse,
    BlockOutputResponse,
    ClarificationNeededResponse,
@@ -971,6 +972,7 @@ ToolResponseUnion = (
    | AgentSavedResponse
    | ClarificationNeededResponse
    | BlockListResponse
    | BlockDetailsResponse
    | BlockOutputResponse
    | DocSearchResultsResponse
    | DocPageResponse

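These two hunks register the new BlockDetailsResponse in the imports and in ToolResponseUnion. For readers skimming the plumbing: each response model carries a ResponseType value in its `type` field (see the models.py hunks further down), which is what lets a serialized response be mapped back to the right class. A minimal sketch of that pattern, assuming Pydantic v2 and using only two invented members rather than the real union:

```python
from enum import Enum
from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter


class ResponseType(str, Enum):
    BLOCK_LIST = "block_list"
    BLOCK_DETAILS = "block_details"


class BlockListResponse(BaseModel):
    type: Literal[ResponseType.BLOCK_LIST] = ResponseType.BLOCK_LIST
    count: int


class BlockDetailsResponse(BaseModel):
    type: Literal[ResponseType.BLOCK_DETAILS] = ResponseType.BLOCK_DETAILS
    block_id: str


# The "type" field acts as the discriminator, so parsing picks the right model.
ToolResponse = TypeAdapter(
    Annotated[
        Union[BlockListResponse, BlockDetailsResponse],
        Field(discriminator="type"),
    ]
)

parsed = ToolResponse.validate_python({"type": "block_details", "block_id": "abc"})
assert isinstance(parsed, BlockDetailsResponse)
```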
@@ -7,7 +7,6 @@ from backend.api.features.chat.model import ChatSession
from backend.api.features.chat.tools.base import BaseTool, ToolResponseBase
from backend.api.features.chat.tools.models import (
    BlockInfoSummary,
    BlockInputFieldInfo,
    BlockListResponse,
    ErrorResponse,
    NoResultsResponse,
@@ -55,7 +54,8 @@ class FindBlockTool(BaseTool):
        "Blocks are reusable components that perform specific tasks like "
        "sending emails, making API calls, processing text, etc. "
        "IMPORTANT: Use this tool FIRST to get the block's 'id' before calling run_block. "
        "The response includes each block's id, required_inputs, and input_schema."
        "The response includes each block's id, name, and description. "
        "Call run_block with the block's id **with no inputs** to see detailed inputs/outputs and execute it."
    )

    @property
@@ -124,7 +124,7 @@ class FindBlockTool(BaseTool):
            session_id=session_id,
        )

        # Enrich results with full block information
        # Enrich results with block information
        blocks: list[BlockInfoSummary] = []
        for result in results:
            block_id = result["content_id"]
@@ -141,65 +141,11 @@ class FindBlockTool(BaseTool):
            ):
                continue

            # Get input/output schemas
            input_schema = {}
            output_schema = {}
            try:
                input_schema = block.input_schema.jsonschema()
            except Exception as e:
                logger.debug(
                    "Failed to generate input schema for block %s: %s",
                    block_id,
                    e,
                )
            try:
                output_schema = block.output_schema.jsonschema()
            except Exception as e:
                logger.debug(
                    "Failed to generate output schema for block %s: %s",
                    block_id,
                    e,
                )

            # Get categories from block instance
            categories = []
            if hasattr(block, "categories") and block.categories:
                categories = [cat.value for cat in block.categories]

            # Extract required inputs for easier use
            required_inputs: list[BlockInputFieldInfo] = []
            if input_schema:
                properties = input_schema.get("properties", {})
                required_fields = set(input_schema.get("required", []))
                # Get credential field names to exclude from required inputs
                credentials_fields = set(
                    block.input_schema.get_credentials_fields().keys()
                )

                for field_name, field_schema in properties.items():
                    # Skip credential fields - they're handled separately
                    if field_name in credentials_fields:
                        continue

                    required_inputs.append(
                        BlockInputFieldInfo(
                            name=field_name,
                            type=field_schema.get("type", "string"),
                            description=field_schema.get("description", ""),
                            required=field_name in required_fields,
                            default=field_schema.get("default"),
                        )
                    )

            blocks.append(
                BlockInfoSummary(
                    id=block_id,
                    name=block.name,
                    description=block.description or "",
                    categories=categories,
                    input_schema=input_schema,
                    output_schema=output_schema,
                    required_inputs=required_inputs,
                )
            )

@@ -228,8 +174,7 @@ class FindBlockTool(BaseTool):
        return BlockListResponse(
            message=(
                f"Found {len(blocks)} block(s) matching '{query}'. "
                "To execute a block, use run_block with the block's 'id' field "
                "and provide 'input_data' matching the block's input_schema."
                "To see a block's inputs/outputs and execute it, use run_block with the block's 'id' - providing no inputs."
            ),
            blocks=blocks,
            count=len(blocks),

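The net effect of these find_block changes is a two-step protocol: find_block now returns only lightweight summaries, and the first run_block call (with empty input_data) returns the full schema instead of executing. A hypothetical transcript of the flow, with invented IDs and abbreviated payloads:

```python
# Step 1: search for a block; the block_list response is intentionally small.
find_result = {
    "type": "block_list",
    "count": 1,
    "blocks": [
        {
            "id": "http-block-id",
            "name": "Send Web Request",
            "description": "Send an HTTP request to a URL.",
        }
    ],
}

# Step 2: call run_block with NO inputs; instead of executing, the tool
# replies with a block_details payload describing inputs and outputs.
details_result = {
    "type": "block_details",
    "block": {
        "id": "http-block-id",
        "name": "Send Web Request",
        "inputs": {"properties": {"url": {"type": "string"}}, "required": ["url"]},
        "outputs": {"properties": {"response": {"type": "object"}}},
    },
}

# Step 3: call run_block again with real input_data to actually execute.
run_request = {
    "block_id": "http-block-id",
    "input_data": {"url": "https://example.com", "method": "GET"},
}
```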
@@ -18,7 +18,13 @@ _TEST_USER_ID = "test-user-find-block"


def make_mock_block(
    block_id: str, name: str, block_type: BlockType, disabled: bool = False
    block_id: str,
    name: str,
    block_type: BlockType,
    disabled: bool = False,
    input_schema: dict | None = None,
    output_schema: dict | None = None,
    credentials_fields: dict | None = None,
):
    """Create a mock block for testing."""
    mock = MagicMock()
@@ -28,10 +34,13 @@ def make_mock_block(
    mock.block_type = block_type
    mock.disabled = disabled
    mock.input_schema = MagicMock()
    mock.input_schema.jsonschema.return_value = {"properties": {}, "required": []}
    mock.input_schema.get_credentials_fields.return_value = {}
    mock.input_schema.jsonschema.return_value = input_schema or {
        "properties": {},
        "required": [],
    }
    mock.input_schema.get_credentials_fields.return_value = credentials_fields or {}
    mock.output_schema = MagicMock()
    mock.output_schema.jsonschema.return_value = {}
    mock.output_schema.jsonschema.return_value = output_schema or {}
    mock.categories = []
    return mock

@@ -137,3 +146,241 @@ class TestFindBlockFiltering:
        assert isinstance(response, BlockListResponse)
        assert len(response.blocks) == 1
        assert response.blocks[0].id == "normal-block-id"

    @pytest.mark.asyncio(loop_scope="session")
    async def test_response_size_average_chars_per_block(self):
        """Measure average chars per block in the serialized response."""
        session = make_session(user_id=_TEST_USER_ID)

        # Realistic block definitions modeled after real blocks
        block_defs = [
            {
                "id": "http-block-id",
                "name": "Send Web Request",
                "input_schema": {
                    "properties": {
                        "url": {
                            "type": "string",
                            "description": "The URL to send the request to",
                        },
                        "method": {
                            "type": "string",
                            "description": "The HTTP method to use",
                        },
                        "headers": {
                            "type": "object",
                            "description": "Headers to include in the request",
                        },
                        "json_format": {
                            "type": "boolean",
                            "description": "If true, send the body as JSON",
                        },
                        "body": {
                            "type": "object",
                            "description": "Form/JSON body payload",
                        },
                        "credentials": {
                            "type": "object",
                            "description": "HTTP credentials",
                        },
                    },
                    "required": ["url", "method"],
                },
                "output_schema": {
                    "properties": {
                        "response": {
                            "type": "object",
                            "description": "The response from the server",
                        },
                        "client_error": {
                            "type": "object",
                            "description": "Errors on 4xx status codes",
                        },
                        "server_error": {
                            "type": "object",
                            "description": "Errors on 5xx status codes",
                        },
                        "error": {
                            "type": "string",
                            "description": "Errors for all other exceptions",
                        },
                    },
                },
                "credentials_fields": {"credentials": True},
            },
            {
                "id": "email-block-id",
                "name": "Send Email",
                "input_schema": {
                    "properties": {
                        "to_email": {
                            "type": "string",
                            "description": "Recipient email address",
                        },
                        "subject": {
                            "type": "string",
                            "description": "Subject of the email",
                        },
                        "body": {
                            "type": "string",
                            "description": "Body of the email",
                        },
                        "config": {
                            "type": "object",
                            "description": "SMTP Config",
                        },
                        "credentials": {
                            "type": "object",
                            "description": "SMTP credentials",
                        },
                    },
                    "required": ["to_email", "subject", "body", "credentials"],
                },
                "output_schema": {
                    "properties": {
                        "status": {
                            "type": "string",
                            "description": "Status of the email sending operation",
                        },
                        "error": {
                            "type": "string",
                            "description": "Error message if sending failed",
                        },
                    },
                },
                "credentials_fields": {"credentials": True},
            },
            {
                "id": "claude-code-block-id",
                "name": "Claude Code",
                "input_schema": {
                    "properties": {
                        "e2b_credentials": {
                            "type": "object",
                            "description": "API key for E2B platform",
                        },
                        "anthropic_credentials": {
                            "type": "object",
                            "description": "API key for Anthropic",
                        },
                        "prompt": {
                            "type": "string",
                            "description": "Task or instruction for Claude Code",
                        },
                        "timeout": {
                            "type": "integer",
                            "description": "Sandbox timeout in seconds",
                        },
                        "setup_commands": {
                            "type": "array",
                            "description": "Shell commands to run before execution",
                        },
                        "working_directory": {
                            "type": "string",
                            "description": "Working directory for Claude Code",
                        },
                        "session_id": {
                            "type": "string",
                            "description": "Session ID to resume a conversation",
                        },
                        "sandbox_id": {
                            "type": "string",
                            "description": "Sandbox ID to reconnect to",
                        },
                        "conversation_history": {
                            "type": "string",
                            "description": "Previous conversation history",
                        },
                        "dispose_sandbox": {
                            "type": "boolean",
                            "description": "Whether to dispose sandbox after execution",
                        },
                    },
                    "required": [
                        "e2b_credentials",
                        "anthropic_credentials",
                        "prompt",
                    ],
                },
                "output_schema": {
                    "properties": {
                        "response": {
                            "type": "string",
                            "description": "Output from Claude Code execution",
                        },
                        "files": {
                            "type": "array",
                            "description": "Files created/modified by Claude Code",
                        },
                        "conversation_history": {
                            "type": "string",
                            "description": "Full conversation history",
                        },
                        "session_id": {
                            "type": "string",
                            "description": "Session ID for this conversation",
                        },
                        "sandbox_id": {
                            "type": "string",
                            "description": "ID of the sandbox instance",
                        },
                        "error": {
                            "type": "string",
                            "description": "Error message if execution failed",
                        },
                    },
                },
                "credentials_fields": {
                    "e2b_credentials": True,
                    "anthropic_credentials": True,
                },
            },
        ]

        search_results = [
            {"content_id": d["id"], "score": 0.9 - i * 0.1}
            for i, d in enumerate(block_defs)
        ]
        mock_blocks = {
            d["id"]: make_mock_block(
                block_id=d["id"],
                name=d["name"],
                block_type=BlockType.STANDARD,
                input_schema=d["input_schema"],
                output_schema=d["output_schema"],
                credentials_fields=d["credentials_fields"],
            )
            for d in block_defs
        }

        with patch(
            "backend.api.features.chat.tools.find_block.unified_hybrid_search",
            new_callable=AsyncMock,
            return_value=(search_results, len(search_results)),
        ), patch(
            "backend.api.features.chat.tools.find_block.get_block",
            side_effect=lambda bid: mock_blocks.get(bid),
        ):
            tool = FindBlockTool()
            response = await tool._execute(
                user_id=_TEST_USER_ID, session=session, query="test"
            )

        assert isinstance(response, BlockListResponse)
        assert response.count == len(block_defs)

        total_chars = len(response.model_dump_json())
        avg_chars = total_chars // response.count

        # Print for visibility in test output
        print(f"\nTotal response size: {total_chars} chars")
        print(f"Number of blocks: {response.count}")
        print(f"Average chars per block: {avg_chars}")

        # The old response was ~90K for 10 blocks (~9K per block).
        # Previous optimization reduced it to ~1.5K per block (no raw JSON schemas).
        # Now with only id/name/description, we expect ~300 chars per block.
        assert avg_chars < 500, (
            f"Average chars per block ({avg_chars}) exceeds 500. "
            f"Total response: {total_chars} chars for {response.count} blocks."
        )

@@ -25,6 +25,7 @@ class ResponseType(str, Enum):
    AGENT_SAVED = "agent_saved"
    CLARIFICATION_NEEDED = "clarification_needed"
    BLOCK_LIST = "block_list"
    BLOCK_DETAILS = "block_details"
    BLOCK_OUTPUT = "block_output"
    DOC_SEARCH_RESULTS = "doc_search_results"
    DOC_PAGE = "doc_page"
@@ -334,13 +335,6 @@ class BlockInfoSummary(BaseModel):
    id: str
    name: str
    description: str
    categories: list[str]
    input_schema: dict[str, Any]
    output_schema: dict[str, Any]
    required_inputs: list[BlockInputFieldInfo] = Field(
        default_factory=list,
        description="List of required input fields for this block",
    )


class BlockListResponse(ToolResponseBase):
@@ -350,10 +344,25 @@ class BlockListResponse(ToolResponseBase):
    blocks: list[BlockInfoSummary]
    count: int
    query: str
    usage_hint: str = Field(
        default="To execute a block, call run_block with block_id set to the block's "
        "'id' field and input_data containing the required fields from input_schema."
    )


class BlockDetails(BaseModel):
    """Detailed block information."""

    id: str
    name: str
    description: str
    inputs: dict[str, Any] = {}
    outputs: dict[str, Any] = {}
    credentials: list[CredentialsMetaInput] = []


class BlockDetailsResponse(ToolResponseBase):
    """Response for block details (first run_block attempt)."""

    type: ResponseType = ResponseType.BLOCK_DETAILS
    block: BlockDetails
    user_authenticated: bool = False


class BlockOutputResponse(ToolResponseBase):

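One side note on the new BlockDetails model: the bare mutable defaults (`inputs: dict[str, Any] = {}`) are safe here because Pydantic copies field defaults per instance, unlike plain classes or dataclasses where a shared mutable default is a classic bug. A quick self-contained check:

```python
from pydantic import BaseModel


class BlockDetails(BaseModel):
    inputs: dict = {}
    outputs: dict = {}


a = BlockDetails()
b = BlockDetails()
a.inputs["url"] = {"type": "string"}

# Each instance got its own copy of the default, so b is unaffected.
assert b.inputs == {}
```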
@@ -23,8 +23,11 @@ from backend.util.exceptions import BlockError
from .base import BaseTool
from .helpers import get_inputs_from_schema
from .models import (
    BlockDetails,
    BlockDetailsResponse,
    BlockOutputResponse,
    ErrorResponse,
    InputValidationErrorResponse,
    SetupInfo,
    SetupRequirementsResponse,
    ToolResponseBase,
@@ -51,8 +54,8 @@ class RunBlockTool(BaseTool):
        "Execute a specific block with the provided input data. "
        "IMPORTANT: You MUST call find_block first to get the block's 'id' - "
        "do NOT guess or make up block IDs. "
        "Use the 'id' from find_block results and provide input_data "
        "matching the block's required_inputs."
        "On first attempt (without input_data), returns detailed schema showing "
        "required inputs and outputs. Then call again with proper input_data to execute."
    )

    @property
@@ -67,11 +70,19 @@ class RunBlockTool(BaseTool):
                    "NEVER guess this - always get it from find_block first."
                ),
            },
            "block_name": {
                "type": "string",
                "description": (
                    "The block's human-readable name from find_block results. "
                    "Used for display purposes in the UI."
                ),
            },
            "input_data": {
                "type": "object",
                "description": (
                    "Input values for the block. Use the 'required_inputs' field "
                    "from find_block to see what fields are needed."
                    "Input values for the block. "
                    "First call with empty {} to see the block's schema, "
                    "then call again with proper values to execute."
                ),
            },
        },
@@ -156,6 +167,34 @@ class RunBlockTool(BaseTool):
            await self._resolve_block_credentials(user_id, block, input_data)
        )

        # Get block schemas for details/validation
        try:
            input_schema: dict[str, Any] = block.input_schema.jsonschema()
        except Exception as e:
            logger.warning(
                "Failed to generate input schema for block %s: %s",
                block_id,
                e,
            )
            return ErrorResponse(
                message=f"Block '{block.name}' has an invalid input schema",
                error=str(e),
                session_id=session_id,
            )
        try:
            output_schema: dict[str, Any] = block.output_schema.jsonschema()
        except Exception as e:
            logger.warning(
                "Failed to generate output schema for block %s: %s",
                block_id,
                e,
            )
            return ErrorResponse(
                message=f"Block '{block.name}' has an invalid output schema",
                error=str(e),
                session_id=session_id,
            )

        if missing_credentials:
            # Return setup requirements response with missing credentials
            credentials_fields_info = block.input_schema.get_credentials_fields_info()
@@ -188,6 +227,53 @@ class RunBlockTool(BaseTool):
            graph_version=None,
        )

        # Check if this is a first attempt (required inputs missing)
        # Return block details so user can see what inputs are needed
        credentials_fields = set(block.input_schema.get_credentials_fields().keys())
        required_keys = set(input_schema.get("required", []))
        required_non_credential_keys = required_keys - credentials_fields
        provided_input_keys = set(input_data.keys()) - credentials_fields

        # Check for unknown input fields
        valid_fields = (
            set(input_schema.get("properties", {}).keys()) - credentials_fields
        )
        unrecognized_fields = provided_input_keys - valid_fields
        if unrecognized_fields:
            return InputValidationErrorResponse(
                message=(
                    f"Unknown input field(s) provided: {', '.join(sorted(unrecognized_fields))}. "
                    f"Block was not executed. Please use the correct field names from the schema."
                ),
                session_id=session_id,
                unrecognized_fields=sorted(unrecognized_fields),
                inputs=input_schema,
            )

        # Show details when not all required non-credential inputs are provided
        if not (required_non_credential_keys <= provided_input_keys):
            # Get credentials info for the response
            credentials_meta = []
            for field_name, cred_meta in matched_credentials.items():
                credentials_meta.append(cred_meta)

            return BlockDetailsResponse(
                message=(
                    f"Block '{block.name}' details. "
                    "Provide input_data matching the inputs schema to execute the block."
                ),
                session_id=session_id,
                block=BlockDetails(
                    id=block_id,
                    name=block.name,
                    description=block.description or "",
                    inputs=input_schema,
                    outputs=output_schema,
                    credentials=credentials_meta,
                ),
                user_authenticated=True,
            )

        try:
            # Get or create user's workspace for CoPilot file operations
            workspace = await get_or_create_workspace(user_id)

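The validation above is pure set arithmetic over the JSON schema: everything the caller sent, minus credential fields, must be a subset of the declared properties; and the required fields (minus credentials) must all be present before the block executes. A standalone sketch of the same logic, outside the tool class, with hypothetical helper and variable names:

```python
def classify_inputs(
    schema: dict,
    provided: dict,
    credential_fields: set[str],
) -> tuple[set[str], set[str]]:
    """Return (unrecognized, missing_required) for a proposed input payload."""
    valid = set(schema.get("properties", {})) - credential_fields
    required = set(schema.get("required", [])) - credential_fields
    sent = set(provided) - credential_fields

    unrecognized = sent - valid  # hard reject: typo'd or invented field names
    missing = required - sent    # soft case: triggers the block_details response
    return unrecognized, missing


schema = {
    "properties": {"prompt": {}, "model": {}, "credentials": {}},
    "required": ["prompt", "credentials"],
}
unrecognized, missing = classify_inputs(
    schema, {"LLM_Model": "x"}, credential_fields={"credentials"}
)
assert unrecognized == {"LLM_Model"} and missing == {"prompt"}
```

The ordering matters: unknown fields are rejected before the missing-required check, so a caller with a typo'd key gets the schema back instead of the block silently running on defaults.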
@@ -1,10 +1,15 @@
"""Tests for block execution guards in RunBlockTool."""
"""Tests for block execution guards and input validation in RunBlockTool."""

from unittest.mock import MagicMock, patch
from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from backend.api.features.chat.tools.models import ErrorResponse
from backend.api.features.chat.tools.models import (
    BlockDetailsResponse,
    BlockOutputResponse,
    ErrorResponse,
    InputValidationErrorResponse,
)
from backend.api.features.chat.tools.run_block import RunBlockTool
from backend.blocks._base import BlockType

@@ -28,6 +33,39 @@ def make_mock_block(
    return mock


def make_mock_block_with_schema(
    block_id: str,
    name: str,
    input_properties: dict,
    required_fields: list[str],
    output_properties: dict | None = None,
):
    """Create a mock block with a defined input/output schema for validation tests."""
    mock = MagicMock()
    mock.id = block_id
    mock.name = name
    mock.block_type = BlockType.STANDARD
    mock.disabled = False
    mock.description = f"Test block: {name}"

    input_schema = {
        "properties": input_properties,
        "required": required_fields,
    }
    mock.input_schema = MagicMock()
    mock.input_schema.jsonschema.return_value = input_schema
    mock.input_schema.get_credentials_fields_info.return_value = {}
    mock.input_schema.get_credentials_fields.return_value = {}

    output_schema = {
        "properties": output_properties or {"result": {"type": "string"}},
    }
    mock.output_schema = MagicMock()
    mock.output_schema.jsonschema.return_value = output_schema

    return mock


class TestRunBlockFiltering:
    """Tests for block execution guards in RunBlockTool."""

@@ -104,3 +142,221 @@ class TestRunBlockFiltering:
        # (may be other errors like missing credentials, but not the exclusion guard)
        if isinstance(response, ErrorResponse):
            assert "cannot be run directly in CoPilot" not in response.message


class TestRunBlockInputValidation:
    """Tests for input field validation in RunBlockTool.

    run_block rejects unknown input field names with InputValidationErrorResponse,
    preventing silent failures where incorrect keys would be ignored and the block
    would execute with default values instead of the caller's intended values.
    """

    @pytest.mark.asyncio(loop_scope="session")
    async def test_unknown_input_fields_are_rejected(self):
        """run_block rejects unknown input fields instead of silently ignoring them.

        Scenario: The AI Text Generator block has a field called 'model' (for LLM model
        selection), but the LLM calling the tool guesses wrong and sends 'LLM_Model'
        instead. The block should reject the request and return the valid schema.
        """
        session = make_session(user_id=_TEST_USER_ID)

        mock_block = make_mock_block_with_schema(
            block_id="ai-text-gen-id",
            name="AI Text Generator",
            input_properties={
                "prompt": {"type": "string", "description": "The prompt to send"},
                "model": {
                    "type": "string",
                    "description": "The LLM model to use",
                    "default": "gpt-4o-mini",
                },
                "sys_prompt": {
                    "type": "string",
                    "description": "System prompt",
                    "default": "",
                },
            },
            required_fields=["prompt"],
            output_properties={"response": {"type": "string"}},
        )

        with patch(
            "backend.api.features.chat.tools.run_block.get_block",
            return_value=mock_block,
        ):
            tool = RunBlockTool()

            # Provide 'prompt' (correct) but 'LLM_Model' instead of 'model' (wrong key)
            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="ai-text-gen-id",
                input_data={
                    "prompt": "Write a haiku about coding",
                    "LLM_Model": "claude-opus-4-6",  # WRONG KEY - should be 'model'
                },
            )

        assert isinstance(response, InputValidationErrorResponse)
        assert "LLM_Model" in response.unrecognized_fields
        assert "Block was not executed" in response.message
        assert "inputs" in response.model_dump()  # valid schema included

    @pytest.mark.asyncio(loop_scope="session")
    async def test_multiple_wrong_keys_are_all_reported(self):
        """All unrecognized field names are reported in a single error response."""
        session = make_session(user_id=_TEST_USER_ID)

        mock_block = make_mock_block_with_schema(
            block_id="ai-text-gen-id",
            name="AI Text Generator",
            input_properties={
                "prompt": {"type": "string"},
                "model": {"type": "string", "default": "gpt-4o-mini"},
                "sys_prompt": {"type": "string", "default": ""},
                "retry": {"type": "integer", "default": 3},
            },
            required_fields=["prompt"],
        )

        with patch(
            "backend.api.features.chat.tools.run_block.get_block",
            return_value=mock_block,
        ):
            tool = RunBlockTool()

            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="ai-text-gen-id",
                input_data={
                    "prompt": "Hello",  # correct
                    "llm_model": "claude-opus-4-6",  # WRONG - should be 'model'
                    "system_prompt": "Be helpful",  # WRONG - should be 'sys_prompt'
                    "retries": 5,  # WRONG - should be 'retry'
                },
            )

        assert isinstance(response, InputValidationErrorResponse)
        assert set(response.unrecognized_fields) == {
            "llm_model",
            "system_prompt",
            "retries",
        }
        assert "Block was not executed" in response.message

    @pytest.mark.asyncio(loop_scope="session")
    async def test_unknown_fields_rejected_even_with_missing_required(self):
        """Unknown fields are caught before the missing-required-fields check."""
        session = make_session(user_id=_TEST_USER_ID)

        mock_block = make_mock_block_with_schema(
            block_id="ai-text-gen-id",
            name="AI Text Generator",
            input_properties={
                "prompt": {"type": "string"},
                "model": {"type": "string", "default": "gpt-4o-mini"},
            },
            required_fields=["prompt"],
        )

        with patch(
            "backend.api.features.chat.tools.run_block.get_block",
            return_value=mock_block,
        ):
            tool = RunBlockTool()

            # 'prompt' is missing AND 'LLM_Model' is an unknown field
            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="ai-text-gen-id",
                input_data={
                    "LLM_Model": "claude-opus-4-6",  # wrong key, and 'prompt' is missing
                },
            )

        # Unknown fields are caught first
        assert isinstance(response, InputValidationErrorResponse)
        assert "LLM_Model" in response.unrecognized_fields

    @pytest.mark.asyncio(loop_scope="session")
    async def test_correct_inputs_still_execute(self):
        """Correct input field names pass validation and the block executes."""
        session = make_session(user_id=_TEST_USER_ID)

        mock_block = make_mock_block_with_schema(
            block_id="ai-text-gen-id",
            name="AI Text Generator",
            input_properties={
                "prompt": {"type": "string"},
                "model": {"type": "string", "default": "gpt-4o-mini"},
            },
            required_fields=["prompt"],
        )

        async def mock_execute(input_data, **kwargs):
            yield "response", "Generated text"

        mock_block.execute = mock_execute

        with (
            patch(
                "backend.api.features.chat.tools.run_block.get_block",
                return_value=mock_block,
            ),
            patch(
                "backend.api.features.chat.tools.run_block.get_or_create_workspace",
                new_callable=AsyncMock,
                return_value=MagicMock(id="test-workspace-id"),
            ),
        ):
            tool = RunBlockTool()

            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="ai-text-gen-id",
                input_data={
                    "prompt": "Write a haiku",
                    "model": "gpt-4o-mini",  # correct field name
                },
            )

        assert isinstance(response, BlockOutputResponse)
        assert response.success is True

    @pytest.mark.asyncio(loop_scope="session")
    async def test_missing_required_fields_returns_details(self):
        """Missing required fields returns BlockDetailsResponse with schema."""
        session = make_session(user_id=_TEST_USER_ID)

        mock_block = make_mock_block_with_schema(
            block_id="ai-text-gen-id",
            name="AI Text Generator",
            input_properties={
                "prompt": {"type": "string"},
                "model": {"type": "string", "default": "gpt-4o-mini"},
            },
            required_fields=["prompt"],
        )

        with patch(
            "backend.api.features.chat.tools.run_block.get_block",
            return_value=mock_block,
        ):
            tool = RunBlockTool()

            # Only provide valid optional field, missing required 'prompt'
            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="ai-text-gen-id",
                input_data={
                    "model": "gpt-4o-mini",  # valid but optional
                },
            )

        assert isinstance(response, BlockDetailsResponse)

@@ -0,0 +1,153 @@
"""Tests for BlockDetailsResponse in RunBlockTool."""

from unittest.mock import AsyncMock, MagicMock, patch

import pytest

from backend.api.features.chat.tools.models import BlockDetailsResponse
from backend.api.features.chat.tools.run_block import RunBlockTool
from backend.blocks._base import BlockType
from backend.data.model import CredentialsMetaInput
from backend.integrations.providers import ProviderName

from ._test_data import make_session

_TEST_USER_ID = "test-user-run-block-details"


def make_mock_block_with_inputs(
    block_id: str, name: str, description: str = "Test description"
):
    """Create a mock block with input/output schemas for testing."""
    mock = MagicMock()
    mock.id = block_id
    mock.name = name
    mock.description = description
    mock.block_type = BlockType.STANDARD
    mock.disabled = False

    # Input schema with non-credential fields
    mock.input_schema = MagicMock()
    mock.input_schema.jsonschema.return_value = {
        "properties": {
            "url": {"type": "string", "description": "URL to fetch"},
            "method": {"type": "string", "description": "HTTP method"},
        },
        "required": ["url"],
    }
    mock.input_schema.get_credentials_fields.return_value = {}
    mock.input_schema.get_credentials_fields_info.return_value = {}

    # Output schema
    mock.output_schema = MagicMock()
    mock.output_schema.jsonschema.return_value = {
        "properties": {
            "response": {"type": "object", "description": "HTTP response"},
            "error": {"type": "string", "description": "Error message"},
        }
    }

    return mock


@pytest.mark.asyncio(loop_scope="session")
async def test_run_block_returns_details_when_no_input_provided():
    """When run_block is called without input_data, it should return BlockDetailsResponse."""
    session = make_session(user_id=_TEST_USER_ID)

    # Create a block with inputs
    http_block = make_mock_block_with_inputs(
        "http-block-id", "HTTP Request", "Send HTTP requests"
    )

    with patch(
        "backend.api.features.chat.tools.run_block.get_block",
        return_value=http_block,
    ):
        # Mock credentials check to return no missing credentials
        with patch.object(
            RunBlockTool,
            "_resolve_block_credentials",
            new_callable=AsyncMock,
            return_value=({}, []),  # (matched_credentials, missing_credentials)
        ):
            tool = RunBlockTool()
            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="http-block-id",
                input_data={},  # Empty input data
            )

    # Should return BlockDetailsResponse showing the schema
    assert isinstance(response, BlockDetailsResponse)
    assert response.block.id == "http-block-id"
    assert response.block.name == "HTTP Request"
    assert response.block.description == "Send HTTP requests"
    assert "url" in response.block.inputs["properties"]
    assert "method" in response.block.inputs["properties"]
    assert "response" in response.block.outputs["properties"]
    assert response.user_authenticated is True


@pytest.mark.asyncio(loop_scope="session")
async def test_run_block_returns_details_when_only_credentials_provided():
    """When only credentials are provided (no actual input), should return details."""
    session = make_session(user_id=_TEST_USER_ID)

    # Create a block with both credential and non-credential inputs
    mock = MagicMock()
    mock.id = "api-block-id"
    mock.name = "API Call"
    mock.description = "Make API calls"
    mock.block_type = BlockType.STANDARD
    mock.disabled = False

    mock.input_schema = MagicMock()
    mock.input_schema.jsonschema.return_value = {
        "properties": {
            "credentials": {"type": "object", "description": "API credentials"},
            "endpoint": {"type": "string", "description": "API endpoint"},
        },
        "required": ["credentials", "endpoint"],
    }
    mock.input_schema.get_credentials_fields.return_value = {"credentials": True}
    mock.input_schema.get_credentials_fields_info.return_value = {}

    mock.output_schema = MagicMock()
    mock.output_schema.jsonschema.return_value = {
        "properties": {"result": {"type": "object"}}
    }

    with patch(
        "backend.api.features.chat.tools.run_block.get_block",
        return_value=mock,
    ):
        with patch.object(
            RunBlockTool,
            "_resolve_block_credentials",
            new_callable=AsyncMock,
            return_value=(
                {
                    "credentials": CredentialsMetaInput(
                        id="cred-id",
                        provider=ProviderName("test_provider"),
                        type="api_key",
                        title="Test Credential",
                    )
                },
                [],
            ),
        ):
            tool = RunBlockTool()
            response = await tool._execute(
                user_id=_TEST_USER_ID,
                session=session,
                block_id="api-block-id",
                input_data={"credentials": {"some": "cred"}},  # Only credential
            )

    # Should return details because no non-credential inputs provided
    assert isinstance(response, BlockDetailsResponse)
    assert response.block.id == "api-block-id"
    assert response.block.name == "API Call"

@@ -7,7 +7,6 @@ import prisma.errors
import prisma.models
import prisma.types

from backend.api.features.library.exceptions import FolderValidationError
import backend.api.features.store.exceptions as store_exceptions
import backend.api.features.store.image_gen as store_image_gen
import backend.api.features.store.media as store_media
@@ -43,8 +42,6 @@ async def list_library_agents(
    page: int = 1,
    page_size: int = 50,
    include_executions: bool = False,
    folder_id: Optional[str] = None,
    include_root_only: bool = False,
) -> library_model.LibraryAgentResponse:
    """
    Retrieves a paginated list of LibraryAgent records for a given user.
@@ -55,8 +52,6 @@ async def list_library_agents(
        sort_by: Sorting field (createdAt, updatedAt, isFavorite, isCreatedByUser).
        page: Current page (1-indexed).
        page_size: Number of items per page.
        folder_id: Filter by folder ID. If provided, only returns agents in this folder.
        include_root_only: If True, only returns agents without a folder (root-level).
        include_executions: Whether to include execution data for status calculation.
            Defaults to False for performance (UI fetches status separately).
            Set to True when accurate status/metrics are needed (e.g., agent generator).
@@ -87,13 +82,6 @@ async def list_library_agents(
        "isArchived": False,
    }

    # Apply folder filter
    if folder_id is not None:
        where_clause["folderId"] = folder_id
    elif include_root_only:
        where_clause["folderId"] = None

    # Build search filter if applicable
    if search_term:
        where_clause["OR"] = [
            {
@@ -646,7 +634,6 @@ async def update_library_agent(
    is_archived: Optional[bool] = None,
    is_deleted: Optional[Literal[False]] = None,
    settings: Optional[GraphSettings] = None,
    folder_id: Optional[str] = None,
) -> library_model.LibraryAgent:
    """
    Updates the specified LibraryAgent record.
@@ -659,7 +646,6 @@ async def update_library_agent(
        is_favorite: Whether this agent is marked as a favorite.
        is_archived: Whether this agent is archived.
        settings: User-specific settings for this library agent.
        folder_id: Folder ID to move agent to (empty string "" for root, None to skip).

    Returns:
        The updated LibraryAgent.
@@ -687,7 +673,13 @@ async def update_library_agent(
        )
        update_fields["isDeleted"] = is_deleted
    if settings is not None:
        update_fields["settings"] = SafeJson(settings.model_dump())
        existing_agent = await get_library_agent(id=library_agent_id, user_id=user_id)
        current_settings_dict = (
            existing_agent.settings.model_dump() if existing_agent.settings else {}
        )
        new_settings = settings.model_dump(exclude_unset=True)
        merged_settings = {**current_settings_dict, **new_settings}
        update_fields["settings"] = SafeJson(merged_settings)

    try:
        # If graph_version is provided, update to that specific version

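The settings change above turns a blind overwrite into a shallow merge: only the keys the caller actually set (`model_dump(exclude_unset=True)`) replace stored values, so a partial update no longer wipes unrelated settings. A minimal illustration of the semantics; GraphSettings is the real model name, but the fields here are invented for the sketch:

```python
from pydantic import BaseModel


class GraphSettings(BaseModel):
    """Hypothetical stand-in fields for the real settings model."""

    human_in_the_loop: bool = False
    notify_on_finish: bool = True


stored = GraphSettings(human_in_the_loop=True, notify_on_finish=False)

# The caller only sets one field; exclude_unset drops the untouched defaults.
patch = GraphSettings(notify_on_finish=True)
new_settings = patch.model_dump(exclude_unset=True)  # {'notify_on_finish': True}

merged = {**stored.model_dump(), **new_settings}
assert merged == {"human_in_the_loop": True, "notify_on_finish": True}

# The old code stored patch.model_dump() wholesale, which would have
# silently reset human_in_the_loop back to False here.
```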
@@ -926,788 +918,6 @@ async def add_store_agent_to_library(
        raise DatabaseError("Failed to add agent to library") from e


##############################################
############ Folder DB Functions #############
##############################################

MAX_FOLDER_DEPTH = 5


async def list_folders(
    user_id: str,
    parent_id: Optional[str] = None,
    include_counts: bool = True,
) -> list[library_model.LibraryFolder]:
    """
    Lists folders for a user, optionally filtered by parent.

    Args:
        user_id: The ID of the user.
        parent_id: If provided, only returns folders with this parent.
            If None, returns root-level folders.
        include_counts: Whether to include agent and subfolder counts.

    Returns:
        A list of LibraryFolder objects.
    """
    logger.debug(f"Listing folders for user #{user_id}, parent_id={parent_id}")

    try:
        where_clause: prisma.types.LibraryFolderWhereInput = {
            "userId": user_id,
            "isDeleted": False,
            "parentId": parent_id,
        }

        folders = await prisma.models.LibraryFolder.prisma().find_many(
            where=where_clause,
            order={"createdAt": "asc"},
            include=(
                {
                    "LibraryAgents": {"where": {"isDeleted": False}},
                    "Children": {"where": {"isDeleted": False}},
                }
                if include_counts
                else None
            ),
        )

        result = []
        for folder in folders:
            agent_count = len(folder.LibraryAgents) if folder.LibraryAgents else 0
            subfolder_count = len(folder.Children) if folder.Children else 0
            result.append(
                library_model.LibraryFolder.from_db(
                    folder,
                    agent_count=agent_count,
                    subfolder_count=subfolder_count,
                )
            )

        return result

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error listing folders: {e}")
        raise DatabaseError("Failed to list folders") from e


async def get_folder_tree(
    user_id: str,
) -> list[library_model.LibraryFolderTree]:
    """
    Gets the full folder tree for a user.

    Args:
        user_id: The ID of the user.

    Returns:
        A list of LibraryFolderTree objects (root folders with nested children).
    """
    logger.debug(f"Getting folder tree for user #{user_id}")

    try:
        # Fetch all folders for the user
        all_folders = await prisma.models.LibraryFolder.prisma().find_many(
            where={
                "userId": user_id,
                "isDeleted": False,
            },
            order={"createdAt": "asc"},
            include={
                "LibraryAgents": {"where": {"isDeleted": False}},
                "Children": {"where": {"isDeleted": False}},
            },
        )

        # Build a map of folder ID to folder data
        folder_map: dict[str, library_model.LibraryFolderTree] = {}
        for folder in all_folders:
            agent_count = len(folder.LibraryAgents) if folder.LibraryAgents else 0
            subfolder_count = len(folder.Children) if folder.Children else 0
            folder_map[folder.id] = library_model.LibraryFolderTree(
                **library_model.LibraryFolder.from_db(
                    folder,
                    agent_count=agent_count,
                    subfolder_count=subfolder_count,
                ).model_dump(),
                children=[],
            )

        # Build the tree structure
        root_folders: list[library_model.LibraryFolderTree] = []
        for folder in all_folders:
            tree_folder = folder_map[folder.id]
            if folder.parentId and folder.parentId in folder_map:
                folder_map[folder.parentId].children.append(tree_folder)
            else:
                root_folders.append(tree_folder)

        return root_folders

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error getting folder tree: {e}")
        raise DatabaseError("Failed to get folder tree") from e


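get_folder_tree uses the classic two-pass build: fetch every row once, index nodes by id, then link each node to its parent, with orphans (missing or dangling parents) becoming roots. The same algorithm over plain in-memory data, as a self-contained sketch:

```python
from dataclasses import dataclass, field


@dataclass
class Node:
    id: str
    parent_id: str | None
    children: list["Node"] = field(default_factory=list)


def build_tree(rows: list[tuple[str, str | None]]) -> list[Node]:
    """rows are (id, parent_id) pairs from a single flat query."""
    # Pass 1: index every node by id.
    nodes = {rid: Node(rid, pid) for rid, pid in rows}
    # Pass 2: attach each node to its parent; unknown/absent parents -> roots.
    roots: list[Node] = []
    for node in nodes.values():
        parent = nodes.get(node.parent_id) if node.parent_id else None
        (parent.children if parent else roots).append(node)
    return roots


roots = build_tree([("a", None), ("b", "a"), ("c", "a"), ("d", "zzz")])
assert [r.id for r in roots] == ["a", "d"]  # "d" has a dangling parent
assert [c.id for c in roots[0].children] == ["b", "c"]
```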
async def get_folder(
    folder_id: str,
    user_id: str,
) -> library_model.LibraryFolder:
    """
    Gets a single folder by ID.

    Args:
        folder_id: The ID of the folder.
        user_id: The ID of the user (for ownership verification).

    Returns:
        The LibraryFolder object.

    Raises:
        NotFoundError: If the folder doesn't exist or doesn't belong to the user.
    """
    try:
        folder = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": folder_id,
                "userId": user_id,
                "isDeleted": False,
            },
            include={
                "LibraryAgents": {"where": {"isDeleted": False}},
                "Children": {"where": {"isDeleted": False}},
            },
        )

        if not folder:
            raise NotFoundError(f"Folder #{folder_id} not found")

        agent_count = len(folder.LibraryAgents) if folder.LibraryAgents else 0
        subfolder_count = len(folder.Children) if folder.Children else 0

        return library_model.LibraryFolder.from_db(
            folder,
            agent_count=agent_count,
            subfolder_count=subfolder_count,
        )

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error getting folder: {e}")
        raise DatabaseError("Failed to get folder") from e


async def get_folder_depth(folder_id: str, user_id: str) -> int:
    """
    Calculate the depth of a folder in the hierarchy (root=0).

    Args:
        folder_id: The ID of the folder.
        user_id: The ID of the user.

    Returns:
        The depth of the folder (0 for root-level folders).
    """
    depth = 0
    current_id: str | None = folder_id

    while current_id:
        folder = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": current_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not folder:
            break
        if folder.parentId:
            depth += 1
            current_id = folder.parentId
        else:
            break

    return depth


async def is_descendant_of(
    folder_id: str,
    potential_ancestor_id: str,
    user_id: str,
) -> bool:
    """
    Check if folder_id is a descendant of potential_ancestor_id.

    Args:
        folder_id: The ID of the folder to check.
        potential_ancestor_id: The ID of the potential ancestor.
        user_id: The ID of the user.

    Returns:
        True if folder_id is a descendant of potential_ancestor_id.
    """
    current_id: str | None = folder_id

    while current_id:
        if current_id == potential_ancestor_id:
            return True

        folder = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": current_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not folder or not folder.parentId:
            break
        current_id = folder.parentId

    return False


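Both helpers above are the same parent-pointer walk: follow parentId upward until a root or a missing row. One query per level keeps the code simple, and with MAX_FOLDER_DEPTH = 5 the walk is bounded to a handful of lookups. The same logic over an in-memory parent map, for reference (note that, like the original, is_descendant also returns True when the two ids are equal):

```python
def depth_of(parents: dict[str, str | None], folder_id: str) -> int:
    """Depth of folder_id, where roots (parent None) have depth 0."""
    depth = 0
    current = folder_id
    while (parent := parents.get(current)) is not None:
        depth += 1
        current = parent
    return depth


def is_descendant(
    parents: dict[str, str | None], folder_id: str, ancestor: str
) -> bool:
    """True if ancestor appears on folder_id's parent chain (or equals it)."""
    current: str | None = folder_id
    while current is not None:
        if current == ancestor:
            return True
        current = parents.get(current)
    return False


parents = {"root": None, "docs": "root", "notes": "docs"}
assert depth_of(parents, "notes") == 2
assert is_descendant(parents, "notes", "root")
assert not is_descendant(parents, "root", "notes")
```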
async def validate_folder_operation(
    folder_id: Optional[str],
    target_parent_id: Optional[str],
    user_id: str,
    max_depth: int = MAX_FOLDER_DEPTH,
) -> None:
    """
    Validate that a folder move/create operation is valid.

    Args:
        folder_id: The ID of the folder being moved (None for create).
        target_parent_id: The target parent ID (None for root).
        user_id: The ID of the user.
        max_depth: Maximum allowed nesting depth.

    Raises:
        FolderValidationError: If the operation is invalid.
    """
    # Cannot move folder into itself
    if folder_id and folder_id == target_parent_id:
        raise FolderValidationError("Cannot move folder into itself")

    # Check for circular reference
    if folder_id and target_parent_id:
        if await is_descendant_of(target_parent_id, folder_id, user_id):
            raise FolderValidationError("Cannot move folder into its own descendant")

    # Check depth limit
    if target_parent_id:
        parent_depth = await get_folder_depth(target_parent_id, user_id)
        if parent_depth + 1 >= max_depth:
            raise FolderValidationError(
                f"Maximum folder nesting depth of {max_depth} exceeded"
            )


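Taken together, a move or create is rejected on three grounds: the folder becoming its own parent, the folder being moved into its own subtree, or the result exceeding the depth cap. Re-expressed over the in-memory parent map from the previous sketch (depth_of and is_descendant refer to those helpers; FolderValidationError here is a local stand-in for the imported exception):

```python
class FolderValidationError(Exception):
    """Local stand-in for the imported exception."""


def validate_move(
    folder_id: str | None,
    target_parent_id: str | None,
    parents: dict[str, str | None],
    max_depth: int = 5,
) -> None:
    # Rule 1: a folder cannot become its own parent.
    if folder_id and folder_id == target_parent_id:
        raise FolderValidationError("Cannot move folder into itself")
    # Rule 2: the target parent must not sit inside the folder being moved.
    if folder_id and target_parent_id:
        if is_descendant(parents, target_parent_id, folder_id):
            raise FolderValidationError("Cannot move folder into its own descendant")
    # Rule 3: the new position must stay under the depth cap.
    if target_parent_id and depth_of(parents, target_parent_id) + 1 >= max_depth:
        raise FolderValidationError("Maximum folder nesting depth exceeded")


parents = {"root": None, "a": "root", "b": "a"}
validate_move("b", None, parents)  # ok: moving "b" to the root level
try:
    validate_move("a", "b", parents)  # "b" lives under "a": would create a cycle
except FolderValidationError as e:
    print(e)
```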
async def create_folder(
    user_id: str,
    name: str,
    parent_id: Optional[str] = None,
    icon: Optional[str] = None,
    color: Optional[str] = None,
) -> library_model.LibraryFolder:
    """
    Creates a new folder for the user.

    Args:
        user_id: The ID of the user.
        name: The folder name.
        parent_id: Optional parent folder ID.
        icon: Optional icon identifier.
        color: Optional hex color code.

    Returns:
        The created LibraryFolder.

    Raises:
        FolderValidationError: If validation fails.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Creating folder '{name}' for user #{user_id}")

    try:
        # Validate operation
        await validate_folder_operation(
            folder_id=None,
            target_parent_id=parent_id,
            user_id=user_id,
        )

        # Verify parent exists if provided
        if parent_id:
            parent = await prisma.models.LibraryFolder.prisma().find_first(
                where={
                    "id": parent_id,
                    "userId": user_id,
                    "isDeleted": False,
                }
            )
            if not parent:
                raise NotFoundError(f"Parent folder #{parent_id} not found")

        # Build data dict conditionally - don't include Parent key if no parent_id
        create_data: dict = {
            "name": name,
            "User": {"connect": {"id": user_id}},
        }
        if icon is not None:
            create_data["icon"] = icon
        if color is not None:
            create_data["color"] = color
        if parent_id:
            create_data["Parent"] = {"connect": {"id": parent_id}}

        folder = await prisma.models.LibraryFolder.prisma().create(data=create_data)

        return library_model.LibraryFolder.from_db(folder)

    except prisma.errors.UniqueViolationError:
        raise FolderValidationError(
            "A folder with this name already exists in this location"
        )
    except prisma.errors.PrismaError as e:
        logger.error(f"Database error creating folder: {e}")
        raise DatabaseError("Failed to create folder") from e


async def create_folder_with_unique_name(
    user_id: str,
    base_name: str,
    parent_id: Optional[str] = None,
    icon: Optional[str] = None,
    color: Optional[str] = None,
) -> library_model.LibraryFolder:
    """
    Creates a folder, appending (2), (3), etc. if name exists.

    Args:
        user_id: The ID of the user.
        base_name: The base folder name.
        parent_id: Optional parent folder ID.
        icon: Optional icon identifier.
        color: Optional hex color code.

    Returns:
        The created LibraryFolder.
    """
    name = base_name
    suffix = 1

    while True:
        try:
            return await create_folder(
                user_id=user_id,
                name=name,
                parent_id=parent_id,
                icon=icon,
                color=color,
            )
        except FolderValidationError as e:
            if "already exists" in str(e):
                suffix += 1
                name = f"{base_name} ({suffix})"
            else:
                raise


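The retry loop leans on the database's unique constraint rather than a check-then-create race: it simply attempts the insert and, on a name collision (surfaced as the "already exists" FolderValidationError from create_folder), bumps the suffix and tries again. The naming sequence it produces, extracted as a small runnable sketch:

```python
def candidate_names(base_name: str):
    """Yield the names the retry loop would attempt, in order."""
    yield base_name  # first attempt carries no suffix
    suffix = 1
    while True:
        suffix += 1
        yield f"{base_name} ({suffix})"


names = candidate_names("My Folder")
assert [next(names) for _ in range(3)] == [
    "My Folder",
    "My Folder (2)",
    "My Folder (3)",
]
```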
async def update_folder(
    folder_id: str,
    user_id: str,
    name: Optional[str] = None,
    icon: Optional[str] = None,
    color: Optional[str] = None,
) -> library_model.LibraryFolder:
    """
    Updates a folder's properties.

    Args:
        folder_id: The ID of the folder to update.
        user_id: The ID of the user.
        name: New folder name.
        icon: New icon identifier.
        color: New hex color code.

    Returns:
        The updated LibraryFolder.

    Raises:
        FolderValidationError: If the new name conflicts with a sibling folder.
        NotFoundError: If the folder doesn't exist.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Updating folder #{folder_id} for user #{user_id}")

    try:
        # Verify folder exists and belongs to user
        existing = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": folder_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not existing:
            raise NotFoundError(f"Folder #{folder_id} not found")

        update_data: prisma.types.LibraryFolderUpdateInput = {}
        if name is not None:
            update_data["name"] = name
        if icon is not None:
            update_data["icon"] = icon
        if color is not None:
            update_data["color"] = color

        if not update_data:
            return await get_folder(folder_id, user_id)

        folder = await prisma.models.LibraryFolder.prisma().update(
            where={"id": folder_id},
            data=update_data,
            include={
                "LibraryAgents": {"where": {"isDeleted": False}},
                "Children": {"where": {"isDeleted": False}},
            },
        )

        if not folder:
            raise NotFoundError(f"Folder #{folder_id} not found")

        agent_count = len(folder.LibraryAgents) if folder.LibraryAgents else 0
        subfolder_count = len(folder.Children) if folder.Children else 0

        return library_model.LibraryFolder.from_db(
            folder,
            agent_count=agent_count,
            subfolder_count=subfolder_count,
        )

    except prisma.errors.UniqueViolationError:
        raise FolderValidationError(
            "A folder with this name already exists in this location"
        )
    except prisma.errors.PrismaError as e:
        logger.error(f"Database error updating folder: {e}")
        raise DatabaseError("Failed to update folder") from e

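# --- Editor's note (not part of the diff): update_folder applies a PATCH-style
# partial update -- only fields passed as non-None are written, and an empty
# payload is a no-op that simply re-reads the folder. Hypothetical usage:
async def _example_rename_only() -> None:
    # Changes the name; icon and color are left untouched.
    await update_folder(folder_id="folder-abc", user_id="user-123", name="Renamed")
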
async def move_folder(
    folder_id: str,
    user_id: str,
    target_parent_id: Optional[str],
) -> library_model.LibraryFolder:
    """
    Moves a folder to a new parent.

    Args:
        folder_id: The ID of the folder to move.
        user_id: The ID of the user.
        target_parent_id: The target parent ID (None for root).

    Returns:
        The moved LibraryFolder.

    Raises:
        FolderValidationError: If the move is invalid.
        NotFoundError: If the folder doesn't exist.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Moving folder #{folder_id} to parent #{target_parent_id}")

    try:
        # Validate operation
        await validate_folder_operation(
            folder_id=folder_id,
            target_parent_id=target_parent_id,
            user_id=user_id,
        )

        # Verify folder exists
        existing = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": folder_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not existing:
            raise NotFoundError(f"Folder #{folder_id} not found")

        # Verify target parent exists if provided
        if target_parent_id:
            parent = await prisma.models.LibraryFolder.prisma().find_first(
                where={
                    "id": target_parent_id,
                    "userId": user_id,
                    "isDeleted": False,
                }
            )
            if not parent:
                raise NotFoundError(
                    f"Target parent folder #{target_parent_id} not found"
                )

        folder = await prisma.models.LibraryFolder.prisma().update(
            where={"id": folder_id},
            data={
                "parentId": target_parent_id,
            },
            include={
                "LibraryAgents": {"where": {"isDeleted": False}},
                "Children": {"where": {"isDeleted": False}},
            },
        )

        if not folder:
            raise NotFoundError(f"Folder #{folder_id} not found")

        agent_count = len(folder.LibraryAgents) if folder.LibraryAgents else 0
        subfolder_count = len(folder.Children) if folder.Children else 0

        return library_model.LibraryFolder.from_db(
            folder,
            agent_count=agent_count,
            subfolder_count=subfolder_count,
        )

    except prisma.errors.UniqueViolationError:
        raise FolderValidationError(
            "A folder with this name already exists in this location"
        )
    except prisma.errors.PrismaError as e:
        logger.error(f"Database error moving folder: {e}")
        raise DatabaseError("Failed to move folder") from e

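# --- Editor's sketch (not part of the diff): validate_folder_operation is called
# above but its body is outside this hunk. A minimal version of the cycle check it
# presumably performs -- reject moving a folder into itself or its own subtree --
# could look like this. The helper name and exact logic are assumptions.
async def _example_reject_circular_move(
    folder_id: str, target_parent_id: Optional[str], user_id: str
) -> None:
    if target_parent_id is None:
        return  # Moving to root can never create a cycle.
    if target_parent_id == folder_id:
        raise FolderValidationError("Cannot move a folder into itself")
    descendants = await _get_descendant_folder_ids(folder_id, user_id)
    if target_parent_id in descendants:
        raise FolderValidationError("Cannot move a folder into its own subtree")
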
async def delete_folder(
    folder_id: str,
    user_id: str,
    soft_delete: bool = True,
) -> None:
    """
    Deletes a folder and all its contents (cascade).

    Args:
        folder_id: The ID of the folder to delete.
        user_id: The ID of the user.
        soft_delete: If True, soft-deletes; otherwise hard-deletes.

    Raises:
        NotFoundError: If the folder doesn't exist.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Deleting folder #{folder_id} for user #{user_id}")

    try:
        # Verify folder exists
        existing = await prisma.models.LibraryFolder.prisma().find_first(
            where={
                "id": folder_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not existing:
            raise NotFoundError(f"Folder #{folder_id} not found")

        async with transaction() as tx:
            # Get all descendant folders recursively
            descendant_ids = await _get_descendant_folder_ids(folder_id, user_id, tx)
            all_folder_ids = [folder_id] + descendant_ids

            if soft_delete:
                # Soft-delete all agents in these folders
                await prisma.models.LibraryAgent.prisma(tx).update_many(
                    where={
                        "folderId": {"in": all_folder_ids},
                        "userId": user_id,
                    },
                    data={"isDeleted": True},
                )

                # Soft-delete all folders
                await prisma.models.LibraryFolder.prisma(tx).update_many(
                    where={
                        "id": {"in": all_folder_ids},
                        "userId": user_id,
                    },
                    data={"isDeleted": True},
                )
            else:
                # Move agents to root (or could hard-delete them)
                await prisma.models.LibraryAgent.prisma(tx).update_many(
                    where={
                        "folderId": {"in": all_folder_ids},
                        "userId": user_id,
                    },
                    data={"folderId": None},
                )

                # Hard-delete folders (children first due to FK constraints)
                for fid in reversed(all_folder_ids):
                    await prisma.models.LibraryFolder.prisma(tx).delete(
                        where={"id": fid}
                    )

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error deleting folder: {e}")
        raise DatabaseError("Failed to delete folder") from e

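# --- Editor's example (not part of the diff): the soft_delete flag picks between
# two quite different behaviours, shown here with hypothetical IDs.
async def _example_delete_modes() -> None:
    # Default: folder, subfolders, and contained agents are all flagged isDeleted.
    await delete_folder(folder_id="folder-abc", user_id="user-123")
    # Hard delete: folder rows are removed; agents survive and move back to root.
    await delete_folder(folder_id="folder-def", user_id="user-123", soft_delete=False)
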
async def _get_descendant_folder_ids(
    folder_id: str,
    user_id: str,
    tx: Optional[prisma.Prisma] = None,
) -> list[str]:
    """
    Recursively get all descendant folder IDs.

    Args:
        folder_id: The ID of the parent folder.
        user_id: The ID of the user.
        tx: Optional transaction.

    Returns:
        A list of descendant folder IDs.
    """
    prisma_client = prisma.models.LibraryFolder.prisma(tx)
    children = await prisma_client.find_many(
        where={
            "parentId": folder_id,
            "userId": user_id,
            "isDeleted": False,
        }
    )

    result: list[str] = []
    for child in children:
        result.append(child.id)
        result.extend(await _get_descendant_folder_ids(child.id, user_id, tx))

    return result

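# --- Editor's note (not part of the diff): the recursive walk above issues one
# query per folder. If deep trees ever become a hotspot, a level-by-level variant
# needs only one query per depth level. A sketch with assumed-equivalent semantics:
async def _example_descendants_bfs(folder_id: str, user_id: str) -> list[str]:
    result: list[str] = []
    frontier = [folder_id]
    while frontier:
        children = await prisma.models.LibraryFolder.prisma().find_many(
            where={"parentId": {"in": frontier}, "userId": user_id, "isDeleted": False}
        )
        frontier = [c.id for c in children]
        result.extend(frontier)
    return result
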
async def move_agent_to_folder(
    library_agent_id: str,
    folder_id: Optional[str],
    user_id: str,
) -> library_model.LibraryAgent:
    """
    Moves a library agent to a folder.

    Args:
        library_agent_id: The ID of the library agent.
        folder_id: The target folder ID (None for root).
        user_id: The ID of the user.

    Returns:
        The updated LibraryAgent.

    Raises:
        NotFoundError: If the agent or folder doesn't exist.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Moving agent #{library_agent_id} to folder #{folder_id}")

    try:
        # Verify agent exists
        agent = await prisma.models.LibraryAgent.prisma().find_first(
            where={
                "id": library_agent_id,
                "userId": user_id,
                "isDeleted": False,
            }
        )
        if not agent:
            raise NotFoundError(f"Library agent #{library_agent_id} not found")

        # Verify folder exists if provided
        if folder_id:
            folder = await prisma.models.LibraryFolder.prisma().find_first(
                where={
                    "id": folder_id,
                    "userId": user_id,
                    "isDeleted": False,
                }
            )
            if not folder:
                raise NotFoundError(f"Folder #{folder_id} not found")

        await prisma.models.LibraryAgent.prisma().update(
            where={"id": library_agent_id},
            data={"folderId": folder_id},
        )

        return await get_library_agent(library_agent_id, user_id)

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error moving agent to folder: {e}")
        raise DatabaseError("Failed to move agent to folder") from e

async def bulk_move_agents_to_folder(
    agent_ids: list[str],
    folder_id: Optional[str],
    user_id: str,
) -> list[library_model.LibraryAgent]:
    """
    Moves multiple library agents to a folder.

    Args:
        agent_ids: The IDs of the library agents.
        folder_id: The target folder ID (None for root).
        user_id: The ID of the user.

    Returns:
        The updated LibraryAgents.

    Raises:
        NotFoundError: If any agent or the folder doesn't exist.
        DatabaseError: If there's a database error.
    """
    logger.debug(f"Bulk moving {len(agent_ids)} agents to folder #{folder_id}")

    try:
        # Verify folder exists if provided
        if folder_id:
            folder = await prisma.models.LibraryFolder.prisma().find_first(
                where={
                    "id": folder_id,
                    "userId": user_id,
                    "isDeleted": False,
                }
            )
            if not folder:
                raise NotFoundError(f"Folder #{folder_id} not found")

        # Update all agents
        await prisma.models.LibraryAgent.prisma().update_many(
            where={
                "id": {"in": agent_ids},
                "userId": user_id,
                "isDeleted": False,
            },
            data={"folderId": folder_id},
        )

        # Fetch and return updated agents
        agents = await prisma.models.LibraryAgent.prisma().find_many(
            where={
                "id": {"in": agent_ids},
                "userId": user_id,
            },
            include=library_agent_include(
                user_id, include_nodes=False, include_executions=False
            ),
        )

        return [library_model.LibraryAgent.from_db(agent) for agent in agents]

    except prisma.errors.PrismaError as e:
        logger.error(f"Database error bulk moving agents to folder: {e}")
        raise DatabaseError("Failed to bulk move agents to folder") from e

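# --- Editor's example (not part of the diff): bulk move is a single update_many
# filtered by both id and userId, so IDs the user doesn't own are silently skipped
# rather than rejected. Hypothetical usage:
async def _example_bulk_move() -> None:
    moved = await bulk_move_agents_to_folder(
        agent_ids=["agent-1", "agent-2"],
        folder_id="folder-abc",  # or None to send the agents back to root
        user_id="user-123",
    )
    print([a.folder_id for a in moved])
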
##############################################
########### Presets DB Functions #############
##############################################

@@ -1,4 +0,0 @@
class FolderValidationError(Exception):
    """Raised when folder operations fail validation."""

    pass
@@ -26,95 +26,6 @@ class LibraryAgentStatus(str, Enum):
    ERROR = "ERROR"


# === Folder Models ===


class LibraryFolder(pydantic.BaseModel):
    """Represents a folder for organizing library agents."""

    id: str
    user_id: str
    name: str
    icon: str | None = None
    color: str | None = None
    parent_id: str | None = None
    created_at: datetime.datetime
    updated_at: datetime.datetime
    agent_count: int = 0  # Direct agents in folder
    subfolder_count: int = 0  # Direct child folders

    @staticmethod
    def from_db(
        folder: prisma.models.LibraryFolder,
        agent_count: int = 0,
        subfolder_count: int = 0,
    ) -> "LibraryFolder":
        """Factory method that constructs a LibraryFolder from a Prisma model."""
        return LibraryFolder(
            id=folder.id,
            user_id=folder.userId,
            name=folder.name,
            icon=folder.icon,
            color=folder.color,
            parent_id=folder.parentId,
            created_at=folder.createdAt,
            updated_at=folder.updatedAt,
            agent_count=agent_count,
            subfolder_count=subfolder_count,
        )


class LibraryFolderTree(LibraryFolder):
    """Folder with nested children for tree view."""

    children: list["LibraryFolderTree"] = []

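# --- Editor's sketch (not part of the diff): LibraryFolderTree is the nested shape
# that the /folders/tree endpoint returns. One way to build it from a flat folder
# list in O(n), assuming parent_id links; the helper name is hypothetical.
def _example_build_tree(folders: list[LibraryFolder]) -> list[LibraryFolderTree]:
    nodes = {f.id: LibraryFolderTree(**f.model_dump()) for f in folders}
    roots: list[LibraryFolderTree] = []
    for node in nodes.values():
        if node.parent_id and node.parent_id in nodes:
            nodes[node.parent_id].children.append(node)
        else:
            roots.append(node)
    return roots
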
class FolderCreateRequest(pydantic.BaseModel):
    """Request model for creating a folder."""

    name: str = pydantic.Field(..., min_length=1, max_length=100)
    icon: str | None = None
    color: str | None = pydantic.Field(
        None, pattern=r"^#[0-9A-Fa-f]{6}$", description="Hex color code (#RRGGBB)"
    )
    parent_id: str | None = None

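# --- Editor's example (not part of the diff): the color pattern accepts only a
# full six-digit hex code; shorthand like "#FFF" or names like "red" are rejected
# with a pydantic ValidationError before any DB work happens.
def _example_color_validation() -> None:
    FolderCreateRequest(name="Work", color="#1A2B3C")  # OK
    try:
        FolderCreateRequest(name="Work", color="#FFF")  # three-digit form: rejected
    except pydantic.ValidationError:
        pass
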
class FolderUpdateRequest(pydantic.BaseModel):
    """Request model for updating a folder."""

    name: str | None = pydantic.Field(None, min_length=1, max_length=100)
    icon: str | None = None
    color: str | None = None


class FolderMoveRequest(pydantic.BaseModel):
    """Request model for moving a folder to a new parent."""

    target_parent_id: str | None = None  # None = move to root


class BulkMoveAgentsRequest(pydantic.BaseModel):
    """Request model for moving multiple agents to a folder."""

    agent_ids: list[str]
    folder_id: str | None = None  # None = move to root


class FolderListResponse(pydantic.BaseModel):
    """Response schema for a list of folders."""

    folders: list[LibraryFolder]
    pagination: Pagination


class FolderTreeResponse(pydantic.BaseModel):
    """Response schema for folder tree structure."""

    tree: list[LibraryFolderTree]


class MarketplaceListingCreator(pydantic.BaseModel):
    """Creator information for a marketplace listing."""

@@ -209,9 +120,6 @@ class LibraryAgent(pydantic.BaseModel):
    can_access_graph: bool
    is_latest_version: bool
    is_favorite: bool
    folder_id: str | None = None
    folder_name: str | None = None  # Denormalized for display

    recommended_schedule_cron: str | None = None
    settings: GraphSettings = pydantic.Field(default_factory=GraphSettings)
    marketplace_listing: Optional["MarketplaceListing"] = None
@@ -320,10 +228,6 @@ class LibraryAgent(pydantic.BaseModel):
            creator=creator_data,
        )

        # Folder information
        folder_id = agent.folderId
        folder_name = agent.Folder.name if agent.Folder else None

        return LibraryAgent(
            id=agent.id,
            graph_id=agent.agentGraphId,
@@ -355,8 +259,6 @@ class LibraryAgent(pydantic.BaseModel):
            can_access_graph=can_access_graph,
            is_latest_version=is_latest_version,
            is_favorite=agent.isFavorite,
            folder_id=folder_id,
            folder_name=folder_name,
            recommended_schedule_cron=agent.AgentGraph.recommendedScheduleCron,
            settings=_parse_settings(agent.settings),
            marketplace_listing=marketplace_listing_data,
@@ -568,7 +470,3 @@ class LibraryAgentUpdateRequest(pydantic.BaseModel):
    settings: Optional[GraphSettings] = pydantic.Field(
        default=None, description="User-specific settings for this library agent"
    )
    folder_id: Optional[str] = pydantic.Field(
        default=None,
        description="Folder ID to move agent to (empty string for root)",
    )

@@ -1,11 +1,9 @@
import fastapi

from .agents import router as agents_router
from .folders import router as folders_router
from .presets import router as presets_router

router = fastapi.APIRouter()

router.include_router(presets_router)
router.include_router(folders_router)
router.include_router(agents_router)

@@ -41,34 +41,17 @@ async def list_library_agents(
        ge=1,
        description="Number of agents per page (must be >= 1)",
    ),
    folder_id: Optional[str] = Query(
        None,
        description="Filter by folder ID",
    ),
    include_root_only: bool = Query(
        False,
        description="Only return agents without a folder (root-level agents)",
    ),
) -> library_model.LibraryAgentResponse:
    """
    Get all agents in the user's library (both created and saved).
    """
    try:
        return await library_db.list_library_agents(
            user_id=user_id,
            search_term=search_term,
            sort_by=sort_by,
            page=page,
            page_size=page_size,
            folder_id=folder_id,
            include_root_only=include_root_only,
        )
    except Exception as e:
        logger.error(f"Could not list library agents for user #{user_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e
    return await library_db.list_library_agents(
        user_id=user_id,
        search_term=search_term,
        sort_by=sort_by,
        page=page,
        page_size=page_size,
    )

@router.get(
@@ -177,34 +160,15 @@ async def update_library_agent(
    """
    Update the library agent with the given fields.
    """
    try:
        return await library_db.update_library_agent(
            library_agent_id=library_agent_id,
            user_id=user_id,
            auto_update_version=payload.auto_update_version,
            graph_version=payload.graph_version,
            is_favorite=payload.is_favorite,
            is_archived=payload.is_archived,
            settings=payload.settings,
            folder_id=payload.folder_id,
        )
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error while updating library agent: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={"message": str(e), "hint": "Verify DB connection."},
        ) from e
    except Exception as e:
        logger.error(f"Unexpected error while updating library agent: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail={"message": str(e), "hint": "Check server logs."},
        ) from e
    return await library_db.update_library_agent(
        library_agent_id=library_agent_id,
        user_id=user_id,
        auto_update_version=payload.auto_update_version,
        graph_version=payload.graph_version,
        is_favorite=payload.is_favorite,
        is_archived=payload.is_archived,
        settings=payload.settings,
    )


@router.delete(

@@ -1,408 +0,0 @@
import logging
from typing import Optional

import autogpt_libs.auth as autogpt_auth_lib
from fastapi import APIRouter, HTTPException, Query, Security, status
from fastapi.responses import Response

from backend.util.exceptions import DatabaseError, NotFoundError

from .. import db as library_db
from .. import model as library_model

logger = logging.getLogger(__name__)

router = APIRouter(
    prefix="/folders",
    tags=["library", "folders", "private"],
    dependencies=[Security(autogpt_auth_lib.requires_user)],
)


@router.get(
    "",
    summary="List Library Folders",
    response_model=library_model.FolderListResponse,
    responses={
        200: {"description": "List of folders"},
        500: {"description": "Server error"},
    },
)
async def list_folders(
    user_id: str = Security(autogpt_auth_lib.get_user_id),
    parent_id: Optional[str] = Query(
        None,
        description="Filter by parent folder ID. If not provided, returns root-level folders.",
    ),
    include_counts: bool = Query(
        True,
        description="Include agent and subfolder counts",
    ),
) -> library_model.FolderListResponse:
    """
    List folders for the authenticated user.

    Args:
        user_id: ID of the authenticated user.
        parent_id: Optional parent folder ID to filter by.
        include_counts: Whether to include agent and subfolder counts.

    Returns:
        A FolderListResponse containing folders.
    """
    try:
        folders = await library_db.list_folders(
            user_id=user_id,
            parent_id=parent_id,
            include_counts=include_counts,
        )
        return library_model.FolderListResponse(
            folders=folders,
            pagination=library_model.Pagination(
                total_items=len(folders),
                total_pages=1,
                current_page=1,
                page_size=len(folders),
            ),
        )
    except Exception as e:
        logger.error(f"Could not list folders for user #{user_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.get(
    "/tree",
    summary="Get Folder Tree",
    response_model=library_model.FolderTreeResponse,
    responses={
        200: {"description": "Folder tree structure"},
        500: {"description": "Server error"},
    },
)
async def get_folder_tree(
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> library_model.FolderTreeResponse:
    """
    Get the full folder tree for the authenticated user.

    Args:
        user_id: ID of the authenticated user.

    Returns:
        A FolderTreeResponse containing the nested folder structure.
    """
    try:
        tree = await library_db.get_folder_tree(user_id=user_id)
        return library_model.FolderTreeResponse(tree=tree)
    except Exception as e:
        logger.error(f"Could not get folder tree for user #{user_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.get(
    "/{folder_id}",
    summary="Get Folder",
    response_model=library_model.LibraryFolder,
    responses={
        200: {"description": "Folder details"},
        404: {"description": "Folder not found"},
        500: {"description": "Server error"},
    },
)
async def get_folder(
    folder_id: str,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> library_model.LibraryFolder:
    """
    Get a specific folder.

    Args:
        folder_id: ID of the folder to retrieve.
        user_id: ID of the authenticated user.

    Returns:
        The requested LibraryFolder.
    """
    try:
        return await library_db.get_folder(folder_id=folder_id, user_id=user_id)
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except Exception as e:
        logger.error(f"Could not get folder #{folder_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.post(
    "",
    summary="Create Folder",
    status_code=status.HTTP_201_CREATED,
    response_model=library_model.LibraryFolder,
    responses={
        201: {"description": "Folder created successfully"},
        400: {"description": "Validation error"},
        404: {"description": "Parent folder not found"},
        409: {"description": "Folder name conflict"},
        500: {"description": "Server error"},
    },
)
async def create_folder(
    payload: library_model.FolderCreateRequest,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> library_model.LibraryFolder:
    """
    Create a new folder.

    Args:
        payload: The folder creation request.
        user_id: ID of the authenticated user.

    Returns:
        The created LibraryFolder.
    """
    try:
        return await library_db.create_folder(
            user_id=user_id,
            name=payload.name,
            parent_id=payload.parent_id,
            icon=payload.icon,
            color=payload.color,
        )
    except library_db.FolderValidationError as e:
        if "already exists" in str(e):
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=str(e),
            ) from e
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error creating folder: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.patch(
    "/{folder_id}",
    summary="Update Folder",
    response_model=library_model.LibraryFolder,
    responses={
        200: {"description": "Folder updated successfully"},
        400: {"description": "Validation error"},
        404: {"description": "Folder not found"},
        409: {"description": "Folder name conflict"},
        500: {"description": "Server error"},
    },
)
async def update_folder(
    folder_id: str,
    payload: library_model.FolderUpdateRequest,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> library_model.LibraryFolder:
    """
    Update a folder's properties.

    Args:
        folder_id: ID of the folder to update.
        payload: The folder update request.
        user_id: ID of the authenticated user.

    Returns:
        The updated LibraryFolder.
    """
    try:
        return await library_db.update_folder(
            folder_id=folder_id,
            user_id=user_id,
            name=payload.name,
            icon=payload.icon,
            color=payload.color,
        )
    except library_db.FolderValidationError as e:
        if "already exists" in str(e):
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=str(e),
            ) from e
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error updating folder #{folder_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.post(
    "/{folder_id}/move",
    summary="Move Folder",
    response_model=library_model.LibraryFolder,
    responses={
        200: {"description": "Folder moved successfully"},
        400: {"description": "Validation error (circular reference, depth exceeded)"},
        404: {"description": "Folder or target parent not found"},
        409: {"description": "Folder name conflict in target location"},
        500: {"description": "Server error"},
    },
)
async def move_folder(
    folder_id: str,
    payload: library_model.FolderMoveRequest,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> library_model.LibraryFolder:
    """
    Move a folder to a new parent.

    Args:
        folder_id: ID of the folder to move.
        payload: The move request with target parent.
        user_id: ID of the authenticated user.

    Returns:
        The moved LibraryFolder.
    """
    try:
        return await library_db.move_folder(
            folder_id=folder_id,
            user_id=user_id,
            target_parent_id=payload.target_parent_id,
        )
    except library_db.FolderValidationError as e:
        if "already exists" in str(e):
            raise HTTPException(
                status_code=status.HTTP_409_CONFLICT,
                detail=str(e),
            ) from e
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=str(e),
        ) from e
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error moving folder #{folder_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


@router.delete(
    "/{folder_id}",
    summary="Delete Folder",
    status_code=status.HTTP_204_NO_CONTENT,
    responses={
        204: {"description": "Folder deleted successfully"},
        404: {"description": "Folder not found"},
        500: {"description": "Server error"},
    },
)
async def delete_folder(
    folder_id: str,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> Response:
    """
    Soft-delete a folder and all its contents.

    Args:
        folder_id: ID of the folder to delete.
        user_id: ID of the authenticated user.

    Returns:
        204 No Content if successful.
    """
    try:
        await library_db.delete_folder(
            folder_id=folder_id,
            user_id=user_id,
            soft_delete=True,
        )
        return Response(status_code=status.HTTP_204_NO_CONTENT)
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error deleting folder #{folder_id}: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e


# === Bulk Agent Operations ===


@router.post(
    "/agents/bulk-move",
    summary="Bulk Move Agents",
    response_model=list[library_model.LibraryAgent],
    responses={
        200: {"description": "Agents moved successfully"},
        404: {"description": "Folder not found"},
        500: {"description": "Server error"},
    },
)
async def bulk_move_agents(
    payload: library_model.BulkMoveAgentsRequest,
    user_id: str = Security(autogpt_auth_lib.get_user_id),
) -> list[library_model.LibraryAgent]:
    """
    Move multiple agents to a folder.

    Args:
        payload: The bulk move request with agent IDs and target folder.
        user_id: ID of the authenticated user.

    Returns:
        The updated LibraryAgents.
    """
    try:
        return await library_db.bulk_move_agents_to_folder(
            agent_ids=payload.agent_ids,
            folder_id=payload.folder_id,
            user_id=user_id,
        )
    except NotFoundError as e:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=str(e),
        ) from e
    except DatabaseError as e:
        logger.error(f"Database error bulk moving agents: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e),
        ) from e
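# --- Editor's sketch (not part of the diff): exercising the folders API as a
# client. The base URL and token are placeholders, and the path prefix depends on
# where this library router is mounted; only the "/folders..." suffixes above are
# taken from the source.
import httpx

async def _example_folders_api_client() -> None:
    async with httpx.AsyncClient(
        base_url="http://localhost:8000/api/library",  # assumed mount point
        headers={"Authorization": "Bearer <token>"},
    ) as client:
        created = (await client.post("/folders", json={"name": "Work"})).json()
        await client.post(
            "/folders/agents/bulk-move",
            json={"agent_ids": ["agent-1"], "folder_id": created["id"]},
        )
        await client.delete(f"/folders/{created['id']}")  # expects 204 No Content
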
@@ -105,7 +105,6 @@ def library_agent_include(
    """
    result: prisma.types.LibraryAgentInclude = {
        "Creator": True,  # Always needed for creator info
        "Folder": True,  # Always needed for folder info
    }

    # Build AgentGraph include based on requested options

@@ -1,50 +0,0 @@
/*
  Warnings:

  - You are about to drop the column `search` on the `StoreListingVersion` table. All the data in the column will be lost.

*/
-- DropIndex
DROP INDEX "UnifiedContentEmbedding_search_idx";

-- AlterTable
ALTER TABLE "LibraryAgent" ADD COLUMN "folderId" TEXT;

-- AlterTable
ALTER TABLE "StoreListingVersion" DROP COLUMN "search";

-- CreateTable
CREATE TABLE "LibraryFolder" (
    "id" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "userId" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "icon" TEXT,
    "color" TEXT,
    "parentId" TEXT,
    "isDeleted" BOOLEAN NOT NULL DEFAULT false,

    CONSTRAINT "LibraryFolder_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "LibraryFolder_userId_isDeleted_idx" ON "LibraryFolder"("userId", "isDeleted");

-- CreateIndex
CREATE INDEX "LibraryFolder_parentId_idx" ON "LibraryFolder"("parentId");

-- CreateIndex
CREATE UNIQUE INDEX "LibraryFolder_userId_parentId_name_key" ON "LibraryFolder"("userId", "parentId", "name");

-- CreateIndex
CREATE INDEX "LibraryAgent_folderId_idx" ON "LibraryAgent"("folderId");

-- AddForeignKey
ALTER TABLE "LibraryAgent" ADD CONSTRAINT "LibraryAgent_folderId_fkey" FOREIGN KEY ("folderId") REFERENCES "LibraryFolder"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "LibraryFolder" ADD CONSTRAINT "LibraryFolder_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "LibraryFolder" ADD CONSTRAINT "LibraryFolder_parentId_fkey" FOREIGN KEY ("parentId") REFERENCES "LibraryFolder"("id") ON DELETE CASCADE ON UPDATE CASCADE;

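-- Editor's note (not part of the diff): because "parentId" is nullable, the unique
-- index above does NOT deduplicate root-level names in Postgres (NULLs never
-- compare equal), so two "My Agents" folders can coexist at root unless the
-- application enforces uniqueness itself. Illustrative check for such duplicates:
SELECT "userId", "name", COUNT(*)
FROM "LibraryFolder"
WHERE "parentId" IS NULL AND "isDeleted" = false
GROUP BY "userId", "name"
HAVING COUNT(*) > 1;
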
68 changes: autogpt_platform/backend/poetry.lock (generated)
@@ -441,14 +441,14 @@ develop = true
colorama = "^0.4.6"
cryptography = "^46.0"
expiringdict = "^1.2.2"
fastapi = "^0.128.0"
fastapi = "^0.128.7"
google-cloud-logging = "^3.13.0"
launchdarkly-server-sdk = "^9.14.1"
launchdarkly-server-sdk = "^9.15.0"
pydantic = "^2.12.5"
pydantic-settings = "^2.12.0"
pyjwt = {version = "^2.11.0", extras = ["crypto"]}
redis = "^6.2.0"
supabase = "^2.27.2"
supabase = "^2.28.0"
uvicorn = "^0.40.0"

[package.source]
@@ -1382,14 +1382,14 @@ tzdata = "*"

[[package]]
name = "fastapi"
version = "0.128.6"
version = "0.128.7"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "fastapi-0.128.6-py3-none-any.whl", hash = "sha256:bb1c1ef87d6086a7132d0ab60869d6f1ee67283b20fbf84ec0003bd335099509"},
    {file = "fastapi-0.128.6.tar.gz", hash = "sha256:0cb3946557e792d731b26a42b04912f16367e3c3135ea8290f620e234f2b604f"},
    {file = "fastapi-0.128.7-py3-none-any.whl", hash = "sha256:6bd9bd31cb7047465f2d3fa3ba3f33b0870b17d4eaf7cdb36d1576ab060ad662"},
    {file = "fastapi-0.128.7.tar.gz", hash = "sha256:783c273416995486c155ad2c0e2b45905dedfaf20b9ef8d9f6a9124670639a24"},
]

[package.dependencies]
@@ -3117,14 +3117,14 @@ urllib3 = ">=1.26.0,<3"

[[package]]
name = "launchdarkly-server-sdk"
version = "9.14.1"
version = "9.15.0"
description = "LaunchDarkly SDK for Python"
optional = false
python-versions = ">=3.9"
python-versions = ">=3.10"
groups = ["main"]
files = [
    {file = "launchdarkly_server_sdk-9.14.1-py3-none-any.whl", hash = "sha256:a9e2bd9ecdef845cd631ae0d4334a1115e5b44257c42eb2349492be4bac7815c"},
    {file = "launchdarkly_server_sdk-9.14.1.tar.gz", hash = "sha256:1df44baf0a0efa74d8c1dad7a00592b98bce7d19edded7f770da8dbc49922213"},
    {file = "launchdarkly_server_sdk-9.15.0-py3-none-any.whl", hash = "sha256:c267e29bfa3fb5e2a06a208448ada6ed5557a2924979b8d79c970b45d227c668"},
    {file = "launchdarkly_server_sdk-9.15.0.tar.gz", hash = "sha256:f31441b74bc1a69c381db57c33116509e407a2612628ad6dff0a7dbb39d5020b"},
]

[package.dependencies]
@@ -4728,14 +4728,14 @@ tests = ["coverage-conditional-plugin (>=0.9.0)", "portalocker[redis]", "pytest

[[package]]
name = "postgrest"
version = "2.27.3"
version = "2.28.0"
description = "PostgREST client for Python. This library provides an ORM interface to PostgREST."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "postgrest-2.27.3-py3-none-any.whl", hash = "sha256:ed79123af7127edd78d538bfe8351d277e45b1a36994a4dbf57ae27dde87a7b7"},
    {file = "postgrest-2.27.3.tar.gz", hash = "sha256:c2e2679addfc8eaab23197bad7ddaee6cbb4cbe8c483ebd2d2e5219543037cc3"},
    {file = "postgrest-2.28.0-py3-none-any.whl", hash = "sha256:7bca2f24dd1a1bf8a3d586c7482aba6cd41662da6733045fad585b63b7f7df75"},
    {file = "postgrest-2.28.0.tar.gz", hash = "sha256:c36b38646d25ea4255321d3d924ce70f8d20ec7799cb42c1221d6a818d4f6515"},
]

[package.dependencies]
@@ -6260,14 +6260,14 @@ all = ["numpy"]

[[package]]
name = "realtime"
version = "2.27.3"
version = "2.28.0"
description = ""
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "realtime-2.27.3-py3-none-any.whl", hash = "sha256:f571115f86988e33c41c895cb3fba2eaa1b693aeaede3617288f44274ca90f43"},
    {file = "realtime-2.27.3.tar.gz", hash = "sha256:02b082243107656a5ef3fb63e8e2ab4c40bc199abb45adb8a42ed63f089a1041"},
    {file = "realtime-2.28.0-py3-none-any.whl", hash = "sha256:db1bd59bab9b1fcc9f9d3b1a073bed35bf4994d720e6751f10031a58d57a3836"},
    {file = "realtime-2.28.0.tar.gz", hash = "sha256:d18cedcebd6a8f22fcd509bc767f639761eb218b7b2b6f14fc4205b6259b50fc"},
]

[package.dependencies]
@@ -7024,14 +7024,14 @@ full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart

[[package]]
name = "storage3"
version = "2.27.3"
version = "2.28.0"
description = "Supabase Storage client for Python."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "storage3-2.27.3-py3-none-any.whl", hash = "sha256:11a05b7da84bccabeeea12d940bca3760cf63fe6ca441868677335cfe4fdfbe0"},
    {file = "storage3-2.27.3.tar.gz", hash = "sha256:dc1a4a010cf36d5482c5cb6c1c28fc5f00e23284342b89e4ae43b5eae8501ddb"},
    {file = "storage3-2.28.0-py3-none-any.whl", hash = "sha256:ecb50efd2ac71dabbdf97e99ad346eafa630c4c627a8e5a138ceb5fbbadae716"},
    {file = "storage3-2.28.0.tar.gz", hash = "sha256:bc1d008aff67de7a0f2bd867baee7aadbcdb6f78f5a310b4f7a38e8c13c19865"},
]

[package.dependencies]
@@ -7091,35 +7091,35 @@ typing-extensions = {version = ">=4.5.0", markers = "python_version >= \"3.7\""}

[[package]]
name = "supabase"
version = "2.27.3"
version = "2.28.0"
description = "Supabase client for Python."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "supabase-2.27.3-py3-none-any.whl", hash = "sha256:082a74642fcf9954693f1ce8c251baf23e4bda26ffdbc8dcd4c99c82e60d69ff"},
    {file = "supabase-2.27.3.tar.gz", hash = "sha256:5e5a348232ac4315c1032ddd687278f0b982465471f0cbb52bca7e6a66495ff3"},
    {file = "supabase-2.28.0-py3-none-any.whl", hash = "sha256:42776971c7d0ccca16034df1ab96a31c50228eb1eb19da4249ad2f756fc20272"},
    {file = "supabase-2.28.0.tar.gz", hash = "sha256:aea299aaab2a2eed3c57e0be7fc035c6807214194cce795a3575add20268ece1"},
]

[package.dependencies]
httpx = ">=0.26,<0.29"
postgrest = "2.27.3"
realtime = "2.27.3"
storage3 = "2.27.3"
supabase-auth = "2.27.3"
supabase-functions = "2.27.3"
postgrest = "2.28.0"
realtime = "2.28.0"
storage3 = "2.28.0"
supabase-auth = "2.28.0"
supabase-functions = "2.28.0"
yarl = ">=1.22.0"

[[package]]
name = "supabase-auth"
version = "2.27.3"
version = "2.28.0"
description = "Python Client Library for Supabase Auth"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "supabase_auth-2.27.3-py3-none-any.whl", hash = "sha256:82a4262eaad85383319d394dab0eea11fcf3ebd774062aef8ea3874ae2f02579"},
    {file = "supabase_auth-2.27.3.tar.gz", hash = "sha256:39894d4bc60b6f23b5cff4d0d7d4c1659e5d69563cadf014d4896f780ca8ca78"},
    {file = "supabase_auth-2.28.0-py3-none-any.whl", hash = "sha256:2ac85026cc285054c7fa6d41924f3a333e9ec298c013e5b5e1754039ba7caec9"},
    {file = "supabase_auth-2.28.0.tar.gz", hash = "sha256:2bb8f18ff39934e44b28f10918db965659f3735cd6fbfcc022fe0b82dbf8233e"},
]

[package.dependencies]
@@ -7129,14 +7129,14 @@ pyjwt = {version = ">=2.10.1", extras = ["crypto"]}

[[package]]
name = "supabase-functions"
version = "2.27.3"
version = "2.28.0"
description = "Library for Supabase Functions"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
    {file = "supabase_functions-2.27.3-py3-none-any.whl", hash = "sha256:9d14a931d49ede1c6cf5fbfceb11c44061535ba1c3f310f15384964d86a83d9e"},
    {file = "supabase_functions-2.27.3.tar.gz", hash = "sha256:e954f1646da8ca6e7e16accef58d0884a5f97b25956ee98e7d4927a210ed92f9"},
    {file = "supabase_functions-2.28.0-py3-none-any.whl", hash = "sha256:30bf2d586f8df285faf0621bb5d5bb3ec3157234fc820553ca156f009475e4ae"},
    {file = "supabase_functions-2.28.0.tar.gz", hash = "sha256:db3dddfc37aca5858819eb461130968473bd8c75bd284581013958526dac718b"},
]

[package.dependencies]
@@ -8440,4 +8440,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt
[metadata]
lock-version = "2.1"
python-versions = ">=3.10,<3.14"
content-hash = "c06e96ad49388ba7a46786e9ea55ea2c1a57408e15613237b4bee40a592a12af"
content-hash = "fa9c5deadf593e815dd2190f58e22152373900603f5f244b9616cd721de84d2f"

@@ -65,7 +65,7 @@ sentry-sdk = {extras = ["anthropic", "fastapi", "launchdarkly", "openai", "sqlal
sqlalchemy = "^2.0.40"
strenum = "^0.4.9"
stripe = "^11.5.0"
supabase = "2.27.3"
supabase = "2.28.0"
tenacity = "^9.1.4"
todoist-api-python = "^2.1.7"
tweepy = "^4.16.0"

@@ -51,7 +51,6 @@ model User {
  ChatSessions   ChatSession[]
  AgentPresets   AgentPreset[]
  LibraryAgents  LibraryAgent[]
  LibraryFolders LibraryFolder[]

  Profile        Profile[]
  UserOnboarding UserOnboarding?
@@ -396,9 +395,6 @@ model LibraryAgent {
  creatorId String?
  Creator   Profile? @relation(fields: [creatorId], references: [id])

  folderId String?
  Folder   LibraryFolder? @relation(fields: [folderId], references: [id], onDelete: Restrict)

  useGraphIsActiveVersion Boolean @default(false)

  isFavorite Boolean @default(false)
@@ -411,32 +407,6 @@ model LibraryAgent {
  @@unique([userId, agentGraphId, agentGraphVersion])
  @@index([agentGraphId, agentGraphVersion])
  @@index([creatorId])
  @@index([folderId])
}

model LibraryFolder {
  id        String   @id @default(uuid())
  createdAt DateTime @default(now())
  updatedAt DateTime @default(now()) @updatedAt

  userId String
  User   User   @relation(fields: [userId], references: [id], onDelete: Cascade)

  name  String
  icon  String?
  color String?

  parentId String?
  Parent   LibraryFolder?  @relation("FolderHierarchy", fields: [parentId], references: [id], onDelete: Cascade)
  Children LibraryFolder[] @relation("FolderHierarchy")

  isDeleted Boolean @default(false)

  LibraryAgents LibraryAgent[]

  @@unique([userId, parentId, name]) // Name unique per parent per user
  @@index([userId, isDeleted])
  @@index([parentId])
}

////////////////////////////////////////////////////////////

@@ -37,7 +37,7 @@ services:
      context: ../
      dockerfile: autogpt_platform/backend/Dockerfile
      target: migrate
    command: ["sh", "-c", "poetry run prisma generate && poetry run gen-prisma-stub && poetry run prisma migrate deploy"]
    command: ["sh", "-c", "prisma generate && python3 gen_prisma_types_stub.py && prisma migrate deploy"]
    develop:
      watch:
        - path: ./
@@ -56,7 +56,7 @@ services:
      test:
        [
          "CMD-SHELL",
          "poetry run prisma migrate status | grep -q 'No pending migrations' || exit 1",
          "prisma migrate status | grep -q 'No pending migrations' || exit 1",
        ]
      interval: 30s
      timeout: 10s

@@ -32,7 +32,6 @@
  "dependencies": {
    "@ai-sdk/react": "3.0.61",
    "@faker-js/faker": "10.0.0",
    "@ferrucc-io/emoji-picker": "0.0.48",
    "@hookform/resolvers": "5.2.2",
    "@next/third-parties": "15.4.6",
    "@phosphor-icons/react": "2.1.10",

116 changes: autogpt_platform/frontend/pnpm-lock.yaml (generated)
@@ -18,9 +18,6 @@ importers:
      '@faker-js/faker':
        specifier: 10.0.0
        version: 10.0.0
      '@ferrucc-io/emoji-picker':
        specifier: 0.0.48
        version: 0.0.48(@babel/core@7.28.5)(@babel/template@7.27.2)(@types/react@18.3.17)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(tailwindcss@3.4.17)
      '@hookform/resolvers':
        specifier: 5.2.2
        version: 5.2.2(react-hook-form@7.66.0(react@18.3.1))
@@ -1510,14 +1507,6 @@ packages:
    resolution: {integrity: sha512-UollFEUkVXutsaP+Vndjxar40Gs5JL2HeLcl8xO1QAjJgOdhc3OmBFWyEylS+RddWaaBiAzH+5/17PLQJwDiLw==}
    engines: {node: ^20.19.0 || ^22.13.0 || ^23.5.0 || >=24.0.0, npm: '>=10'}

  '@ferrucc-io/emoji-picker@0.0.48':
    resolution: {integrity: sha512-DJ5u+6VLF9OK7x+S/luwrVb5CHC6W16jL5b8vBUYNpxKWSuFgyliDHVtw1SGe6+dr5RUbf8WQwPJdKZmU3Ittg==}
    engines: {node: '>=18'}
    peerDependencies:
      react: ^18.2.0 || ^19.0.0
      react-dom: ^18.2.0 || ^19.0.0
      tailwindcss: '>=3.0.0'

  '@floating-ui/core@1.7.3':
    resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==}

@@ -3125,10 +3114,6 @@ packages:
  '@shikijs/vscode-textmate@10.0.2':
    resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==}

  '@sindresorhus/is@4.6.0':
    resolution: {integrity: sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==}
    engines: {node: '>=10'}

  '@standard-schema/spec@1.0.0':
    resolution: {integrity: sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==}

@@ -3396,19 +3381,10 @@ packages:
      react: '>=16.8'
      react-dom: '>=16.8'

  '@tanstack/react-virtual@3.13.18':
    resolution: {integrity: sha512-dZkhyfahpvlaV0rIKnvQiVoWPyURppl6w4m9IwMDpuIjcJ1sD9YGWrt0wISvgU7ewACXx2Ct46WPgI6qAD4v6A==}
    peerDependencies:
      react: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0
      react-dom: ^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0

  '@tanstack/table-core@8.21.3':
    resolution: {integrity: sha512-ldZXEhOBb8Is7xLs01fR3YEc3DERiz5silj8tnGkFZytt1abEvl/GhUmCE0PMLaMPTa3Jk4HbKmRlHmu+gCftg==}
    engines: {node: '>=12'}

  '@tanstack/virtual-core@3.13.18':
    resolution: {integrity: sha512-Mx86Hqu1k39icq2Zusq+Ey2J6dDWTjDvEv43PJtRCoEYTLyfaPnxIQ6iy7YAOK0NV/qOEmZQ/uCufrppZxTgcg==}

  '@testing-library/dom@10.4.1':
    resolution: {integrity: sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==}
    engines: {node: '>=18'}
@@ -4402,10 +4378,6 @@ packages:
    resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
    engines: {node: '>=10'}

  char-regex@1.0.2:
    resolution: {integrity: sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==}
    engines: {node: '>=10'}

  character-entities-html4@2.1.0:
    resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==}

@@ -5023,9 +4995,6 @@ packages:
  emoji-regex@9.2.2:
    resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==}

  emojilib@2.4.0:
    resolution: {integrity: sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==}

  emojis-list@3.0.0:
    resolution: {integrity: sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==}
    engines: {node: '>= 4'}
@@ -6006,24 +5975,6 @@ packages:
    resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==}
    hasBin: true

  jotai@2.17.1:
    resolution: {integrity: sha512-TFNZZDa/0ewCLQyRC/Sq9crtixNj/Xdf/wmj9631xxMuKToVJZDbqcHIYN0OboH+7kh6P6tpIK7uKWClj86PKw==}
    engines: {node: '>=12.20.0'}
    peerDependencies:
      '@babel/core': '>=7.0.0'
      '@babel/template': '>=7.0.0'
      '@types/react': '>=17.0.0'
      react: '>=17.0.0'
    peerDependenciesMeta:
      '@babel/core':
        optional: true
      '@babel/template':
        optional: true
      '@types/react':
        optional: true
      react:
        optional: true

  js-tokens@4.0.0:
    resolution: {integrity: sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==}

@@ -6642,10 +6593,6 @@ packages:
  node-abort-controller@3.1.1:
    resolution: {integrity: sha512-AGK2yQKIjRuqnc6VkX2Xj5d+QW8xZ87pa1UK6yA6ouUyuxfHuMP6umE5QK7UmTeOAymo+Zx1Fxiuw9rVx8taHQ==}

  node-emoji@2.2.0:
    resolution: {integrity: sha512-Z3lTE9pLaJF47NyMhd4ww1yFTAP8YhYI8SleJiHzM46Fgpm5cnNzSl9XfzFNqbaz+VlJrIj3fXQ4DeN1Rjm6cw==}
    engines: {node: '>=18'}

  node-fetch-h2@2.3.0:
    resolution: {integrity: sha512-ofRW94Ab0T4AOh5Fk8t0h8OBWrmjb0SSB20xh1H8YnPV9EJ+f5AMoYSUQ2zgJ4Iq2HAK0I2l5/Nequ8YzFS3Hg==}
    engines: {node: 4.x || >=6.0.0}
@@ -7744,10 +7691,6 @@ packages:
    resolution: {integrity: sha512-LH7FpTAkeD+y5xQC4fzS+tFtaNlvt3Ib1zKzvhjv/Y+cioV4zIuw4IZr2yhRLu67CWL7FR9/6KXKnjRoZTvGGQ==}
    engines: {node: '>=12'}

  skin-tone@2.0.0:
    resolution: {integrity: sha512-kUMbT1oBJCpgrnKoSr0o6wPtvRWT9W9UKvGLwfJYO2WuahZRHOpEyL1ckyMGgMWh0UdpmaoFqKKD29WTomNEGA==}
    engines: {node: '>=8'}

  slash@3.0.0:
    resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==}
    engines: {node: '>=8'}
@@ -8225,13 +8168,6 @@ packages:
    resolution: {integrity: sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==}
    engines: {node: '>=4'}

  unicode-emoji-json@0.8.0:
    resolution: {integrity: sha512-3wDXXvp6YGoKGhS2O2H7+V+bYduOBydN1lnI0uVfr1cIdY02uFFiEH1i3kE5CCE4l6UqbLKVmEFW9USxTAMD1g==}

  unicode-emoji-modifier-base@1.0.0:
    resolution: {integrity: sha512-yLSH4py7oFH3oG/9K+XWrz1pSi3dfUrWEnInbxMfArOfc1+33BlGPQtLsOYwvdMy11AwUBetYuaRxSPqgkq+8g==}
    engines: {node: '>=4'}

  unicode-match-property-ecmascript@2.0.0:
    resolution: {integrity: sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==}
    engines: {node: '>=4'}
@@ -9841,22 +9777,6 @@ snapshots:

  '@faker-js/faker@10.0.0': {}

  '@ferrucc-io/emoji-picker@0.0.48(@babel/core@7.28.5)(@babel/template@7.27.2)(@types/react@18.3.17)(react-dom@18.3.1(react@18.3.1))(react@18.3.1)(tailwindcss@3.4.17)':
    dependencies:
      '@tanstack/react-virtual': 3.13.18(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
      clsx: 2.1.1
      jotai: 2.17.1(@babel/core@7.28.5)(@babel/template@7.27.2)(@types/react@18.3.17)(react@18.3.1)
      node-emoji: 2.2.0
      react: 18.3.1
      react-dom: 18.3.1(react@18.3.1)
      tailwind-merge: 2.6.0
      tailwindcss: 3.4.17
      unicode-emoji-json: 0.8.0
    transitivePeerDependencies:
      - '@babel/core'
      - '@babel/template'
      - '@types/react'

  '@floating-ui/core@1.7.3':
    dependencies:
      '@floating-ui/utils': 0.2.10
@@ -11618,8 +11538,6 @@ snapshots:

  '@shikijs/vscode-textmate@10.0.2': {}

  '@sindresorhus/is@4.6.0': {}

  '@standard-schema/spec@1.0.0': {}

  '@standard-schema/spec@1.1.0': {}
@@ -12093,16 +12011,8 @@ snapshots:
      react: 18.3.1
      react-dom: 18.3.1(react@18.3.1)

  '@tanstack/react-virtual@3.13.18(react-dom@18.3.1(react@18.3.1))(react@18.3.1)':
    dependencies:
      '@tanstack/virtual-core': 3.13.18
      react: 18.3.1
      react-dom: 18.3.1(react@18.3.1)

  '@tanstack/table-core@8.21.3': {}

  '@tanstack/virtual-core@3.13.18': {}

  '@testing-library/dom@10.4.1':
    dependencies:
      '@babel/code-frame': 7.27.1
@@ -13194,8 +13104,6 @@ snapshots:
      ansi-styles: 4.3.0
      supports-color: 7.2.0

  char-regex@1.0.2: {}

  character-entities-html4@2.1.0: {}

  character-entities-legacy@3.0.0: {}
@@ -13839,8 +13747,6 @@ snapshots:

  emoji-regex@9.2.2: {}

  emojilib@2.4.0: {}

  emojis-list@3.0.0: {}

  endent@2.1.0:
@@ -15122,13 +15028,6 @@ snapshots:

  jiti@2.6.1: {}

  jotai@2.17.1(@babel/core@7.28.5)(@babel/template@7.27.2)(@types/react@18.3.17)(react@18.3.1):
    optionalDependencies:
      '@babel/core': 7.28.5
      '@babel/template': 7.27.2
      '@types/react': 18.3.17
      react: 18.3.1

  js-tokens@4.0.0: {}

  js-yaml@4.1.0:
@@ -15997,13 +15896,6 @@ snapshots:

  node-abort-controller@3.1.1: {}

  node-emoji@2.2.0:
    dependencies:
      '@sindresorhus/is': 4.6.0
      char-regex: 1.0.2
      emojilib: 2.4.0
      skin-tone: 2.0.0

  node-fetch-h2@2.3.0:
    dependencies:
      http2-client: 1.3.5
@@ -17304,10 +17196,6 @@ snapshots:
    dependencies:
      jsep: 1.4.0

  skin-tone@2.0.0:
    dependencies:
      unicode-emoji-modifier-base: 1.0.0

  slash@3.0.0: {}

  sonner@2.0.7(react-dom@18.3.1(react@18.3.1))(react@18.3.1):
@@ -17823,10 +17711,6 @@ snapshots:

  unicode-canonical-property-names-ecmascript@2.0.1: {}

  unicode-emoji-json@0.8.0: {}

  unicode-emoji-modifier-base@1.0.0: {}

  unicode-match-property-ecmascript@2.0.0:
    dependencies:
      unicode-canonical-property-names-ecmascript: 2.0.1

@@ -3,6 +3,7 @@
import type { ToolUIPart } from "ai";
import { MorphingTextAnimation } from "../../components/MorphingTextAnimation/MorphingTextAnimation";
import { ToolAccordion } from "../../components/ToolAccordion/ToolAccordion";
import { BlockDetailsCard } from "./components/BlockDetailsCard/BlockDetailsCard";
import { BlockOutputCard } from "./components/BlockOutputCard/BlockOutputCard";
import { ErrorCard } from "./components/ErrorCard/ErrorCard";
import { SetupRequirementsCard } from "./components/SetupRequirementsCard/SetupRequirementsCard";
@@ -11,6 +12,7 @@ import {
  getAnimationText,
  getRunBlockToolOutput,
  isRunBlockBlockOutput,
  isRunBlockDetailsOutput,
  isRunBlockErrorOutput,
  isRunBlockSetupRequirementsOutput,
  ToolIcon,
@@ -41,6 +43,7 @@ export function RunBlockTool({ part }: Props) {
    part.state === "output-available" &&
    !!output &&
    (isRunBlockBlockOutput(output) ||
      isRunBlockDetailsOutput(output) ||
      isRunBlockSetupRequirementsOutput(output) ||
      isRunBlockErrorOutput(output));

@@ -58,6 +61,10 @@ export function RunBlockTool({ part }: Props) {
        <ToolAccordion {...getAccordionMeta(output)}>
          {isRunBlockBlockOutput(output) && <BlockOutputCard output={output} />}

          {isRunBlockDetailsOutput(output) && (
            <BlockDetailsCard output={output} />
          )}

          {isRunBlockSetupRequirementsOutput(output) && (
            <SetupRequirementsCard output={output} />
          )}

@@ -0,0 +1,188 @@
import type { Meta, StoryObj } from "@storybook/nextjs";
import { ResponseType } from "@/app/api/__generated__/models/responseType";
import type { BlockDetailsResponse } from "../../helpers";
import { BlockDetailsCard } from "./BlockDetailsCard";

const meta: Meta<typeof BlockDetailsCard> = {
  title: "Copilot/RunBlock/BlockDetailsCard",
  component: BlockDetailsCard,
  parameters: {
    layout: "centered",
  },
  tags: ["autodocs"],
  decorators: [
    (Story) => (
      <div style={{ maxWidth: 480 }}>
        <Story />
      </div>
    ),
  ],
};

export default meta;
type Story = StoryObj<typeof meta>;

const baseBlock: BlockDetailsResponse = {
  type: ResponseType.block_details,
  message:
    "Here are the details for the GetWeather block. Provide the required inputs to run it.",
  session_id: "session-123",
  user_authenticated: true,
  block: {
    id: "block-abc-123",
    name: "GetWeather",
    description: "Fetches current weather data for a given location.",
    inputs: {
      type: "object",
      properties: {
        location: {
          title: "Location",
          type: "string",
          description:
            "City name or coordinates (e.g. 'London' or '51.5,-0.1')",
        },
        units: {
          title: "Units",
          type: "string",
          description: "Temperature units: 'metric' or 'imperial'",
        },
      },
      required: ["location"],
    },
    outputs: {
      type: "object",
      properties: {
        temperature: {
          title: "Temperature",
          type: "number",
          description: "Current temperature in the requested units",
        },
        condition: {
          title: "Condition",
          type: "string",
          description: "Weather condition description (e.g. 'Sunny', 'Rain')",
        },
      },
    },
    credentials: [],
  },
};

export const Default: Story = {
  args: {
    output: baseBlock,
  },
};

export const InputsOnly: Story = {
  args: {
    output: {
      ...baseBlock,
      message: "This block requires inputs. No outputs are defined.",
      block: {
        ...baseBlock.block,
        outputs: {},
      },
    },
  },
};

export const OutputsOnly: Story = {
  args: {
    output: {
      ...baseBlock,
      message: "This block has no required inputs.",
      block: {
        ...baseBlock.block,
        inputs: {},
      },
    },
  },
};

export const ManyFields: Story = {
  args: {
    output: {
      ...baseBlock,
      message: "Block with many input and output fields.",
      block: {
        ...baseBlock.block,
        name: "SendEmail",
        description: "Sends an email via SMTP.",
        inputs: {
          type: "object",
          properties: {
            to: {
              title: "To",
              type: "string",
              description: "Recipient email address",
            },
            subject: {
              title: "Subject",
              type: "string",
              description: "Email subject line",
            },
            body: {
              title: "Body",
              type: "string",
              description: "Email body content",
            },
            cc: {
              title: "CC",
              type: "string",
              description: "CC recipients (comma-separated)",
            },
            bcc: {
              title: "BCC",
              type: "string",
              description: "BCC recipients (comma-separated)",
            },
          },
          required: ["to", "subject", "body"],
        },
        outputs: {
          type: "object",
          properties: {
            message_id: {
              title: "Message ID",
              type: "string",
              description: "Unique ID of the sent email",
            },
            status: {
              title: "Status",
              type: "string",
              description: "Delivery status",
            },
          },
        },
      },
    },
  },
};

export const NoFieldDescriptions: Story = {
  args: {
    output: {
      ...baseBlock,
      message: "Fields without descriptions.",
      block: {
        ...baseBlock.block,
        name: "SimpleBlock",
        inputs: {
          type: "object",
          properties: {
            input_a: { title: "Input A", type: "string" },
            input_b: { title: "Input B", type: "number" },
          },
          required: ["input_a"],
        },
        outputs: {
          type: "object",
          properties: {
            result: { title: "Result", type: "string" },
          },
        },
      },
    },
  },
};
@@ -0,0 +1,103 @@
"use client";

import type { BlockDetailsResponse } from "../../helpers";
import {
  ContentBadge,
  ContentCard,
  ContentCardDescription,
  ContentCardTitle,
  ContentGrid,
  ContentMessage,
} from "../../../../components/ToolAccordion/AccordionContent";

interface Props {
  output: BlockDetailsResponse;
}

function SchemaFieldList({
  title,
  properties,
  required,
}: {
  title: string;
  properties: Record<string, unknown>;
  required?: string[];
}) {
  const entries = Object.entries(properties);
  if (entries.length === 0) return null;

  const requiredSet = new Set(required ?? []);

  return (
    <ContentCard>
      <ContentCardTitle className="text-xs">{title}</ContentCardTitle>
      <div className="mt-2 grid gap-2">
        {entries.map(([name, schema]) => {
          const field = schema as Record<string, unknown> | undefined;
          const fieldTitle =
            typeof field?.title === "string" ? field.title : name;
          const fieldType =
            typeof field?.type === "string" ? field.type : "unknown";
          const description =
            typeof field?.description === "string"
              ? field.description
              : undefined;

          return (
            <div key={name} className="rounded-xl border p-2">
              <div className="flex items-center justify-between gap-2">
                <ContentCardTitle className="text-xs">
                  {fieldTitle}
                </ContentCardTitle>
                <div className="flex gap-1">
                  <ContentBadge>{fieldType}</ContentBadge>
                  {requiredSet.has(name) && (
                    <ContentBadge>Required</ContentBadge>
                  )}
                </div>
              </div>
              {description && (
                <ContentCardDescription className="mt-1 text-xs">
                  {description}
                </ContentCardDescription>
              )}
            </div>
          );
        })}
      </div>
    </ContentCard>
  );
}

export function BlockDetailsCard({ output }: Props) {
  const inputs = output.block.inputs as {
    properties?: Record<string, unknown>;
    required?: string[];
  } | null;
  const outputs = output.block.outputs as {
    properties?: Record<string, unknown>;
    required?: string[];
  } | null;

  return (
    <ContentGrid>
      <ContentMessage>{output.message}</ContentMessage>

      {inputs?.properties && Object.keys(inputs.properties).length > 0 && (
        <SchemaFieldList
          title="Inputs"
          properties={inputs.properties}
          required={inputs.required}
        />
      )}

      {outputs?.properties && Object.keys(outputs.properties).length > 0 && (
        <SchemaFieldList
          title="Outputs"
          properties={outputs.properties}
          required={outputs.required}
        />
      )}
    </ContentGrid>
  );
}
@@ -10,18 +10,37 @@ import {
import type { ToolUIPart } from "ai";
import { OrbitLoader } from "../../components/OrbitLoader/OrbitLoader";

/** Block details returned on first run_block attempt (before input_data provided). */
export interface BlockDetailsResponse {
  type: typeof ResponseType.block_details;
  message: string;
  session_id?: string | null;
  block: {
    id: string;
    name: string;
    description: string;
    inputs: Record<string, unknown>;
    outputs: Record<string, unknown>;
    credentials: unknown[];
  };
  user_authenticated: boolean;
}

export interface RunBlockInput {
  block_id?: string;
  block_name?: string;
  input_data?: Record<string, unknown>;
}

export type RunBlockToolOutput =
  | SetupRequirementsResponse
  | BlockDetailsResponse
  | BlockOutputResponse
  | ErrorResponse;

const RUN_BLOCK_OUTPUT_TYPES = new Set<string>([
  ResponseType.setup_requirements,
  ResponseType.block_details,
  ResponseType.block_output,
  ResponseType.error,
]);
@@ -35,6 +54,15 @@ export function isRunBlockSetupRequirementsOutput(
  );
}

export function isRunBlockDetailsOutput(
  output: RunBlockToolOutput,
): output is BlockDetailsResponse {
  return (
    output.type === ResponseType.block_details ||
    ("block" in output && typeof output.block === "object")
  );
}

export function isRunBlockBlockOutput(
  output: RunBlockToolOutput,
): output is BlockOutputResponse {
@@ -64,6 +92,7 @@ function parseOutput(output: unknown): RunBlockToolOutput | null {
    return output as RunBlockToolOutput;
  }
  if ("block_id" in output) return output as BlockOutputResponse;
  if ("block" in output) return output as BlockDetailsResponse;
  if ("setup_info" in output) return output as SetupRequirementsResponse;
  if ("error" in output || "details" in output)
    return output as ErrorResponse;
@@ -84,17 +113,25 @@ export function getAnimationText(part: {
  output?: unknown;
}): string {
  const input = part.input as RunBlockInput | undefined;
  const blockName = input?.block_name?.trim();
  const blockId = input?.block_id?.trim();
  const blockText = blockId ? ` "${blockId}"` : "";
  // Prefer block_name if available, otherwise fall back to block_id
  const blockText = blockName
    ? ` "${blockName}"`
    : blockId
      ? ` "${blockId}"`
      : "";

  switch (part.state) {
    case "input-streaming":
    case "input-available":
      return `Running the block${blockText}`;
      return `Running${blockText}`;
    case "output-available": {
      const output = parseOutput(part.output);
      if (!output) return `Running the block${blockText}`;
      if (!output) return `Running${blockText}`;
      if (isRunBlockBlockOutput(output)) return `Ran "${output.block_name}"`;
      if (isRunBlockDetailsOutput(output))
        return `Details for "${output.block.name}"`;
      if (isRunBlockSetupRequirementsOutput(output)) {
        return `Setup needed for "${output.setup_info.agent_name}"`;
      }
@@ -158,6 +195,21 @@ export function getAccordionMeta(output: RunBlockToolOutput): {
    };
  }

  if (isRunBlockDetailsOutput(output)) {
    const inputKeys = Object.keys(
      (output.block.inputs as { properties?: Record<string, unknown> })
        ?.properties ?? {},
    );
    return {
      icon,
      title: output.block.name,
      description:
        inputKeys.length > 0
          ? `${inputKeys.length} input field${inputKeys.length === 1 ? "" : "s"} available`
          : output.message,
    };
  }

  if (isRunBlockSetupRequirementsOutput(output)) {
    const missingCredsCount = Object.keys(
      (output.setup_info.user_readiness?.missing_credentials ?? {}) as Record<

@@ -1,25 +1,17 @@
"use client";

import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { LibraryAgentSort } from "@/app/api/__generated__/models/libraryAgentSort";
import { Text } from "@/components/atoms/Text/Text";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { HeartIcon } from "@phosphor-icons/react";
import { useFavoriteAgents } from "../../hooks/useFavoriteAgents";
import { LibraryAgentCard } from "../LibraryAgentCard/LibraryAgentCard";
import { LibraryTabs, Tab } from "../LibraryTabs/LibraryTabs";
import { LibraryActionSubHeader } from "../LibraryActionSubHeader/LibraryActionSubHeader";

interface Props {
  searchTerm: string;
  tabs: Tab[];
  activeTab: string;
  onTabChange: (tabId: string) => void;
  setLibrarySort: (value: LibraryAgentSort) => void;
}

export function FavoritesSection({ searchTerm, tabs, activeTab, onTabChange, setLibrarySort }: Props) {
export function FavoritesSection({ searchTerm }: Props) {
  const {
    allAgents: favoriteAgents,
    agentLoading: isLoading,
@@ -29,26 +21,38 @@ export function FavoritesSection({ searchTerm, tabs, activeTab, onTabChange, set
    isFetchingNextPage,
  } = useFavoriteAgents({ searchTerm });

  return (
    <>
      <LibraryActionSubHeader agentCount={agentCount} setLibrarySort={setLibrarySort} />
      <LibraryTabs tabs={tabs} activeTab={activeTab} onTabChange={onTabChange} />
  if (isLoading || favoriteAgents.length === 0) {
    return null;
  }

      {isLoading ? (
        <div className="flex h-[200px] items-center justify-center">
          <LoadingSpinner size="large" />
  return (
    <div className="!mb-8">
      <div className="mb-3 flex items-center gap-2 p-2">
        <HeartIcon className="h-5 w-5" weight="fill" />
        <div className="flex items-baseline gap-2">
          <Text variant="h4">Favorites</Text>
          {!isLoading && (
            <Text
              variant="body"
              data-testid="agents-count"
              className="relative bottom-px text-zinc-500"
            >
              {agentCount}
            </Text>
          )}
        </div>
      ) : favoriteAgents.length === 0 ? (
        <div className="flex h-[200px] flex-col items-center justify-center gap-2 text-zinc-500">
          <HeartIcon className="h-10 w-10" />
          <Text variant="body">No favorite agents yet</Text>
        </div>
      ) : (
      </div>

      <div className="relative">
        <InfiniteScroll
          isFetchingNextPage={isFetchingNextPage}
          fetchNextPage={fetchNextPage}
          hasNextPage={hasNextPage}
          loader={<LoadingSpinner size="medium" />}
          loader={
            <div className="flex h-8 w-full items-center justify-center">
              <div className="h-6 w-6 animate-spin rounded-full border-b-2 border-t-2 border-neutral-800" />
            </div>
          }
        >
          <div className="grid grid-cols-1 gap-4 sm:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4">
            {favoriteAgents.map((agent: LibraryAgent) => (
@@ -56,7 +60,9 @@ export function FavoritesSection({ searchTerm, tabs, activeTab, onTabChange, set
            ))}
          </div>
        </InfiniteScroll>
      )}
    </>
      </div>

      {favoriteAgents.length > 0 && <div className="!mt-10 border-t" />}
    </div>
  );
}

@@ -1,66 +0,0 @@
"use client";

import { motion, AnimatePresence } from "framer-motion";
import { HeartIcon } from "@phosphor-icons/react";
import { useEffect, useState } from "react";

interface FlyingHeartProps {
  startPosition: { x: number; y: number } | null;
  targetPosition: { x: number; y: number } | null;
  onAnimationComplete: () => void;
}

export function FlyingHeart({
  startPosition,
  targetPosition,
  onAnimationComplete,
}: FlyingHeartProps) {
  const [isVisible, setIsVisible] = useState(false);

  useEffect(() => {
    if (startPosition && targetPosition) {
      setIsVisible(true);
    }
  }, [startPosition, targetPosition]);

  if (!startPosition || !targetPosition) return null;

  return (
    <AnimatePresence>
      {isVisible && (
        <motion.div
          className="pointer-events-none fixed z-50"
          initial={{
            x: startPosition.x,
            y: startPosition.y,
            scale: 1,
            opacity: 1,
          }}
          animate={{
            x: targetPosition.x,
            y: targetPosition.y,
            scale: 0.5,
            opacity: 0,
          }}
          exit={{ opacity: 0 }}
          transition={{
            type: "spring",
            damping: 20,
            stiffness: 200,
            duration: 0.5,
          }}
          onAnimationComplete={() => {
            setIsVisible(false);
            onAnimationComplete();
          }}
        >
          <HeartIcon
            size={24}
            weight="fill"
            className="text-red-500 drop-shadow-md"
          />
        </motion.div>
      )}
    </AnimatePresence>
  );
}
@@ -13,7 +13,7 @@ export function LibraryActionSubHeader({ agentCount, setLibrarySort }: Props) {
  return (
    <div className="flex items-baseline justify-between">
      <div className="flex items-baseline gap-4">
        <Text variant="h5">My agents</Text>
        <Text variant="h4">My agents</Text>
        <Text
          variant="body"
          data-testid="agents-count"

@@ -4,7 +4,6 @@ import { Text } from "@/components/atoms/Text/Text";
import { CaretCircleRightIcon } from "@phosphor-icons/react";
import Image from "next/image";
import NextLink from "next/link";
import { motion } from "framer-motion";

import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import Avatar, {
@@ -15,24 +14,13 @@ import { Link } from "@/components/atoms/Link/Link";
import { AgentCardMenu } from "./components/AgentCardMenu";
import { FavoriteButton } from "./components/FavoriteButton";
import { useLibraryAgentCard } from "./useLibraryAgentCard";
import { useFavoriteAnimation } from "../../context/FavoriteAnimationContext";

interface Props {
  agent: LibraryAgent;
  draggable?: boolean;
}

export function LibraryAgentCard({
  agent,
  draggable = true,
}: Props) {
export function LibraryAgentCard({ agent }: Props) {
  const { id, name, graph_id, can_access_graph, image_url } = agent;
  const { triggerFavoriteAnimation } = useFavoriteAnimation();

  function handleDragStart(e: React.DragEvent<HTMLDivElement>) {
    e.dataTransfer.setData("application/agent-id", id);
    e.dataTransfer.effectAllowed = "move";
  }

  const {
    isFromMarketplace,
@@ -40,29 +28,14 @@ export function LibraryAgentCard({
    profile,
    creator_image_url,
    handleToggleFavorite,
  } = useLibraryAgentCard({
    agent,
    onFavoriteAdd: triggerFavoriteAnimation,
  });
  } = useLibraryAgentCard({ agent });

  return (
    <div
      draggable={draggable}
      onDragStart={handleDragStart}
      className="cursor-grab active:cursor-grabbing"
      data-testid="library-agent-card"
      data-agent-id={id}
      className="group relative inline-flex h-[10.625rem] w-full max-w-[25rem] flex-col items-start justify-start gap-2.5 rounded-medium border border-zinc-100 bg-white transition-all duration-300 hover:shadow-md"
    >
      <motion.div
        layoutId={`agent-card-${id}`}
        data-testid="library-agent-card"
        data-agent-id={id}
        className="group relative inline-flex h-[10.625rem] w-full max-w-[25rem] flex-col items-start justify-start gap-2.5 rounded-medium border border-zinc-100 bg-white hover:shadow-md"
        transition={{
          type: "spring",
          damping: 25,
          stiffness: 300,
        }}
        style={{ willChange: "transform" }}
      >
      <NextLink href={`/library/agents/${id}`} className="flex-shrink-0">
        <div className="relative flex items-center gap-2 px-4 pt-3">
          <Avatar className="h-4 w-4 rounded-full">
@@ -152,7 +125,6 @@ export function LibraryAgentCard({
        )}
      </div>
    </div>
      </motion.div>
    </div>
  );
}

@@ -5,10 +5,6 @@ import {
  useDeleteV2DeleteLibraryAgent,
  usePostV2ForkLibraryAgent,
} from "@/app/api/__generated__/endpoints/library/library";
import {
  usePostV2BulkMoveAgents,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
@@ -26,7 +22,6 @@ import { useQueryClient } from "@tanstack/react-query";
import Link from "next/link";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { MoveToFolderDialog } from "../../MoveToFolderDialog/MoveToFolderDialog";

interface AgentCardMenuProps {
  agent: LibraryAgent;
@@ -37,25 +32,11 @@ export function AgentCardMenu({ agent }: AgentCardMenuProps) {
  const queryClient = useQueryClient();
  const router = useRouter();
  const [showDeleteDialog, setShowDeleteDialog] = useState(false);
  const [showMoveDialog, setShowMoveDialog] = useState(false);
  const [isDeletingAgent, setIsDeletingAgent] = useState(false);
  const [isDuplicatingAgent, setIsDuplicatingAgent] = useState(false);
  const [isRemovingFromFolder, setIsRemovingFromFolder] = useState(false);

  const { mutateAsync: deleteAgent } = useDeleteV2DeleteLibraryAgent();
  const { mutateAsync: forkAgent } = usePostV2ForkLibraryAgent();
  const { mutateAsync: bulkMoveAgents } = usePostV2BulkMoveAgents({
    mutation: {
      onSuccess: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryAgentsQueryKey(),
        });
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
      },
    },
  });

  async function handleDuplicateAgent() {
    if (!agent.id) return;
@@ -89,37 +70,6 @@ export function AgentCardMenu({ agent }: AgentCardMenuProps) {
    }
  }

  async function handleRemoveFromFolder() {
    if (!agent.id) return;

    setIsRemovingFromFolder(true);

    try {
      await bulkMoveAgents({
        data: {
          agent_ids: [agent.id],
          folder_id: undefined,
        },
      });

      toast({
        title: "Removed from folder",
        description: "Agent has been moved back to your library.",
      });
    } catch (error: unknown) {
      toast({
        title: "Failed to remove from folder",
        description:
          error instanceof Error
            ? error.message
            : "An unexpected error occurred.",
        variant: "destructive",
      });
    } finally {
      setIsRemovingFromFolder(false);
    }
  }

  async function handleDeleteAgent() {
    if (!agent.id) return;

@@ -188,31 +138,6 @@ export function AgentCardMenu({ agent }: AgentCardMenuProps) {
            Duplicate agent
          </DropdownMenuItem>
          <DropdownMenuSeparator />
          <DropdownMenuItem
            onClick={(e) => {
              e.stopPropagation();
              setShowMoveDialog(true);
            }}
            className="flex items-center gap-2"
          >
            Move to folder
          </DropdownMenuItem>
          {agent.folder_id && (
            <>
              <DropdownMenuSeparator />
              <DropdownMenuItem
                onClick={(e) => {
                  e.stopPropagation();
                  handleRemoveFromFolder();
                }}
                disabled={isRemovingFromFolder}
                className="flex items-center gap-2"
              >
                Remove from folder
              </DropdownMenuItem>
            </>
          )}
          <DropdownMenuSeparator />
          <DropdownMenuItem
            onClick={(e) => {
              e.stopPropagation();
@@ -258,14 +183,6 @@ export function AgentCardMenu({ agent }: AgentCardMenuProps) {
        </div>
      </Dialog.Content>
    </Dialog>

      <MoveToFolderDialog
        agentId={agent.id}
        agentName={agent.name}
        currentFolderId={agent.folder_id}
        isOpen={showMoveDialog}
        setIsOpen={setShowMoveDialog}
      />
    </>
  );
}

@@ -3,12 +3,10 @@
import { cn } from "@/lib/utils";
import { HeartIcon } from "@phosphor-icons/react";
import type { MouseEvent } from "react";
import { useRef } from "react";
import { motion, AnimatePresence } from "framer-motion";

interface FavoriteButtonProps {
  isFavorite: boolean;
  onClick: (e: MouseEvent<HTMLButtonElement>, position: { x: number; y: number }) => void;
  onClick: (e: MouseEvent<HTMLButtonElement>) => void;
  className?: string;
}

@@ -17,46 +15,25 @@ export function FavoriteButton({
  onClick,
  className,
}: FavoriteButtonProps) {
  const buttonRef = useRef<HTMLButtonElement>(null);

  function handleClick(e: MouseEvent<HTMLButtonElement>) {
    const rect = buttonRef.current?.getBoundingClientRect();
    const position = rect
      ? { x: rect.left + rect.width / 2 - 12, y: rect.top + rect.height / 2 - 12 }
      : { x: 0, y: 0 };
    onClick(e, position);
  }

  return (
    <button
      ref={buttonRef}
      onClick={handleClick}
      onClick={onClick}
      className={cn(
        "rounded-full p-2 transition-all duration-200",
        "hover:scale-110 active:scale-95",
        "hover:scale-110",
        !isFavorite && "opacity-0 group-hover:opacity-100",
        className,
      )}
      aria-label={isFavorite ? "Remove from favorites" : "Add to favorites"}
    >
      <AnimatePresence mode="wait" initial={false}>
        <motion.div
          key={isFavorite ? "filled" : "empty"}
          initial={{ scale: 0.5, opacity: 0 }}
          animate={{ scale: 1, opacity: 1 }}
          exit={{ scale: 0.5, opacity: 0 }}
          transition={{ type: "spring", damping: 15, stiffness: 300 }}
        >
          <HeartIcon
            size={20}
            weight={isFavorite ? "fill" : "regular"}
            className={cn(
              "transition-colors duration-200",
              isFavorite ? "text-red-500" : "text-gray-600 hover:text-red-500",
            )}
          />
        </motion.div>
      </AnimatePresence>
      <HeartIcon
        size={20}
        weight={isFavorite ? "fill" : "regular"}
        className={cn(
          "transition-colors duration-200",
          isFavorite ? "text-red-500" : "text-gray-600 hover:text-red-500",
        )}
      />
    </button>
  );
}

@@ -14,11 +14,11 @@ import { updateFavoriteInQueries } from "./helpers";

interface Props {
  agent: LibraryAgent;
  onFavoriteAdd?: (position: { x: number; y: number }) => void;
}

export function useLibraryAgentCard({ agent, onFavoriteAdd }: Props) {
  const { id, is_favorite, creator_image_url, marketplace_listing } = agent;
export function useLibraryAgentCard({ agent }: Props) {
  const { id, name, is_favorite, creator_image_url, marketplace_listing } =
    agent;

  const isFromMarketplace = Boolean(marketplace_listing);
  const [isFavorite, setIsFavorite] = useState(is_favorite);
@@ -49,31 +49,26 @@ export function useLibraryAgentCard({ agent, onFavoriteAdd }: Props) {
    });
  }

  async function handleToggleFavorite(
    e: React.MouseEvent,
    position: { x: number; y: number }
  ) {
  async function handleToggleFavorite(e: React.MouseEvent) {
    e.preventDefault();
    e.stopPropagation();

    const newIsFavorite = !isFavorite;

    // Optimistic update - update UI immediately
    setIsFavorite(newIsFavorite);
    updateQueryData(newIsFavorite);

    // Trigger animation immediately for adding to favorites
    if (newIsFavorite && onFavoriteAdd) {
      onFavoriteAdd(position);
    }

    try {
      await updateLibraryAgent({
        libraryAgentId: id,
        data: { is_favorite: newIsFavorite },
      });

      toast({
        title: newIsFavorite ? "Added to favorites" : "Removed from favorites",
        description: `${name} has been ${newIsFavorite ? "added to" : "removed from"} your favorites.`,
      });
    } catch {
      // Revert on failure
      setIsFavorite(!newIsFavorite);
      updateQueryData(!newIsFavorite);

@@ -1,66 +1,30 @@
"use client";

import { LibraryAgentSort } from "@/app/api/__generated__/models/libraryAgentSort";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { InfiniteScroll } from "@/components/contextual/InfiniteScroll/InfiniteScroll";
import { LibraryActionSubHeader } from "../LibraryActionSubHeader/LibraryActionSubHeader";
import { LibraryAgentCard } from "../LibraryAgentCard/LibraryAgentCard";
import { LibraryFolder } from "../LibraryFolder/LibraryFolder";
import { LibrarySubSection } from "../LibrarySubSection/LibrarySubSection";
import { Button } from "@/components/atoms/Button/Button";
import { ArrowLeftIcon, HeartIcon } from "@phosphor-icons/react";
import { Text } from "@/components/atoms/Text/Text";
import { Tab } from "../LibraryTabs/LibraryTabs";
import { LayoutGroup } from "framer-motion";
import { LibraryFolderEditDialog } from "../LibraryFolderEditDialog/LibraryFolderEditDialog";
import { LibraryFolderDeleteDialog } from "../LibraryFolderDeleteDialog/LibraryFolderDeleteDialog";
import { useLibraryAgentList } from "./useLibraryAgentList";

interface Props {
  searchTerm: string;
  librarySort: LibraryAgentSort;
  setLibrarySort: (value: LibraryAgentSort) => void;
  selectedFolderId: string | null;
  onFolderSelect: (folderId: string | null) => void;
  tabs: Tab[];
  activeTab: string;
  onTabChange: (tabId: string) => void;
}

export function LibraryAgentList({
  searchTerm,
  librarySort,
  setLibrarySort,
  selectedFolderId,
  onFolderSelect,
  tabs,
  activeTab,
  onTabChange,
}: Props) {
  const {
    isFavoritesTab,
    agentLoading,
    agentCount,
    agents,
    allAgents: agents,
    hasNextPage,
    isFetchingNextPage,
    fetchNextPage,
    foldersData,
    currentFolder,
    showFolders,
    editingFolder,
    setEditingFolder,
    deletingFolder,
    setDeletingFolder,
    handleAgentDrop,
    handleFolderDeleted,
  } = useLibraryAgentList({
    searchTerm,
    librarySort,
    selectedFolderId,
    onFolderSelect,
    activeTab,
  });
  } = useLibraryAgentList({ searchTerm, librarySort });

  return (
    <>
@@ -68,42 +32,11 @@ export function LibraryAgentList({
        agentCount={agentCount}
        setLibrarySort={setLibrarySort}
      />
      {!selectedFolderId && (
        <LibrarySubSection
          tabs={tabs}
          activeTab={activeTab}
          onTabChange={onTabChange}
        />
      )}

      <div>
        {selectedFolderId && (
          <div className="mb-4 flex items-center gap-3">
            <Button
              variant="ghost"
              size="small"
              onClick={() => onFolderSelect(null)}
              className="gap-2"
            >
              <ArrowLeftIcon className="h-4 w-4" />
              Back to Library
            </Button>
            {currentFolder && (
              <Text variant="h4" className="text-zinc-700">
                {currentFolder.icon} {currentFolder.name}
              </Text>
            )}
          </div>
        )}
        <div className="px-2">
          {agentLoading ? (
            <div className="flex h-[200px] items-center justify-center">
              <LoadingSpinner size="large" />
            </div>
          ) : isFavoritesTab && agents.length === 0 ? (
            <div className="flex h-[200px] flex-col items-center justify-center gap-2 text-zinc-500">
              <HeartIcon className="h-10 w-10" />
              <Text variant="body">No favorite agents yet</Text>
            </div>
          ) : (
            <InfiniteScroll
              isFetchingNextPage={isFetchingNextPage}
@@ -111,52 +44,14 @@ export function LibraryAgentList({
              hasNextPage={hasNextPage}
              loader={<LoadingSpinner size="medium" />}
            >
              <LayoutGroup>
                <div className="grid grid-cols-1 gap-6 sm:grid-cols-2 md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4">
                  {showFolders &&
                    foldersData?.folders.map((folder) => (
                      <LibraryFolder
                        key={folder.id}
                        id={folder.id}
                        name={folder.name}
                        agentCount={folder.agent_count ?? 0}
                        color={folder.color ?? undefined}
                        icon={folder.icon ?? "📁"}
                        onAgentDrop={handleAgentDrop}
                        onClick={() => onFolderSelect(folder.id)}
                        onEdit={() => setEditingFolder(folder)}
                        onDelete={() => setDeletingFolder(folder)}
                      />
                    ))}
                  {agents.map((agent) => (
                    <LibraryAgentCard key={agent.id} agent={agent} />
                  ))}
                </div>
              </LayoutGroup>
              <div className="grid grid-cols-1 gap-6 sm:grid-cols-2 md:grid-cols-2 lg:grid-cols-3 xl:grid-cols-4">
                {agents.map((agent) => (
                  <LibraryAgentCard key={agent.id} agent={agent} />
                ))}
              </div>
            </InfiniteScroll>
          )}
        </div>

        {editingFolder && (
          <LibraryFolderEditDialog
            folder={editingFolder}
            isOpen={!!editingFolder}
            setIsOpen={(open) => {
              if (!open) setEditingFolder(null);
            }}
          />
        )}

        {deletingFolder && (
          <LibraryFolderDeleteDialog
            folder={deletingFolder}
            isOpen={!!deletingFolder}
            setIsOpen={(open) => {
              if (!open) setDeletingFolder(null);
            }}
            onDeleted={handleFolderDeleted}
          />
        )}
    </>
  );
}

@@ -1,71 +1,36 @@
"use client";

import { useGetV2ListLibraryAgentsInfinite } from "@/app/api/__generated__/endpoints/library/library";
import { getGetV2ListLibraryAgentsQueryKey } from "@/app/api/__generated__/endpoints/library/library";
import {
  useGetV2ListLibraryFolders,
  usePostV2BulkMoveAgents,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import type { getV2ListLibraryFoldersResponseSuccess } from "@/app/api/__generated__/endpoints/folders/folders";
import type { LibraryFolder } from "@/app/api/__generated__/models/libraryFolder";
import { LibraryAgentSort } from "@/app/api/__generated__/models/libraryAgentSort";
import {
  okData,
  getPaginatedTotalCount,
  getPaginationNextPageNumber,
  unpaginate,
} from "@/app/api/helpers";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useFavoriteAgents } from "../../hooks/useFavoriteAgents";
import { getQueryClient } from "@/lib/react-query/queryClient";
import { useQueryClient } from "@tanstack/react-query";
import { useEffect, useRef, useState } from "react";
import { useEffect, useRef } from "react";

interface Props {
  searchTerm: string;
  librarySort: LibraryAgentSort;
  selectedFolderId: string | null;
  onFolderSelect: (folderId: string | null) => void;
  activeTab: string;
}

export function useLibraryAgentList({
  searchTerm,
  librarySort,
  selectedFolderId,
  onFolderSelect,
  activeTab,
}: Props) {
  const isFavoritesTab = activeTab === "favorites";
  const { toast } = useToast();
  const stableQueryClient = getQueryClient();
  const queryClient = useQueryClient();
export function useLibraryAgentList({ searchTerm, librarySort }: Props) {
  const queryClient = getQueryClient();
  const prevSortRef = useRef<LibraryAgentSort | null>(null);

  const [editingFolder, setEditingFolder] = useState<LibraryFolder | null>(
    null,
  );
  const [deletingFolder, setDeletingFolder] = useState<LibraryFolder | null>(
    null,
  );

  // --- Agent list fetching ---

  const {
    data: agentsQueryData,
    fetchNextPage,
    hasNextPage,
    isFetchingNextPage,
    isLoading: allAgentsLoading,
    isLoading: agentLoading,
  } = useGetV2ListLibraryAgentsInfinite(
    {
      page: 1,
      page_size: 20,
      search_term: searchTerm || undefined,
      sort_by: librarySort,
      folder_id: selectedFolderId ?? undefined,
      include_root_only: selectedFolderId === null ? true : undefined,
    },
    {
      query: {
@@ -74,148 +39,28 @@ export function useLibraryAgentList({
    },
  );

  // Reset queries when sort changes to ensure fresh data with correct sorting
  useEffect(() => {
    if (prevSortRef.current !== null && prevSortRef.current !== librarySort) {
      stableQueryClient.resetQueries({
      // Reset all library agent queries to ensure fresh fetch with new sort
      queryClient.resetQueries({
        queryKey: ["/api/library/agents"],
      });
    }
    prevSortRef.current = librarySort;
  }, [librarySort, stableQueryClient]);
  }, [librarySort, queryClient]);

  const allAgentsList = agentsQueryData
  const allAgents = agentsQueryData
    ? unpaginate(agentsQueryData, "agents")
    : [];
  const allAgentsCount = getPaginatedTotalCount(agentsQueryData);

  // --- Favorites ---

  const favoriteAgentsData = useFavoriteAgents({ searchTerm });

  const {
    agentLoading,
    agentCount,
    allAgents: agents,
    hasNextPage: agentsHasNextPage,
    isFetchingNextPage: agentsIsFetchingNextPage,
    fetchNextPage: agentsFetchNextPage,
  } = isFavoritesTab
    ? favoriteAgentsData
    : {
        agentLoading: allAgentsLoading,
        agentCount: allAgentsCount,
        allAgents: allAgentsList,
        hasNextPage: hasNextPage,
        isFetchingNextPage: isFetchingNextPage,
        fetchNextPage: fetchNextPage,
      };

  // --- Folders ---

  const { data: foldersData } = useGetV2ListLibraryFolders(undefined, {
    query: { select: okData },
  });

  const { mutate: moveAgentToFolder } = usePostV2BulkMoveAgents({
    mutation: {
      onMutate: async ({ data }) => {
        await queryClient.cancelQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
        await queryClient.cancelQueries({
          queryKey: getGetV2ListLibraryAgentsQueryKey(),
        });

        const previousFolders = queryClient.getQueriesData<
          getV2ListLibraryFoldersResponseSuccess
        >({ queryKey: getGetV2ListLibraryFoldersQueryKey() });

        if (data.folder_id) {
          queryClient.setQueriesData<getV2ListLibraryFoldersResponseSuccess>(
            { queryKey: getGetV2ListLibraryFoldersQueryKey() },
            (old) => {
              if (!old?.data?.folders) return old;
              return {
                ...old,
                data: {
                  ...old.data,
                  folders: old.data.folders.map((f) =>
                    f.id === data.folder_id
                      ? {
                          ...f,
                          agent_count:
                            (f.agent_count ?? 0) + data.agent_ids.length,
                        }
                      : f,
                  ),
                },
              };
            },
          );
        }

        return { previousFolders };
      },
      onError: (_error, _variables, context) => {
        if (context?.previousFolders) {
          for (const [queryKey, data] of context.previousFolders) {
            queryClient.setQueryData(queryKey, data);
          }
        }
        toast({
          title: "Error",
          description: "Failed to move agent. Please try again.",
          variant: "destructive",
        });
      },
      onSettled: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryAgentsQueryKey(),
        });
      },
    },
  });

  function handleAgentDrop(agentId: string, folderId: string) {
    moveAgentToFolder({
      data: {
        agent_ids: [agentId],
        folder_id: folderId,
      },
    });
  }

  const currentFolder = selectedFolderId
    ? foldersData?.folders.find((f) => f.id === selectedFolderId)
    : null;

  const showFolders = !isFavoritesTab && !selectedFolderId;

  function handleFolderDeleted() {
    if (selectedFolderId === deletingFolder?.id) {
      onFolderSelect(null);
    }
  }
  const agentCount = getPaginatedTotalCount(agentsQueryData);

  return {
    isFavoritesTab,
    allAgents,
    agentLoading,
    hasNextPage,
    agentCount,
    agents,
    hasNextPage: agentsHasNextPage,
    isFetchingNextPage: agentsIsFetchingNextPage,
    fetchNextPage: agentsFetchNextPage,
    foldersData,
    currentFolder,
    showFolders,
    editingFolder,
    setEditingFolder,
    deletingFolder,
    setDeletingFolder,
    handleAgentDrop,
    handleFolderDeleted,
    isFetchingNextPage,
    fetchNextPage,
  };
}

@@ -1,353 +0,0 @@
import { useState } from "react";
import { motion } from "framer-motion";
import { Text } from "@/components/atoms/Text/Text";

type FolderSize = "xs" | "sm" | "md" | "lg" | "xl";
export type FolderColorName =
  | "neutral"
  | "slate"
  | "zinc"
  | "stone"
  | "red"
  | "orange"
  | "amber"
  | "yellow"
  | "lime"
  | "green"
  | "emerald"
  | "teal"
  | "cyan"
  | "sky"
  | "blue"
  | "indigo"
  | "violet"
  | "purple"
  | "fuchsia"
  | "pink"
  | "rose";

export type FolderColor = FolderColorName | (string & {});

const hexToColorName: Record<string, FolderColorName> = {
  "#3B82F6": "blue",
  "#3b82f6": "blue",
  "#A855F7": "purple",
  "#a855f7": "purple",
  "#10B981": "emerald",
  "#10b981": "emerald",
  "#F97316": "orange",
  "#f97316": "orange",
  "#EC4899": "pink",
  "#ec4899": "pink",
};

function resolveColor(color: FolderColor | undefined): FolderColorName {
  if (!color) return "blue";
  if (color in hexToColorName) return hexToColorName[color];
  if (color in colorMap) return color as FolderColorName;
  return "blue";
}

interface Props {
  className?: string;
  size?: FolderSize | number;
  color?: FolderColor;
  icon?: string;
  isOpen?: boolean;
}

const sizeMap: Record<FolderSize, number> = {
  xs: 0.4,
  sm: 0.75,
  md: 1,
  lg: 1.25,
  xl: 1.5,
};

const colorMap: Record<
  FolderColorName,
  {
    bg: string;
    border: string;
    borderLight: string;
    fill: string;
    stroke: string;
  }
> = {
  neutral: {
    bg: "bg-neutral-300",
    border: "border-neutral-300",
    borderLight: "border-neutral-200",
    fill: "fill-neutral-300",
    stroke: "stroke-neutral-400",
  },
  slate: {
    bg: "bg-slate-300",
    border: "border-slate-300",
    borderLight: "border-slate-200",
    fill: "fill-slate-300",
    stroke: "stroke-slate-400",
  },
  zinc: {
    bg: "bg-zinc-300",
    border: "border-zinc-300",
    borderLight: "border-zinc-200",
    fill: "fill-zinc-300",
    stroke: "stroke-zinc-400",
  },
  stone: {
    bg: "bg-stone-300",
    border: "border-stone-300",
    borderLight: "border-stone-200",
    fill: "fill-stone-300",
    stroke: "stroke-stone-400",
  },
  red: {
    bg: "bg-red-300",
    border: "border-red-300",
    borderLight: "border-red-200",
    fill: "fill-red-300",
    stroke: "stroke-red-400",
  },
  orange: {
    bg: "bg-orange-200",
    border: "border-orange-200",
    borderLight: "border-orange-200",
    fill: "fill-orange-200",
    stroke: "stroke-orange-400",
  },
  amber: {
    bg: "bg-amber-200",
    border: "border-amber-200",
    borderLight: "border-amber-200",
    fill: "fill-amber-200",
    stroke: "stroke-amber-400",
  },
  yellow: {
    bg: "bg-yellow-200",
    border: "border-yellow-200",
    borderLight: "border-yellow-200",
    fill: "fill-yellow-200",
    stroke: "stroke-yellow-400",
  },
  lime: {
    bg: "bg-lime-300",
    border: "border-lime-300",
    borderLight: "border-lime-200",
    fill: "fill-lime-300",
    stroke: "stroke-lime-400",
  },
  green: {
    bg: "bg-green-200",
    border: "border-green-200",
    borderLight: "border-green-200",
    fill: "fill-green-200",
    stroke: "stroke-green-400",
  },
  emerald: {
    bg: "bg-emerald-300",
    border: "border-emerald-300",
    borderLight: "border-emerald-200",
    fill: "fill-emerald-300",
    stroke: "stroke-emerald-400",
  },
  teal: {
    bg: "bg-teal-300",
    border: "border-teal-300",
    borderLight: "border-teal-200",
    fill: "fill-teal-300",
    stroke: "stroke-teal-400",
  },
  cyan: {
    bg: "bg-cyan-300",
    border: "border-cyan-300",
    borderLight: "border-cyan-200",
    fill: "fill-cyan-300",
    stroke: "stroke-cyan-400",
  },
  sky: {
    bg: "bg-sky-300",
    border: "border-sky-300",
    borderLight: "border-sky-200",
    fill: "fill-sky-300",
    stroke: "stroke-sky-400",
  },
  blue: {
    bg: "bg-blue-300",
    border: "border-blue-300",
    borderLight: "border-blue-200",
    fill: "fill-blue-300",
    stroke: "stroke-blue-400",
  },
  indigo: {
    bg: "bg-indigo-300",
    border: "border-indigo-300",
    borderLight: "border-indigo-200",
    fill: "fill-indigo-300",
    stroke: "stroke-indigo-400",
  },
  violet: {
    bg: "bg-violet-300",
    border: "border-violet-300",
    borderLight: "border-violet-200",
    fill: "fill-violet-300",
    stroke: "stroke-violet-400",
  },
  purple: {
    bg: "bg-purple-200",
    border: "border-purple-200",
    borderLight: "border-purple-200",
    fill: "fill-purple-200",
    stroke: "stroke-purple-400",
  },
  fuchsia: {
    bg: "bg-fuchsia-300",
    border: "border-fuchsia-300",
    borderLight: "border-fuchsia-200",
    fill: "fill-fuchsia-300",
    stroke: "stroke-fuchsia-400",
  },
  pink: {
    bg: "bg-pink-300",
    border: "border-pink-300",
    borderLight: "border-pink-200",
    fill: "fill-pink-300",
    stroke: "stroke-pink-400",
  },
  rose: {
    bg: "bg-rose-300",
    border: "border-rose-300",
    borderLight: "border-rose-200",
    fill: "fill-rose-300",
    stroke: "stroke-rose-400",
  },
};

export function FolderIcon({
  className = "",
  size = "xs",
  color = "blue",
  icon,
  isOpen = false,
}: Props) {
  const scale = typeof size === "number" ? size : sizeMap[size];
  const resolvedColor = resolveColor(color);
  const colors = colorMap[resolvedColor];

  return (
    <div
      className={`group relative cursor-pointer ${className}`}
      style={{
        width: 320 * scale,
        height: 208 * scale,
      }}
    >
      <div
        className="h-52 w-80 origin-top-left"
        style={{ transform: `scale(${scale})`, perspective: "500px" }}
      >
        <div
          className={`folder-back relative mx-auto flex h-full w-[87.5%] justify-center overflow-visible rounded-3xl ${colors.bg} ${colors.border}`}
        >
          {[
            {
              initial: { rotate: -3, x: -38, y: 2 },
              open: { rotate: -8, x: -70, y: -75 },
              transition: {
                type: "spring" as const,
                bounce: 0.15,
                stiffness: 160,
                damping: 22,
              },
              className: "z-10",
            },
            {
              initial: { rotate: 0, x: 0, y: 0 },
              open: { rotate: 1, x: 2, y: -95 },
              transition: {
                type: "spring" as const,
                duration: 0.55,
                bounce: 0.12,
                stiffness: 190,
                damping: 24,
              },
              className: "z-20",
            },
            {
              initial: { rotate: 3.5, x: 42, y: 1 },
              open: { rotate: 9, x: 75, y: -80 },
              transition: {
                type: "spring" as const,
                duration: 0.58,
                bounce: 0.17,
                stiffness: 170,
                damping: 21,
              },
              className: "z-10",
            },
          ].map((page, i) => (
            <motion.div
              key={i}
              initial={page.initial}
              animate={isOpen ? page.open : page.initial}
              transition={page.transition}
              className={`absolute top-2 h-fit w-32 rounded-xl shadow-lg ${page.className}`}
            >
              <Page color={resolvedColor} />
            </motion.div>
          ))}
        </div>

        <motion.div
          animate={{
            rotateX: isOpen ? -15 : 0,
          }}
          transition={{ type: "spring", duration: 0.5, bounce: 0.25 }}
          className="absolute inset-x-0 -bottom-px z-30 mx-auto flex h-44 w-[87.5%] origin-bottom items-end justify-center overflow-visible"
          style={{ transformStyle: "preserve-3d" }}
        >
          <svg
            className="h-auto w-full"
            viewBox="0 0 173 109"
            fill="none"
            xmlns="http://www.w3.org/2000/svg"
            preserveAspectRatio="none"
          >
            <path
              className={`${colors.fill} ${colors.stroke}`}
              d="M15.0423 0.500003C0.5 0.500009 0.5 14.2547 0.5 14.2547V92.5C0.5 101.337 7.66344 108.5 16.5 108.5H156.5C165.337 108.5 172.5 101.337 172.5 92.5V34.3302C172.5 25.4936 165.355 18.3302 156.519 18.3302H108.211C98.1341 18.3302 91.2921 5.57144 82.0156 1.63525C80.3338 0.921645 78.2634 0.500002 75.7187 0.500003H15.0423Z"
            />
          </svg>
          <div className="absolute inset-0 flex items-center justify-center text-7xl">
            {icon}
          </div>
        </motion.div>
      </div>
    </div>
  );
}

interface PageProps {
  color: FolderColorName;
}

function Page({ color = "blue" }: PageProps) {
  const colors = colorMap[color];
  return (
    <div
      className={`h-full w-full rounded-xl border bg-white p-4 ${colors.borderLight}`}
    >
      <div className="flex flex-col gap-2">
        <Text variant="h5" className="text-black">
          agent.json
        </Text>
        {Array.from({ length: 8 }).map((_, i) => (
          <div key={i} className="flex gap-2">
            <div className="h-1.5 flex-1 rounded-full bg-neutral-100" />
            <div className="h-1.5 flex-1 rounded-full bg-neutral-100" />
          </div>
        ))}
      </div>
    </div>
  );
}
@@ -1,129 +0,0 @@
"use client";

import { Text } from "@/components/atoms/Text/Text";
import { Button } from "@/components/atoms/Button/Button";
import { FolderIcon, FolderColor } from "./FolderIcon";
import { useState } from "react";
import { PencilSimpleIcon, TrashIcon } from "@phosphor-icons/react";

interface Props {
  id: string;
  name: string;
  agentCount: number;
  color?: FolderColor;
  icon: string;
  onEdit?: () => void;
  onDelete?: () => void;
  onAgentDrop?: (agentId: string, folderId: string) => void;
  onClick?: () => void;
}

export function LibraryFolder({
  id,
  name,
  agentCount,
  color,
  icon,
  onEdit,
  onDelete,
  onAgentDrop,
  onClick,
}: Props) {
  const [isHovered, setIsHovered] = useState(false);
  const [isDragOver, setIsDragOver] = useState(false);

  function handleDragOver(e: React.DragEvent<HTMLDivElement>) {
    if (e.dataTransfer.types.includes("application/agent-id")) {
      e.preventDefault();
      e.dataTransfer.dropEffect = "move";
      setIsDragOver(true);
    }
  }

  function handleDragLeave() {
    setIsDragOver(false);
  }

  function handleDrop(e: React.DragEvent<HTMLDivElement>) {
    e.preventDefault();
    setIsDragOver(false);
    const agentId = e.dataTransfer.getData("application/agent-id");
    if (agentId && onAgentDrop) {
      onAgentDrop(agentId, id);
    }
  }

  return (
    <div
      data-testid="library-folder"
      data-folder-id={id}
      className={`group relative inline-flex h-[10.625rem] w-full max-w-[25rem] cursor-pointer flex-col items-start justify-start gap-2.5 rounded-medium border bg-white p-4 transition-all duration-200 hover:shadow-md ${
        isDragOver
          ? "border-blue-400 bg-blue-50 ring-2 ring-blue-200"
          : "border-zinc-100"
      }`}
      onMouseEnter={() => setIsHovered(true)}
      onMouseLeave={() => setIsHovered(false)}
      onDragOver={handleDragOver}
      onDragLeave={handleDragLeave}
      onDrop={handleDrop}
      onClick={onClick}
    >
      <div className="flex w-full items-start justify-between gap-4">
        {/* Left side - Folder name and agent count */}
        <div className="flex flex-1 flex-col gap-2">
          <Text
            variant="h5"
            data-testid="library-folder-name"
            className="line-clamp-2 hyphens-auto break-words"
          >
            {name}
          </Text>
          <Text
            variant="small"
            className="text-zinc-500"
            data-testid="library-folder-agent-count"
          >
            {agentCount} {agentCount === 1 ? "agent" : "agents"}
          </Text>
        </div>

        {/* Right side - Custom folder icon */}
        <div className="flex-shrink-0">
          <FolderIcon isOpen={isHovered} color={color} icon={icon} />
        </div>
      </div>

      {/* Action buttons - visible on hover */}
      <div
        className="flex items-center justify-end gap-2"
        data-testid="library-folder-actions"
      >
        <Button
          variant="icon"
          size="icon"
          aria-label="Edit agent"
          onClick={(e) => {
            e.stopPropagation();
            onEdit?.();
          }}
          className="h-8 w-8 p-2"
        >
          <PencilSimpleIcon className="h-4 w-4" />
        </Button>
        <Button
          variant="icon"
          size="icon"
          aria-label="Delete agent"
          onClick={(e) => {
            e.stopPropagation();
            onDelete?.();
          }}
          className="h-8 w-8 p-2 hover:border-red-300 hover:bg-red-50 hover:text-red-600"
        >
          <TrashIcon className="h-4 w-4" />
        </Button>
      </div>
    </div>
  );
}
@@ -1,241 +0,0 @@
"use client";

import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { Select } from "@/components/atoms/Select/Select";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
  Form,
  FormControl,
  FormField,
  FormItem,
  FormMessage,
} from "@/components/molecules/Form/Form";
import { zodResolver } from "@hookform/resolvers/zod";
import { FolderSimpleIcon } from "@phosphor-icons/react";
import { useState } from "react";
import { useForm } from "react-hook-form";
import { z } from "zod";
import { EmojiPicker } from "@ferrucc-io/emoji-picker";
import {
  usePostV2CreateFolder,
  useGetV2ListLibraryFolders,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import { okData } from "@/app/api/helpers";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useQueryClient } from "@tanstack/react-query";

const FOLDER_COLORS = [
  { value: "#3B82F6", label: "Blue" },
  { value: "#A855F7", label: "Purple" },
  { value: "#10B981", label: "Green" },
  { value: "#F97316", label: "Orange" },
  { value: "#EC4899", label: "Pink" },
];

export const libraryFolderCreationFormSchema = z.object({
  folderName: z.string().min(1, "Folder name is required"),
  folderColor: z.string().min(1, "Folder color is required"),
  folderIcon: z.string().min(1, "Folder icon is required"),
});

export default function LibraryFolderCreationDialog() {
  const [isOpen, setIsOpen] = useState(false);
  const queryClient = useQueryClient();
  const { toast } = useToast();

  const { data: foldersData } = useGetV2ListLibraryFolders(undefined, {
    query: { select: okData },
  });

  const { mutate: createFolder, isPending } = usePostV2CreateFolder({
    mutation: {
      onSuccess: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
        setIsOpen(false);
        form.reset();
        toast({
          title: "Folder created",
          description: "Your folder has been created successfully.",
        });
      },
      onError: () => {
        toast({
          title: "Error",
          description: "Failed to create folder. Please try again.",
          variant: "destructive",
        });
      },
    },
  });

  const form = useForm<z.infer<typeof libraryFolderCreationFormSchema>>({
    resolver: zodResolver(libraryFolderCreationFormSchema),
    defaultValues: {
      folderName: "",
      folderColor: "",
      folderIcon: "",
    },
  });

  function onSubmit(values: z.infer<typeof libraryFolderCreationFormSchema>) {
    const existingNames = (foldersData?.folders ?? []).map((f) =>
      f.name.toLowerCase(),
    );
    if (existingNames.includes(values.folderName.trim().toLowerCase())) {
      form.setError("folderName", {
        message: "A folder with this name already exists",
      });
      return;
    }

    createFolder({
      data: {
        name: values.folderName.trim(),
        color: values.folderColor,
        icon: values.folderIcon,
      },
    });
  }

  return (
    <Dialog
      title="Create Folder"
      styling={{ maxWidth: "30rem" }}
      controlled={{
        isOpen,
        set: setIsOpen,
      }}
      onClose={() => {
        setIsOpen(false);
      }}
    >
      <Dialog.Trigger>
        <Button
          data-testid="create-folder-button"
          variant="secondary"
          className="h-fit w-fit"
          size="small"
        >
          <FolderSimpleIcon width={18} height={18} />
          <span className="create-folder">Create folder</span>
        </Button>
      </Dialog.Trigger>
      <Dialog.Content>
        <Form
          form={form}
          onSubmit={(values) => onSubmit(values)}
          className="flex flex-col justify-center px-1 gap-2"
        >
          <FormField
            control={form.control}
            name="folderName"
            render={({ field }) => (
              <FormItem>
                <FormControl>
                  <Input
                    {...field}
                    id={field.name}
                    label="Folder name"
                    placeholder="Enter folder name"
                    className="w-full !mb-0"
                    wrapperClassName="!mb-0"
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="folderColor"
            render={({ field }) => (
              <FormItem>
                <FormControl>
                  <Select
                    id="folderColor"
                    label="Folder color"
                    placeholder="Select a color"
                    value={field.value}
                    onValueChange={field.onChange}
                    options={FOLDER_COLORS.map((color) => ({
                      value: color.value,
                      label: color.label,
                      icon: (
                        <div
                          className="h-4 w-4 rounded-full"
                          style={{ backgroundColor: color.value }}
                        />
                      ),
                    }))}
                    wrapperClassName="!mb-0"
                    renderItem={(option) => (
                      <div className="flex items-center gap-2">
                        {option.icon}
                        <span>{option.label}</span>
                      </div>
                    )}
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="folderIcon"
            render={({ field }) => (
              <FormItem>
                <div className="flex flex-col gap-2">
                  <Text variant="large-medium" as="span" className="text-black">
                    Folder icon
                  </Text>
                  <div className="flex flex-col gap-3">
                    <div className="flex items-center gap-3">
                      <Text variant="small" className="text-zinc-500">
                        Selected:
                      </Text>
                      <div className="flex h-10 w-10 items-center justify-center rounded-lg border border-zinc-200 bg-zinc-50 text-2xl">
                        {form.watch("folderIcon") || (
                          <span className="text-sm text-zinc-400">—</span>
                        )}
                      </div>
                    </div>
                    <div className="h-[295px] w-full overflow-hidden">
                      <EmojiPicker
                        onEmojiSelect={(emoji) => {
                          field.onChange(emoji);
                        }}
                        emojiSize={32}
                        className="w-full rounded-2xl px-2"
                      >
                        <EmojiPicker.Group>
                          <EmojiPicker.List hideStickyHeader containerHeight={295} />
                        </EmojiPicker.Group>
                      </EmojiPicker>
                    </div>
                  </div>
                  <FormMessage />
                </div>
              </FormItem>
            )}
          />

          <Button
            type="submit"
            variant="primary"
            className="mt-2 min-w-[18rem]"
            disabled={!form.formState.isValid || isPending}
            loading={isPending}
          >
            Create
          </Button>
        </Form>
      </Dialog.Content>
    </Dialog>
  );
}
@@ -1,96 +0,0 @@
"use client";

import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useToast } from "@/components/molecules/Toast/use-toast";
import {
  useDeleteV2DeleteFolder,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import { getGetV2ListLibraryAgentsQueryKey } from "@/app/api/__generated__/endpoints/library/library";
import { useQueryClient } from "@tanstack/react-query";
import type { LibraryFolder } from "@/app/api/__generated__/models/libraryFolder";

interface Props {
  folder: LibraryFolder;
  isOpen: boolean;
  setIsOpen: (open: boolean) => void;
  onDeleted?: () => void;
}

export function LibraryFolderDeleteDialog({
  folder,
  isOpen,
  setIsOpen,
  onDeleted,
}: Props) {
  const queryClient = useQueryClient();
  const { toast } = useToast();

  const { mutate: deleteFolder, isPending } = useDeleteV2DeleteFolder({
    mutation: {
      onSuccess: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryAgentsQueryKey(),
        });
        toast({
          title: "Folder deleted",
          description: `"${folder.name}" has been deleted.`,
        });
        setIsOpen(false);
        onDeleted?.();
      },
      onError: () => {
        toast({
          title: "Error",
          description: "Failed to delete folder. Please try again.",
          variant: "destructive",
        });
      },
    },
  });

  function handleDelete() {
    deleteFolder({ folderId: folder.id });
  }

  return (
    <Dialog
      controlled={{
        isOpen,
        set: setIsOpen,
      }}
      styling={{ maxWidth: "32rem" }}
      title="Delete folder"
    >
      <Dialog.Content>
        <div>
          <Text variant="large">
            Are you sure you want to delete “{folder.name}”? Agents inside this
            folder will be moved back to your library.
          </Text>
          <Dialog.Footer>
            <Button
              variant="secondary"
              disabled={isPending}
              onClick={() => setIsOpen(false)}
            >
              Cancel
            </Button>
            <Button
              variant="destructive"
              onClick={handleDelete}
              loading={isPending}
            >
              Delete Folder
            </Button>
          </Dialog.Footer>
        </div>
      </Dialog.Content>
    </Dialog>
  );
}
@@ -1,303 +0,0 @@
"use client";

import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { Select } from "@/components/atoms/Select/Select";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import {
  Form,
  FormControl,
  FormField,
  FormItem,
  FormMessage,
} from "@/components/molecules/Form/Form";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { zodResolver } from "@hookform/resolvers/zod";
import { useEffect } from "react";
import { useForm } from "react-hook-form";
import { z } from "zod";
import { EmojiPicker } from "@ferrucc-io/emoji-picker";
import {
  usePatchV2UpdateFolder,
  useGetV2ListLibraryFolders,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import { okData } from "@/app/api/helpers";
import { useQueryClient } from "@tanstack/react-query";
import type { LibraryFolder } from "@/app/api/__generated__/models/libraryFolder";
import type { getV2ListLibraryFoldersResponseSuccess } from "@/app/api/__generated__/endpoints/folders/folders";

const FOLDER_COLORS = [
  { value: "#3B82F6", label: "Blue" },
  { value: "#A855F7", label: "Purple" },
  { value: "#10B981", label: "Green" },
  { value: "#F97316", label: "Orange" },
  { value: "#EC4899", label: "Pink" },
];

const editFolderSchema = z.object({
  folderName: z.string().min(1, "Folder name is required"),
  folderColor: z.string().min(1, "Folder color is required"),
  folderIcon: z.string().min(1, "Folder icon is required"),
});

interface Props {
  folder: LibraryFolder;
  isOpen: boolean;
  setIsOpen: (open: boolean) => void;
}

export function LibraryFolderEditDialog({ folder, isOpen, setIsOpen }: Props) {
  const queryClient = useQueryClient();
  const { toast } = useToast();

  const { data: foldersData } = useGetV2ListLibraryFolders(undefined, {
    query: { select: okData },
  });

  const form = useForm<z.infer<typeof editFolderSchema>>({
    resolver: zodResolver(editFolderSchema),
    defaultValues: {
      folderName: folder.name,
      folderColor: folder.color ?? "",
      folderIcon: folder.icon ?? "",
    },
  });

  useEffect(() => {
    if (isOpen) {
      form.reset({
        folderName: folder.name,
        folderColor: folder.color ?? "",
        folderIcon: folder.icon ?? "",
      });
    }
  }, [isOpen, folder, form]);

  const { mutate: updateFolder, isPending } = usePatchV2UpdateFolder({
    mutation: {
      onMutate: async ({ folderId, data }) => {
        // Stop in-flight refetches from overwriting the optimistic update.
        await queryClient.cancelQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });

        // Snapshot the cached folder lists so they can be restored on error.
        const previousData = queryClient.getQueriesData<
          getV2ListLibraryFoldersResponseSuccess
        >({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });

        // Optimistically apply the edit to every cached folder list.
        queryClient.setQueriesData<getV2ListLibraryFoldersResponseSuccess>(
          { queryKey: getGetV2ListLibraryFoldersQueryKey() },
          (old) => {
            if (!old?.data?.folders) return old;
            return {
              ...old,
              data: {
                ...old.data,
                folders: old.data.folders.map((f) =>
                  f.id === folderId
                    ? {
                        ...f,
                        name: data.name ?? f.name,
                        color: data.color ?? f.color,
                        icon: data.icon ?? f.icon,
                      }
                    : f,
                ),
              },
            };
          },
        );

        return { previousData };
      },
      onError: (
        error: { detail?: string; response?: { detail?: string } },
        _variables,
        context,
      ) => {
        // Roll back to the snapshot taken in onMutate.
        if (context?.previousData) {
          for (const [queryKey, data] of context.previousData) {
            queryClient.setQueryData(queryKey, data);
          }
        }
        const detail = error.detail ?? error.response?.detail ?? "";
        if (
          typeof detail === "string" &&
          detail.toLowerCase().includes("already exists")
        ) {
          form.setError("folderName", {
            message: "A folder with this name already exists",
          });
          return;
        }
        toast({
          title: "Error",
          description: "Failed to update folder. Please try again.",
          variant: "destructive",
        });
      },
      onSuccess: () => {
        setIsOpen(false);
        toast({
          title: "Folder updated",
          description: "Your folder has been updated successfully.",
        });
      },
      onSettled: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
      },
    },
  });

  function onSubmit(values: z.infer<typeof editFolderSchema>) {
    const trimmedName = values.folderName.trim();
    const existingNames = (foldersData?.folders ?? [])
      .filter((f) => f.id !== folder.id)
      .map((f) => f.name.toLowerCase());

    if (existingNames.includes(trimmedName.toLowerCase())) {
      form.setError("folderName", {
        message: "A folder with this name already exists",
      });
      return;
    }

    updateFolder({
      folderId: folder.id,
      data: {
        name: trimmedName,
        color: values.folderColor,
        icon: values.folderIcon,
      },
    });
  }

  return (
    <Dialog
      title="Edit Folder"
      styling={{ maxWidth: "30rem" }}
      controlled={{
        isOpen,
        set: setIsOpen,
      }}
    >
      <Dialog.Content>
        <Form
          form={form}
          onSubmit={(values) => onSubmit(values)}
          className="flex flex-col justify-center gap-4 px-1"
        >
          <FormField
            control={form.control}
            name="folderName"
            render={({ field }) => (
              <FormItem>
                <FormControl>
                  <Input
                    {...field}
                    id={field.name}
                    label="Folder name"
                    placeholder="Enter folder name"
                    className="w-full rounded-[10px]"
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="folderColor"
            render={({ field }) => (
              <FormItem>
                <FormControl>
                  <Select
                    id="folderColor"
                    label="Folder color"
                    placeholder="Select a color"
                    value={field.value}
                    onValueChange={field.onChange}
                    options={FOLDER_COLORS.map((color) => ({
                      value: color.value,
                      label: color.label,
                      icon: (
                        <div
                          className="h-4 w-4 rounded-full"
                          style={{ backgroundColor: color.value }}
                        />
                      ),
                    }))}
                    renderItem={(option) => (
                      <div className="flex items-center gap-2">
                        {option.icon}
                        <span>{option.label}</span>
                      </div>
                    )}
                  />
                </FormControl>
                <FormMessage />
              </FormItem>
            )}
          />

          <FormField
            control={form.control}
            name="folderIcon"
            render={({ field }) => (
              <FormItem>
                <div className="flex flex-col gap-2">
                  <Text variant="large-medium" as="span" className="text-black">
                    Folder icon
                  </Text>
                  <div className="flex flex-col gap-3">
                    <div className="flex items-center gap-3">
                      <Text variant="small" className="text-zinc-500">
                        Selected:
                      </Text>
                      <div className="flex h-10 w-10 items-center justify-center rounded-lg border border-zinc-200 bg-zinc-50 text-2xl">
                        {form.watch("folderIcon") || (
                          <span className="text-sm text-zinc-400">—</span>
                        )}
                      </div>
                    </div>
                    <div className="h-[295px] w-full overflow-hidden">
                      <EmojiPicker
                        onEmojiSelect={(emoji) => {
                          field.onChange(emoji);
                        }}
                        emojiSize={32}
                        className="w-full"
                      >
                        <EmojiPicker.Group>
                          <EmojiPicker.List hideStickyHeader containerHeight={295} />
                        </EmojiPicker.Group>
                      </EmojiPicker>
                    </div>
                  </div>
                  <FormMessage />
                </div>
              </FormItem>
            )}
          />

          <Button
            type="submit"
            variant="primary"
            className="mt-2 min-w-[18rem]"
            disabled={!form.formState.isValid || isPending}
            loading={isPending}
          >
            Save Changes
          </Button>
        </Form>
      </Dialog.Content>
    </Dialog>
  );
}
@@ -1,17 +0,0 @@
import LibraryFolderCreationDialog from "../LibraryFolderCreationDialog/LibraryFolderCreationDialog";
import { LibraryTabs, Tab } from "../LibraryTabs/LibraryTabs";

interface Props {
  tabs: Tab[];
  activeTab: string;
  onTabChange: (tabId: string) => void;
}

export function LibrarySubSection({ tabs, activeTab, onTabChange }: Props) {
  return (
    <div className="flex justify-between items-center gap-4">
      <LibraryTabs tabs={tabs} activeTab={activeTab} onTabChange={onTabChange} />
      <LibraryFolderCreationDialog />
    </div>
  );
}
@@ -1,134 +0,0 @@
"use client";

import { useState, useEffect, useRef } from "react";
import { motion } from "framer-motion";
import { cn } from "@/lib/utils";
import { Icon } from "@phosphor-icons/react";
import { useFavoriteAnimation } from "../../context/FavoriteAnimationContext";

export interface Tab {
  id: string;
  title: string;
  icon: Icon;
}

interface Props {
  tabs: Tab[];
  activeTab: string;
  onTabChange: (tabId: string) => void;
  layoutId?: string;
}

export function LibraryTabs({
  tabs,
  activeTab,
  onTabChange,
  layoutId = "library-tabs",
}: Props) {
  const { registerFavoritesTabRef } = useFavoriteAnimation();

  return (
    <div className="flex gap-2 items-center">
      {tabs.map((tab) => (
        <TabButton
          key={tab.id}
          tab={tab}
          isActive={activeTab === tab.id}
          onSelect={onTabChange}
          layoutId={layoutId}
          onRefReady={tab.id === "favorites" ? registerFavoritesTabRef : undefined}
        />
      ))}
    </div>
  );
}

interface TabButtonProps {
  tab: Tab;
  isActive: boolean;
  onSelect: (tabId: string) => void;
  layoutId: string;
  onRefReady?: (element: HTMLElement | null) => void;
}

function TabButton({ tab, isActive, onSelect, layoutId, onRefReady }: TabButtonProps) {
  const [isLoaded, setIsLoaded] = useState(false);
  const buttonRef = useRef<HTMLDivElement>(null);

  useEffect(() => {
    if (isActive && !isLoaded) {
      setIsLoaded(true);
    }
  }, [isActive, isLoaded]);

  useEffect(() => {
    if (onRefReady) {
      onRefReady(buttonRef.current);
    }
  }, [onRefReady]);

  const ButtonIcon = tab.icon;
  const activeColor = "text-primary";

  return (
    <motion.div
      ref={buttonRef}
      layoutId={`${layoutId}-button-${tab.id}`}
      transition={{
        layout: {
          type: "spring",
          damping: 20,
          stiffness: 230,
          mass: 1.2,
          ease: [0.215, 0.61, 0.355, 1],
        },
      }}
      onClick={() => {
        onSelect(tab.id);
        setIsLoaded(true);
      }}
      className="w-fit h-fit flex"
      style={{ willChange: "transform" }}
    >
      <motion.div
        layout
        transition={{
          layout: {
            type: "spring",
            damping: 20,
            stiffness: 230,
            mass: 1.2,
          },
        }}
        className={cn(
          "flex items-center gap-1.5 bg-zinc-200 border-zinc-200 text-black hover:bg-zinc-300 hover:border-zinc-300 overflow-hidden transition-colors duration-75 ease-out py-2 px-3 cursor-pointer h-fit",
          isActive && activeColor,
          isActive ? "px-4" : "px-3",
        )}
        style={{
          borderRadius: "25px",
        }}
      >
        <motion.div
          layoutId={`${layoutId}-icon-${tab.id}`}
          className="shrink-0"
        >
          <ButtonIcon size={18} />
        </motion.div>
        {isActive && (
          <motion.div
            className="flex items-center"
            initial={isLoaded ? { opacity: 0, filter: "blur(4px)" } : false}
            animate={{ opacity: 1, filter: "blur(0px)" }}
            transition={{
              duration: isLoaded ? 0.2 : 0,
              ease: [0.86, 0, 0.07, 1],
            }}
          >
            <motion.span
              layoutId={`${layoutId}-text-${tab.id}`}
              className="font-sans font-medium text-sm text-black"
            >
              {tab.title}
            </motion.span>
          </motion.div>
        )}
      </motion.div>
    </motion.div>
  );
}
@@ -1,135 +0,0 @@
"use client";

import { Button } from "@/components/atoms/Button/Button";
import { Input } from "@/components/atoms/Input/Input";
import { Text } from "@/components/atoms/Text/Text";
import { Dialog } from "@/components/molecules/Dialog/Dialog";
import { useToast } from "@/components/molecules/Toast/use-toast";
import {
  useGetV2ListLibraryFolders,
  usePostV2BulkMoveAgents,
  getGetV2ListLibraryFoldersQueryKey,
} from "@/app/api/__generated__/endpoints/folders/folders";
import { getGetV2ListLibraryAgentsQueryKey } from "@/app/api/__generated__/endpoints/library/library";
import { okData } from "@/app/api/helpers";
import { useQueryClient } from "@tanstack/react-query";
import { useState } from "react";

interface Props {
  agentId: string;
  agentName: string;
  currentFolderId?: string | null;
  isOpen: boolean;
  setIsOpen: (open: boolean) => void;
}

export function MoveToFolderDialog({
  agentId,
  agentName,
  currentFolderId,
  isOpen,
  setIsOpen,
}: Props) {
  const queryClient = useQueryClient();
  const { toast } = useToast();
  const [search, setSearch] = useState("");

  const { data: foldersData } = useGetV2ListLibraryFolders(undefined, {
    query: { select: okData },
  });

  const { mutate: moveAgent, isPending } = usePostV2BulkMoveAgents({
    mutation: {
      onSuccess: () => {
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryAgentsQueryKey(),
        });
        queryClient.invalidateQueries({
          queryKey: getGetV2ListLibraryFoldersQueryKey(),
        });
        setIsOpen(false);
        setSearch("");
        toast({
          title: "Agent moved",
          description: `"${agentName}" has been moved.`,
        });
      },
      onError: () => {
        toast({
          title: "Error",
          description: "Failed to move agent. Please try again.",
          variant: "destructive",
        });
      },
    },
  });

  const folders = (foldersData?.folders ?? []).filter(
    (f) =>
      f.id !== currentFolderId &&
      f.name.toLowerCase().includes(search.toLowerCase()),
  );

  function handleMoveToFolder(folderId: string) {
    moveAgent({
      data: {
        agent_ids: [agentId],
        folder_id: folderId,
      },
    });
  }

  return (
    <Dialog
      controlled={{ isOpen, set: setIsOpen }}
      styling={{ maxWidth: "28rem" }}
      title="Move to folder"
      onClose={() => {
        setSearch("");
      }}
    >
      <Dialog.Content>
        <div className="flex flex-col gap-3">
          <Input
            id="search-folders"
            label="Search folders"
            placeholder="Search folders..."
            value={search}
            onChange={(e) => setSearch(e.target.value)}
            className="w-full"
          />
          <div className="max-h-[280px] overflow-y-auto">
            {folders.length === 0 ? (
              <div className="flex h-20 items-center justify-center">
                <Text variant="small" className="text-zinc-400">
                  No folders found
                </Text>
              </div>
            ) : (
              <div className="flex flex-col gap-1">
                {folders.map((folder) => (
                  <Button
                    key={folder.id}
                    variant="ghost"
                    className="w-full justify-start gap-3 px-3 py-2.5"
                    disabled={isPending}
                    onClick={() => handleMoveToFolder(folder.id)}
                  >
                    <span className="text-lg">{folder.icon ?? "📁"}</span>
                    <div className="flex flex-col items-start">
                      <Text variant="small-medium">{folder.name}</Text>
                      <Text variant="small" className="text-zinc-400">
                        {folder.agent_count ?? 0}{" "}
                        {(folder.agent_count ?? 0) === 1 ? "agent" : "agents"}
                      </Text>
                    </div>
                  </Button>
                ))}
              </div>
            )}
          </div>
        </div>
      </Dialog.Content>
    </Dialog>
  );
}
@@ -1,77 +0,0 @@
"use client";

import { createContext, useContext, useState, useCallback, useRef } from "react";
import { FlyingHeart } from "../components/FlyingHeart/FlyingHeart";

interface FavoriteAnimationContextType {
  triggerFavoriteAnimation: (startPosition: { x: number; y: number }) => void;
  registerFavoritesTabRef: (element: HTMLElement | null) => void;
}

const FavoriteAnimationContext =
  createContext<FavoriteAnimationContextType | null>(null);

interface FavoriteAnimationProviderProps {
  children: React.ReactNode;
  onAnimationComplete?: () => void;
}

export function FavoriteAnimationProvider({
  children,
  onAnimationComplete,
}: FavoriteAnimationProviderProps) {
  const [animationState, setAnimationState] = useState<{
    startPosition: { x: number; y: number } | null;
    targetPosition: { x: number; y: number } | null;
  }>({
    startPosition: null,
    targetPosition: null,
  });

  const favoritesTabRef = useRef<HTMLElement | null>(null);

  const registerFavoritesTabRef = useCallback((element: HTMLElement | null) => {
    favoritesTabRef.current = element;
  }, []);

  const triggerFavoriteAnimation = useCallback(
    (startPosition: { x: number; y: number }) => {
      if (favoritesTabRef.current) {
        const rect = favoritesTabRef.current.getBoundingClientRect();
        const targetPosition = {
          x: rect.left + rect.width / 2 - 12,
          y: rect.top + rect.height / 2 - 12,
        };
        setAnimationState({ startPosition, targetPosition });
      }
    },
    [],
  );

  function handleAnimationComplete() {
    setAnimationState({ startPosition: null, targetPosition: null });
    onAnimationComplete?.();
  }

  return (
    <FavoriteAnimationContext.Provider
      value={{ triggerFavoriteAnimation, registerFavoritesTabRef }}
    >
      {children}
      <FlyingHeart
        startPosition={animationState.startPosition}
        targetPosition={animationState.targetPosition}
        onAnimationComplete={handleAnimationComplete}
      />
    </FavoriteAnimationContext.Provider>
  );
}

export function useFavoriteAnimation() {
  const context = useContext(FavoriteAnimationContext);
  if (!context) {
    throw new Error(
      "useFavoriteAnimation must be used within FavoriteAnimationProvider",
    );
  }
  return context;
}
@@ -1,53 +1,28 @@
"use client";

import { useEffect, useState, useCallback } from "react";
import { HeartIcon, ListIcon } from "@phosphor-icons/react";
import { useEffect } from "react";
import { FavoritesSection } from "./components/FavoritesSection/FavoritesSection";
import { LibraryActionHeader } from "./components/LibraryActionHeader/LibraryActionHeader";
import { LibraryAgentList } from "./components/LibraryAgentList/LibraryAgentList";
import { Tab } from "./components/LibraryTabs/LibraryTabs";
import { useLibraryListPage } from "./components/useLibraryListPage";
import { FavoriteAnimationProvider } from "./context/FavoriteAnimationContext";

const LIBRARY_TABS: Tab[] = [
  { id: "all", title: "All", icon: ListIcon },
  { id: "favorites", title: "Favorites", icon: HeartIcon },
];

export default function LibraryPage() {
  const { searchTerm, setSearchTerm, librarySort, setLibrarySort } =
    useLibraryListPage();
  const [selectedFolderId, setSelectedFolderId] = useState<string | null>(null);
  const [activeTab, setActiveTab] = useState(LIBRARY_TABS[0].id);

  useEffect(() => {
    document.title = "Library – AutoGPT Platform";
  }, []);

  function handleTabChange(tabId: string) {
    setActiveTab(tabId);
    setSelectedFolderId(null);
  }

  const handleFavoriteAnimationComplete = useCallback(() => {
    setActiveTab("favorites");
    setSelectedFolderId(null);
  }, []);

  return (
    <FavoriteAnimationProvider onAnimationComplete={handleFavoriteAnimationComplete}>
      <main className="pt-160 container min-h-screen space-y-4 pb-20 pt-16 sm:px-8 md:px-12">
        <LibraryActionHeader setSearchTerm={setSearchTerm} />
        <LibraryAgentList
          searchTerm={searchTerm}
          librarySort={librarySort}
          setLibrarySort={setLibrarySort}
          selectedFolderId={selectedFolderId}
          onFolderSelect={setSelectedFolderId}
          tabs={LIBRARY_TABS}
          activeTab={activeTab}
          onTabChange={handleTabChange}
        />
      </main>
    </FavoriteAnimationProvider>
    <main className="pt-160 container min-h-screen space-y-4 pb-20 pt-16 sm:px-8 md:px-12">
      <LibraryActionHeader setSearchTerm={setSearchTerm} />
      <FavoritesSection searchTerm={searchTerm} />
      <LibraryAgentList
        searchTerm={searchTerm}
        librarySort={librarySort}
        setLibrarySort={setLibrarySort}
      />
    </main>
  );
}
@@ -1053,6 +1053,7 @@
              "$ref": "#/components/schemas/ClarificationNeededResponse"
            },
            { "$ref": "#/components/schemas/BlockListResponse" },
            { "$ref": "#/components/schemas/BlockDetailsResponse" },
            { "$ref": "#/components/schemas/BlockOutputResponse" },
            { "$ref": "#/components/schemas/DocSearchResultsResponse" },
            { "$ref": "#/components/schemas/DocPageResponse" },
@@ -3588,29 +3589,6 @@
              "title": "Page Size"
            },
            "description": "Number of agents per page (must be >= 1)"
          },
          {
            "name": "folder_id",
            "in": "query",
            "required": false,
            "schema": {
              "anyOf": [{ "type": "string" }, { "type": "null" }],
              "description": "Filter by folder ID",
              "title": "Folder Id"
            },
            "description": "Filter by folder ID"
          },
          {
            "name": "include_root_only",
            "in": "query",
            "required": false,
            "schema": {
              "type": "boolean",
              "description": "Only return agents without a folder (root-level agents)",
              "default": false,
              "title": "Include Root Only"
            },
            "description": "Only return agents without a folder (root-level agents)"
          }
        ],
        "responses": {
@@ -3978,340 +3956,6 @@
          }
        }
      },
      "/api/library/folders": {
        "get": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "List Library Folders",
          "description": "List folders for the authenticated user.\n\nArgs:\n user_id: ID of the authenticated user.\n parent_id: Optional parent folder ID to filter by.\n include_counts: Whether to include agent and subfolder counts.\n\nReturns:\n A FolderListResponse containing folders.",
          "operationId": "getV2List library folders",
          "security": [{ "HTTPBearerJWT": [] }],
          "parameters": [
            {
              "name": "parent_id",
              "in": "query",
              "required": false,
              "schema": {
                "anyOf": [{ "type": "string" }, { "type": "null" }],
                "description": "Filter by parent folder ID. If not provided, returns root-level folders.",
                "title": "Parent Id"
              },
              "description": "Filter by parent folder ID. If not provided, returns root-level folders."
            },
            {
              "name": "include_counts",
              "in": "query",
              "required": false,
              "schema": {
                "type": "boolean",
                "description": "Include agent and subfolder counts",
                "default": true,
                "title": "Include Counts"
              },
              "description": "Include agent and subfolder counts"
            }
          ],
          "responses": {
            "200": {
              "description": "List of folders",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/FolderListResponse" }
                }
              }
            },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        },
        "post": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Create Folder",
          "description": "Create a new folder.\n\nArgs:\n payload: The folder creation request.\n user_id: ID of the authenticated user.\n\nReturns:\n The created LibraryFolder.",
          "operationId": "postV2Create folder",
          "security": [{ "HTTPBearerJWT": [] }],
          "requestBody": {
            "required": true,
            "content": {
              "application/json": {
                "schema": { "$ref": "#/components/schemas/FolderCreateRequest" }
              }
            }
          },
          "responses": {
            "201": {
              "description": "Folder created successfully",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/LibraryFolder" }
                }
              }
            },
            "400": { "description": "Validation error" },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Parent folder not found" },
            "409": { "description": "Folder name conflict" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        }
      },
      "/api/library/folders/agents/bulk-move": {
        "post": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Bulk Move Agents",
          "description": "Move multiple agents to a folder.\n\nArgs:\n payload: The bulk move request with agent IDs and target folder.\n user_id: ID of the authenticated user.\n\nReturns:\n The updated LibraryAgents.",
          "operationId": "postV2Bulk move agents",
          "requestBody": {
            "content": {
              "application/json": {
                "schema": { "$ref": "#/components/schemas/BulkMoveAgentsRequest" }
              }
            },
            "required": true
          },
          "responses": {
            "200": {
              "description": "Agents moved successfully",
              "content": {
                "application/json": {
                  "schema": {
                    "items": { "$ref": "#/components/schemas/LibraryAgent" },
                    "type": "array",
                    "title": "Response Postv2Bulk Move Agents"
                  }
                }
              }
            },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Folder not found" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          },
          "security": [{ "HTTPBearerJWT": [] }]
        }
      },
      "/api/library/folders/tree": {
        "get": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Get Folder Tree",
          "description": "Get the full folder tree for the authenticated user.\n\nArgs:\n user_id: ID of the authenticated user.\n\nReturns:\n A FolderTreeResponse containing the nested folder structure.",
          "operationId": "getV2Get folder tree",
          "responses": {
            "200": {
              "description": "Folder tree structure",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/FolderTreeResponse" }
                }
              }
            },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "500": { "description": "Server error" }
          },
          "security": [{ "HTTPBearerJWT": [] }]
        }
      },
      "/api/library/folders/{folder_id}": {
        "delete": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Delete Folder",
          "description": "Soft-delete a folder and all its contents.\n\nArgs:\n folder_id: ID of the folder to delete.\n user_id: ID of the authenticated user.\n\nReturns:\n 204 No Content if successful.",
          "operationId": "deleteV2Delete folder",
          "security": [{ "HTTPBearerJWT": [] }],
          "parameters": [
            {
              "name": "folder_id",
              "in": "path",
              "required": true,
              "schema": { "type": "string", "title": "Folder Id" }
            }
          ],
          "responses": {
            "204": { "description": "Folder deleted successfully" },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Folder not found" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        },
        "get": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Get Folder",
          "description": "Get a specific folder.\n\nArgs:\n folder_id: ID of the folder to retrieve.\n user_id: ID of the authenticated user.\n\nReturns:\n The requested LibraryFolder.",
          "operationId": "getV2Get folder",
          "security": [{ "HTTPBearerJWT": [] }],
          "parameters": [
            {
              "name": "folder_id",
              "in": "path",
              "required": true,
              "schema": { "type": "string", "title": "Folder Id" }
            }
          ],
          "responses": {
            "200": {
              "description": "Folder details",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/LibraryFolder" }
                }
              }
            },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Folder not found" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        },
        "patch": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Update Folder",
          "description": "Update a folder's properties.\n\nArgs:\n folder_id: ID of the folder to update.\n payload: The folder update request.\n user_id: ID of the authenticated user.\n\nReturns:\n The updated LibraryFolder.",
          "operationId": "patchV2Update folder",
          "security": [{ "HTTPBearerJWT": [] }],
          "parameters": [
            {
              "name": "folder_id",
              "in": "path",
              "required": true,
              "schema": { "type": "string", "title": "Folder Id" }
            }
          ],
          "requestBody": {
            "required": true,
            "content": {
              "application/json": {
                "schema": { "$ref": "#/components/schemas/FolderUpdateRequest" }
              }
            }
          },
          "responses": {
            "200": {
              "description": "Folder updated successfully",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/LibraryFolder" }
                }
              }
            },
            "400": { "description": "Validation error" },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Folder not found" },
            "409": { "description": "Folder name conflict" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        }
      },
      "/api/library/folders/{folder_id}/move": {
        "post": {
          "tags": ["v2", "library", "folders", "private"],
          "summary": "Move Folder",
          "description": "Move a folder to a new parent.\n\nArgs:\n folder_id: ID of the folder to move.\n payload: The move request with target parent.\n user_id: ID of the authenticated user.\n\nReturns:\n The moved LibraryFolder.",
          "operationId": "postV2Move folder",
          "security": [{ "HTTPBearerJWT": [] }],
          "parameters": [
            {
              "name": "folder_id",
              "in": "path",
              "required": true,
              "schema": { "type": "string", "title": "Folder Id" }
            }
          ],
          "requestBody": {
            "required": true,
            "content": {
              "application/json": {
                "schema": { "$ref": "#/components/schemas/FolderMoveRequest" }
              }
            }
          },
          "responses": {
            "200": {
              "description": "Folder moved successfully",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/LibraryFolder" }
                }
              }
            },
            "400": {
              "description": "Validation error (circular reference, depth exceeded)"
            },
            "401": {
              "$ref": "#/components/responses/HTTP401NotAuthenticatedError"
            },
            "404": { "description": "Folder or target parent not found" },
            "409": { "description": "Folder name conflict in target location" },
            "422": {
              "description": "Validation Error",
              "content": {
                "application/json": {
                  "schema": { "$ref": "#/components/schemas/HTTPValidationError" }
                }
              }
            },
            "500": { "description": "Server error" }
          }
        }
      },
      "/api/library/presets": {
        "get": {
          "tags": ["v2", "presets"],
@@ -7315,6 +6959,58 @@
        "enum": ["run", "byte", "second"],
        "title": "BlockCostType"
      },
      "BlockDetails": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
          "name": { "type": "string", "title": "Name" },
          "description": { "type": "string", "title": "Description" },
          "inputs": {
            "additionalProperties": true,
            "type": "object",
            "title": "Inputs",
            "default": {}
          },
          "outputs": {
            "additionalProperties": true,
            "type": "object",
            "title": "Outputs",
            "default": {}
          },
          "credentials": {
            "items": { "$ref": "#/components/schemas/CredentialsMetaInput" },
            "type": "array",
            "title": "Credentials",
            "default": []
          }
        },
        "type": "object",
        "required": ["id", "name", "description"],
        "title": "BlockDetails",
        "description": "Detailed block information."
      },
      "BlockDetailsResponse": {
        "properties": {
          "type": {
            "$ref": "#/components/schemas/ResponseType",
            "default": "block_details"
          },
          "message": { "type": "string", "title": "Message" },
          "session_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Session Id"
          },
          "block": { "$ref": "#/components/schemas/BlockDetails" },
          "user_authenticated": {
            "type": "boolean",
            "title": "User Authenticated",
            "default": false
          }
        },
        "type": "object",
        "required": ["message", "block"],
        "title": "BlockDetailsResponse",
        "description": "Response for block details (first run_block attempt)."
      },
      "BlockInfo": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
@@ -7370,62 +7066,13 @@
        "properties": {
          "id": { "type": "string", "title": "Id" },
          "name": { "type": "string", "title": "Name" },
          "description": { "type": "string", "title": "Description" },
          "categories": {
            "items": { "type": "string" },
            "type": "array",
            "title": "Categories"
          },
          "input_schema": {
            "additionalProperties": true,
            "type": "object",
            "title": "Input Schema"
          },
          "output_schema": {
            "additionalProperties": true,
            "type": "object",
            "title": "Output Schema"
          },
          "required_inputs": {
            "items": { "$ref": "#/components/schemas/BlockInputFieldInfo" },
            "type": "array",
            "title": "Required Inputs",
            "description": "List of required input fields for this block"
          }
          "description": { "type": "string", "title": "Description" }
        },
        "type": "object",
        "required": [
          "id",
          "name",
          "description",
          "categories",
          "input_schema",
          "output_schema"
        ],
        "required": ["id", "name", "description"],
        "title": "BlockInfoSummary",
        "description": "Summary of a block for search results."
      },
      "BlockInputFieldInfo": {
        "properties": {
          "name": { "type": "string", "title": "Name" },
          "type": { "type": "string", "title": "Type" },
          "description": {
            "type": "string",
            "title": "Description",
            "default": ""
          },
          "required": {
            "type": "boolean",
            "title": "Required",
            "default": false
          },
          "default": { "anyOf": [{}, { "type": "null" }], "title": "Default" }
        },
        "type": "object",
        "required": ["name", "type"],
        "title": "BlockInputFieldInfo",
        "description": "Information about a block input field."
      },
      "BlockListResponse": {
        "properties": {
          "type": {
@@ -7443,12 +7090,7 @@
            "title": "Blocks"
          },
          "count": { "type": "integer", "title": "Count" },
          "query": { "type": "string", "title": "Query" },
          "usage_hint": {
            "type": "string",
            "title": "Usage Hint",
            "default": "To execute a block, call run_block with block_id set to the block's 'id' field and input_data containing the required fields from input_schema."
          }
          "query": { "type": "string", "title": "Query" }
        },
        "type": "object",
        "required": ["message", "blocks", "count", "query"],
@@ -7693,23 +7335,6 @@
        "required": ["file"],
        "title": "Body_postV2Upload submission media"
      },
      "BulkMoveAgentsRequest": {
        "properties": {
          "agent_ids": {
            "items": { "type": "string" },
            "type": "array",
            "title": "Agent Ids"
          },
          "folder_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Folder Id"
          }
        },
        "type": "object",
        "required": ["agent_ids"],
        "title": "BulkMoveAgentsRequest",
        "description": "Request model for moving multiple agents to a folder."
      },
      "ChangelogEntry": {
        "properties": {
          "version": { "type": "string", "title": "Version" },
@@ -8394,96 +8019,6 @@
        "title": "ExecutionStartedResponse",
        "description": "Response for run/schedule actions."
      },
      "FolderCreateRequest": {
        "properties": {
          "name": {
            "type": "string",
            "maxLength": 100,
            "minLength": 1,
            "title": "Name"
          },
          "icon": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Icon"
          },
          "color": {
            "anyOf": [
              { "type": "string", "pattern": "^#[0-9A-Fa-f]{6}$" },
              { "type": "null" }
            ],
            "title": "Color",
            "description": "Hex color code (#RRGGBB)"
          },
          "parent_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Parent Id"
          }
        },
        "type": "object",
        "required": ["name"],
        "title": "FolderCreateRequest",
        "description": "Request model for creating a folder."
      },
      "FolderListResponse": {
        "properties": {
          "folders": {
            "items": { "$ref": "#/components/schemas/LibraryFolder" },
            "type": "array",
            "title": "Folders"
          },
          "pagination": { "$ref": "#/components/schemas/Pagination" }
        },
        "type": "object",
        "required": ["folders", "pagination"],
        "title": "FolderListResponse",
        "description": "Response schema for a list of folders."
      },
      "FolderMoveRequest": {
        "properties": {
          "target_parent_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Target Parent Id"
          }
        },
        "type": "object",
        "title": "FolderMoveRequest",
        "description": "Request model for moving a folder to a new parent."
      },
      "FolderTreeResponse": {
        "properties": {
          "tree": {
            "items": { "$ref": "#/components/schemas/LibraryFolderTree" },
            "type": "array",
            "title": "Tree"
          }
        },
        "type": "object",
        "required": ["tree"],
        "title": "FolderTreeResponse",
        "description": "Response schema for folder tree structure."
      },
      "FolderUpdateRequest": {
        "properties": {
          "name": {
            "anyOf": [
              { "type": "string", "maxLength": 100, "minLength": 1 },
              { "type": "null" }
            ],
            "title": "Name"
          },
          "icon": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Icon"
          },
          "color": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Color"
          }
        },
        "type": "object",
        "title": "FolderUpdateRequest",
        "description": "Request model for updating a folder."
      },
      "Graph": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
@@ -9372,14 +8907,6 @@
            "title": "Is Latest Version"
          },
          "is_favorite": { "type": "boolean", "title": "Is Favorite" },
          "folder_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Folder Id"
          },
          "folder_name": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Folder Name"
          },
          "recommended_schedule_cron": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Recommended Schedule Cron"
@@ -9647,109 +9174,12 @@
              { "type": "null" }
            ],
            "description": "User-specific settings for this library agent"
          },
          "folder_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Folder Id",
            "description": "Folder ID to move agent to (empty string for root)"
          }
        },
        "type": "object",
        "title": "LibraryAgentUpdateRequest",
        "description": "Schema for updating a library agent via PUT.\n\nIncludes flags for auto-updating version, marking as favorite,\narchiving, or deleting."
      },
      "LibraryFolder": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
          "user_id": { "type": "string", "title": "User Id" },
          "name": { "type": "string", "title": "Name" },
          "icon": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Icon"
          },
          "color": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Color"
          },
          "parent_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Parent Id"
          },
          "created_at": {
            "type": "string",
            "format": "date-time",
            "title": "Created At"
          },
          "updated_at": {
            "type": "string",
            "format": "date-time",
            "title": "Updated At"
          },
          "agent_count": {
            "type": "integer",
            "title": "Agent Count",
            "default": 0
          },
          "subfolder_count": {
            "type": "integer",
            "title": "Subfolder Count",
            "default": 0
          }
        },
        "type": "object",
        "required": ["id", "user_id", "name", "created_at", "updated_at"],
        "title": "LibraryFolder",
        "description": "Represents a folder for organizing library agents."
      },
      "LibraryFolderTree": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
          "user_id": { "type": "string", "title": "User Id" },
          "name": { "type": "string", "title": "Name" },
          "icon": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Icon"
          },
          "color": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Color"
          },
          "parent_id": {
            "anyOf": [{ "type": "string" }, { "type": "null" }],
            "title": "Parent Id"
          },
          "created_at": {
            "type": "string",
            "format": "date-time",
            "title": "Created At"
          },
          "updated_at": {
            "type": "string",
            "format": "date-time",
            "title": "Updated At"
          },
          "agent_count": {
            "type": "integer",
            "title": "Agent Count",
            "default": 0
          },
          "subfolder_count": {
            "type": "integer",
            "title": "Subfolder Count",
            "default": 0
          },
          "children": {
            "items": { "$ref": "#/components/schemas/LibraryFolderTree" },
            "type": "array",
            "title": "Children",
            "default": []
          }
        },
        "type": "object",
        "required": ["id", "user_id", "name", "created_at", "updated_at"],
        "title": "LibraryFolderTree",
        "description": "Folder with nested children for tree view."
      },
      "Link": {
        "properties": {
          "id": { "type": "string", "title": "Id" },
@@ -11053,6 +10483,7 @@
          "agent_saved",
          "clarification_needed",
          "block_list",
          "block_details",
          "block_output",
          "doc_search_results",
          "doc_page",
||||
165
plans/SECRT-1950-claude-ci-optimizations.md
Normal file
@@ -0,0 +1,165 @@
# Implementation Plan: SECRT-1950 - Apply E2E CI Optimizations to Claude Code Workflows
|
||||
|
||||
## Ticket
|
||||
[SECRT-1950](https://linear.app/autogpt/issue/SECRT-1950)
|
||||
|
||||
## Summary
|
||||
Apply Pwuts's CI performance optimizations from PR #12090 to Claude Code workflows.
|
||||
|
||||
## Reference PR
|
||||
https://github.com/Significant-Gravitas/AutoGPT/pull/12090
|
||||
|
||||
---
|
||||
|
||||
## Analysis
|
||||
|
||||
### Current State (claude.yml)
|
||||
|
||||
**pnpm caching (lines 104-118):**
|
||||
```yaml
|
||||
- name: Set up Node.js
|
||||
uses: actions/setup-node@v6
|
||||
with:
|
||||
node-version: "22"
|
||||
|
||||
- name: Enable corepack
|
||||
run: corepack enable
|
||||
|
||||
- name: Set pnpm store directory
|
||||
run: |
|
||||
pnpm config set store-dir ~/.pnpm-store
|
||||
echo "PNPM_HOME=$HOME/.pnpm-store" >> $GITHUB_ENV
|
||||
|
||||
- name: Cache frontend dependencies
|
||||
uses: actions/cache@v5
|
||||
with:
|
||||
path: ~/.pnpm-store
|
||||
key: ${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml', 'autogpt_platform/frontend/package.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pnpm-${{ hashFiles('autogpt_platform/frontend/pnpm-lock.yaml') }}
|
||||
${{ runner.os }}-pnpm-
|
||||
```
|
||||
|
||||
**Docker setup (lines 134-165):**
|
||||
- Uses `docker-buildx-action@v3`
|
||||
- Has manual Docker image caching via `actions/cache`
|
||||
- Runs `docker compose up` without buildx bake optimization
|
||||
|
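For context, a minimal sketch of what this manual pattern typically looks like, reconstructed from the `/tmp/.buildx-cache` path noted under Proposed Changes below — the step name and cache key are illustrative assumptions, not a verbatim copy of `claude.yml`:

```yaml
# Hypothetical reconstruction of the manual Docker layer cache in claude.yml
# (step name and hashing key are assumed for illustration).
- name: Cache Docker layers
  uses: actions/cache@v5
  with:
    path: /tmp/.buildx-cache
    key: ${{ runner.os }}-buildx-${{ github.sha }}
    restore-keys: |
      ${{ runner.os }}-buildx-

# The buildx build then has to be pointed at this directory, e.g.:
#   --cache-from type=local,src=/tmp/.buildx-cache
#   --cache-to type=local,dest=/tmp/.buildx-cache-new,mode=max
# followed by manually rotating the -new directory — bookkeeping that the
# gha cache backend in the optimized setup makes unnecessary.
```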
### Pwuts's Optimizations (PR #12090)

1. **Simplified pnpm caching** - Use `setup-node`'s built-in cache:

   ```yaml
   - name: Enable corepack
     run: corepack enable

   - name: Set up Node
     uses: actions/setup-node@v6
     with:
       node-version: "22.18.0"
       cache: "pnpm"
       cache-dependency-path: autogpt_platform/frontend/pnpm-lock.yaml
   ```

2. **Docker build caching via buildx bake**:

   ```yaml
   - name: Set up Docker Buildx
     uses: docker/setup-buildx-action@v3
     with:
       driver: docker-container
       driver-opts: network=host

   - name: Expose GHA cache to docker buildx CLI
     uses: crazy-max/ghaction-github-runtime@v3

   - name: Build Docker images (with cache)
     run: |
       pip install pyyaml
       docker compose -f docker-compose.yml config > docker-compose.resolved.yml
       python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
         --source docker-compose.resolved.yml \
         --cache-from "type=gha" \
         --cache-to "type=gha,mode=max" \
         ...
       docker buildx bake --allow=fs.read=.. -f docker-compose.resolved.yml --load
   ```
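The `docker-ci-fix-compose-build-cache.py` script itself isn't shown in the excerpt above; as far as this plan can tell, its job is to rewrite each service's `build` section in the resolved compose file so that `docker buildx bake` picks up the GHA cache. A hypothetical excerpt of one service after the script runs (the `backend` service name and build paths are illustrative assumptions):

```yaml
# docker-compose.resolved.yml — hypothetical excerpt after the script runs.
# cache_from/cache_to are standard Compose build keys that
# `docker buildx bake` honors when given a compose file.
services:
  backend:
    build:
      context: ..
      dockerfile: autogpt_platform/backend/Dockerfile
      cache_from:
        - type=gha
      cache_to:
        - type=gha,mode=max
```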
---

## Proposed Changes

### 1. Update pnpm caching in `claude.yml`

**Before:**

- Manual cache key generation
- Separate `actions/cache` step
- Manual pnpm store directory config

**After:**

- Use `setup-node`'s built-in `cache: "pnpm"` option
- Remove the manual cache step
- Keep `corepack enable` before `setup-node`

### 2. Update Docker build in `claude.yml`

**Before:**

- Manual Docker layer caching via `actions/cache` with `/tmp/.buildx-cache`
- Simple `docker compose build`

**After:**

- Use `crazy-max/ghaction-github-runtime@v3` to expose the GHA cache
- Use the `docker-ci-fix-compose-build-cache.py` script
- Build with `docker buildx bake`

### 3. Apply the same changes to other Claude workflows

- `claude-dependabot.yml` - Check if it has similar patterns
- `claude-ci-failure-auto-fix.yml` - Check if it has similar patterns
- `copilot-setup-steps.yml` - Reusable workflow; may be the source of truth

---

## Files to Modify

1. `.github/workflows/claude.yml`
2. `.github/workflows/claude-dependabot.yml` (if applicable)
3. `.github/workflows/claude-ci-failure-auto-fix.yml` (if applicable)

## Dependencies

- PR #12090 must be merged first (it provides the `docker-ci-fix-compose-build-cache.py` script)
- Backend Dockerfile optimizations (already in PR #12090)

---

## Test Plan

1. Create a PR with the changes
2. Trigger the Claude workflow manually or via an `@claude` mention on a test issue
3. Compare CI runtime before/after
4. Verify the Claude agent still works correctly (can checkout, build, run tests)

---

## Risk Assessment

**Low risk:**

- These are CI infrastructure changes, not code changes
- If caching fails, builds fall back to uncached (slower, but still working)
- The changes mirror proven patterns from PR #12090

---

## Questions for Reviewer

1. Should we wait for PR #12090 to merge before creating this PR?
2. Does `copilot-setup-steps.yml` need updating, or is it a separate concern?
3. Any concerns about cache key collisions between the frontend E2E and Claude workflows? (One possible mitigation is sketched below.)
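On question 3, one hedged option: buildx's `gha` cache backend supports a `scope` parameter, so the Claude workflows could write to their own cache scope instead of sharing keys with the frontend E2E runs. The sketch below reuses the flags from the build step above; the `scope=claude` value is an assumption, not something PR #12090 prescribes:

```yaml
# Hypothetical variant: scope the GHA cache per workflow family to avoid
# key collisions with the frontend E2E cache.
- name: Build Docker images (with cache)
  run: |
    python ../.github/workflows/scripts/docker-ci-fix-compose-build-cache.py \
      --source docker-compose.resolved.yml \
      --cache-from "type=gha,scope=claude" \
      --cache-to "type=gha,scope=claude,mode=max" \
      ...
```

Whether isolated scopes or a shared warm cache is preferable is exactly the trade-off being put to the reviewer.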
---

## Verified

- ✅ **`claude-dependabot.yml`**: Has the same pnpm caching pattern as `claude.yml` (manual `actions/cache`) — NEEDS UPDATE
- ✅ **`claude-ci-failure-auto-fix.yml`**: Simple workflow with no pnpm or Docker caching — NO CHANGES NEEDED
- ✅ **Script path**: `docker-ci-fix-compose-build-cache.py` will be at `.github/workflows/scripts/` after PR #12090 merges
- ✅ **Test seed caching**: NOT APPLICABLE — the Claude workflows spin up a dev environment but don't run E2E tests with pre-seeded data. The seed caching in PR #12090 is specific to the frontend E2E test suite, which needs consistent test data; Claude just needs the services running.