Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-02-11 15:25:16 -05:00)

Compare commits: chore/comb...ci/speedup

3 commits:
- b638c9f9f9
- 756aea2a23
- 4e5200e762

.github/workflows/platform-backend-ci.yml (53 changed lines)

@@ -1,6 +1,7 @@
 name: AutoGPT Platform - Backend CI

 on:
+  workflow_dispatch:
   push:
     branches: [master, dev, ci-test*]
     paths:
@@ -32,7 +33,9 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.11", "3.12", "3.13"]
+        # Use Python 3.13 to match Docker image (see backend/Dockerfile)
+        # ClamAV tests moved to platform-backend-security-ci.yml (runs on merge to master)
+        python-version: ["3.13"]
     runs-on: ubuntu-latest

     services:
@@ -48,23 +51,6 @@ jobs:
         env:
           RABBITMQ_DEFAULT_USER: ${{ env.RABBITMQ_DEFAULT_USER }}
           RABBITMQ_DEFAULT_PASS: ${{ env.RABBITMQ_DEFAULT_PASS }}
-      clamav:
-        image: clamav/clamav-debian:latest
-        ports:
-          - 3310:3310
-        env:
-          CLAMAV_NO_FRESHCLAMD: false
-          CLAMD_CONF_StreamMaxLength: 50M
-          CLAMD_CONF_MaxFileSize: 100M
-          CLAMD_CONF_MaxScanSize: 100M
-          CLAMD_CONF_MaxThreads: 4
-          CLAMD_CONF_ReadTimeout: 300
-        options: >-
-          --health-cmd "clamdscan --version || exit 1"
-          --health-interval 30s
-          --health-timeout 10s
-          --health-retries 5
-          --health-start-period 180s

     steps:
       - name: Checkout repository
@@ -146,35 +132,6 @@ jobs:
       # outputs:
       #   DB_URL, API_URL, GRAPHQL_URL, ANON_KEY, SERVICE_ROLE_KEY, JWT_SECRET

-      - name: Wait for ClamAV to be ready
-        run: |
-          echo "Waiting for ClamAV daemon to start..."
-          max_attempts=60
-          attempt=0
-
-          until nc -z localhost 3310 || [ $attempt -eq $max_attempts ]; do
-            echo "ClamAV is unavailable - sleeping (attempt $((attempt+1))/$max_attempts)"
-            sleep 5
-            attempt=$((attempt+1))
-          done
-
-          if [ $attempt -eq $max_attempts ]; then
-            echo "ClamAV failed to start after $((max_attempts*5)) seconds"
-            echo "Checking ClamAV service logs..."
-            docker logs $(docker ps -q --filter "ancestor=clamav/clamav-debian:latest") 2>&1 | tail -50 || echo "No ClamAV container found"
-            exit 1
-          fi
-
-          echo "ClamAV is ready!"
-
-          # Verify ClamAV is responsive
-          echo "Testing ClamAV connection..."
-          timeout 10 bash -c 'echo "PING" | nc localhost 3310' || {
-            echo "ClamAV is not responding to PING"
-            docker logs $(docker ps -q --filter "ancestor=clamav/clamav-debian:latest") 2>&1 | tail -50 || echo "No ClamAV container found"
-            exit 1
-          }
-
       - name: Run Database Migrations
         run: poetry run prisma migrate dev --name updates
         env:
@@ -203,6 +160,8 @@ jobs:
           REDIS_HOST: "localhost"
           REDIS_PORT: "6379"
           ENCRYPTION_KEY: "dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw=" # DO NOT USE IN PRODUCTION!!
+          # ClamAV is not available in main CI - security tests run in platform-backend-security-ci.yml
+          CLAMAV_SERVICE_ENABLED: "false"

     env:
       CI: true

.github/workflows/platform-frontend-ci.yml (94 changed lines)

@@ -1,6 +1,7 @@
 name: AutoGPT Platform - Frontend CI

 on:
+  workflow_dispatch:
   push:
     branches: [master, dev]
     paths:
@@ -154,35 +155,78 @@ jobs:
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v3

-      - name: Cache Docker layers
+      # Docker image tar caching - loads images from cache in parallel for faster startup
+      - name: Set up Docker image cache
+        id: docker-cache
         uses: actions/cache@v4
         with:
-          path: /tmp/.buildx-cache
-          key: ${{ runner.os }}-buildx-frontend-test-${{ hashFiles('autogpt_platform/docker-compose.yml', 'autogpt_platform/backend/Dockerfile', 'autogpt_platform/backend/pyproject.toml', 'autogpt_platform/backend/poetry.lock') }}
+          path: ~/docker-cache
+          key: docker-images-frontend-${{ runner.os }}-${{ hashFiles('autogpt_platform/docker-compose.yml') }}
           restore-keys: |
-            ${{ runner.os }}-buildx-frontend-test-
+            docker-images-frontend-${{ runner.os }}-

+      - name: Load or pull Docker images
+        working-directory: autogpt_platform
+        run: |
+          mkdir -p ~/docker-cache
+
+          # Define image list for easy maintenance
+          IMAGES=(
+            "redis:latest"
+            "rabbitmq:management"
+            "kong:2.8.1"
+            "supabase/gotrue:v2.170.0"
+            "supabase/postgres:15.8.1.049"
+          )
+
+          # Check if any cached tar files exist
+          if ls ~/docker-cache/*.tar 1> /dev/null 2>&1; then
+            echo "Docker cache found, loading images in parallel..."
+            for image in "${IMAGES[@]}"; do
+              filename=$(echo "$image" | tr ':/' '--')
+              if [ -f ~/docker-cache/${filename}.tar ]; then
+                echo "Loading $image..."
+                docker load -i ~/docker-cache/${filename}.tar || echo "Warning: Failed to load $image from cache" &
+              fi
+            done
+            wait
+            echo "All cached images loaded"
+          else
+            echo "No Docker cache found, pulling images in parallel..."
+            for image in "${IMAGES[@]}"; do
+              docker pull "$image" &
+            done
+            wait
+
+            # Only save cache on main branches (not PRs) to avoid cache pollution
+            if [[ "${{ github.ref }}" == "refs/heads/master" ]] || [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
+              echo "Saving Docker images to cache in parallel..."
+              for image in "${IMAGES[@]}"; do
+                filename=$(echo "$image" | tr ':/' '--')
+                echo "Saving $image..."
+                docker save -o ~/docker-cache/${filename}.tar "$image" || echo "Warning: Failed to save $image" &
+              done
+              wait
+              echo "Docker image cache saved"
+            else
+              echo "Skipping cache save for PR/feature branch"
+            fi
+          fi
+
+          echo "Docker images ready for use"

       - name: Run docker compose
         run: |
           NEXT_PUBLIC_PW_TEST=true docker compose -f ../docker-compose.yml up -d
         env:
           DOCKER_BUILDKIT: 1
-          BUILDX_CACHE_FROM: type=local,src=/tmp/.buildx-cache
-          BUILDX_CACHE_TO: type=local,dest=/tmp/.buildx-cache-new,mode=max
-
-      - name: Move cache
-        run: |
-          rm -rf /tmp/.buildx-cache
-          if [ -d "/tmp/.buildx-cache-new" ]; then
-            mv /tmp/.buildx-cache-new /tmp/.buildx-cache
-          fi

       - name: Wait for services to be ready
         run: |
           echo "Waiting for rest_server to be ready..."
-          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
+          timeout 30 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
           echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
+          timeout 30 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."

       - name: Create E2E test data
         run: |
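
The caching step above (and the identical one in the fullstack workflow below) derives each tar filename with `tr ':/' '--'`, which maps both `:` and `/` to `-`. A minimal Python sketch of the same mapping; the function name `cache_filename` is ours, purely illustrative:

```python
def cache_filename(image: str) -> str:
    """Mirror of `tr ':/' '--'`: every ':' or '/' in the image ref becomes '-'."""
    return image.replace(":", "-").replace("/", "-") + ".tar"

assert cache_filename("redis:latest") == "redis-latest.tar"
assert cache_filename("supabase/gotrue:v2.170.0") == "supabase-gotrue-v2.170.0.tar"
```

Collisions are possible in principle (`a:b` and `a/b` map to the same name), but not for the pinned images listed in the workflow.
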
@@ -221,9 +265,27 @@ jobs:
       - name: Install dependencies
         run: pnpm install --frozen-lockfile

-      - name: Install Browser 'chromium'
+      # Playwright browser caching - saves 30-60s when cache hits
+      - name: Get Playwright version
+        id: playwright-version
+        run: |
+          echo "version=$(pnpm list @playwright/test --json | jq -r '.[0].dependencies["@playwright/test"].version')" >> $GITHUB_OUTPUT
+
+      - name: Cache Playwright browsers
+        uses: actions/cache@v4
+        id: playwright-cache
+        with:
+          path: ~/.cache/ms-playwright
+          key: playwright-${{ runner.os }}-${{ steps.playwright-version.outputs.version }}
+
+      - name: Install Playwright browsers
+        if: steps.playwright-cache.outputs.cache-hit != 'true'
         run: pnpm playwright install --with-deps chromium

+      - name: Install Playwright deps only (when cache hit)
+        if: steps.playwright-cache.outputs.cache-hit == 'true'
+        run: pnpm playwright install-deps chromium
+
       - name: Run Playwright tests
         run: pnpm test:no-build
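
The "Get Playwright version" step relies on `pnpm list --json` emitting an array of project objects whose `dependencies` map is keyed by package name. A rough sketch of that shape and of what the jq filter `.[0].dependencies["@playwright/test"].version` extracts; the sample version number is invented:

```python
import json

# Hypothetical sample of `pnpm list @playwright/test --json` output
sample = """[{"name": "frontend",
              "dependencies": {"@playwright/test": {"version": "1.48.0"}}}]"""

data = json.loads(sample)
version = data[0]["dependencies"]["@playwright/test"]["version"]
print(version)  # -> "1.48.0", used as the cache key suffix
```
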

.github/workflows/platform-fullstack-ci.yml (65 changed lines)

@@ -1,6 +1,7 @@
 name: AutoGPT Platform - Frontend CI

 on:
+  workflow_dispatch:
   push:
     branches: [master, dev]
     paths:
@@ -83,6 +84,66 @@ jobs:
         run: |
           cp ../backend/.env.default ../backend/.env

+      # Docker image tar caching - loads images from cache in parallel for faster startup
+      - name: Set up Docker image cache
+        id: docker-cache
+        uses: actions/cache@v4
+        with:
+          path: ~/docker-cache
+          key: docker-images-fullstack-${{ runner.os }}-${{ hashFiles('autogpt_platform/docker-compose.yml') }}
+          restore-keys: |
+            docker-images-fullstack-${{ runner.os }}-
+
+      - name: Load or pull Docker images
+        working-directory: autogpt_platform
+        run: |
+          mkdir -p ~/docker-cache
+
+          # Define image list for easy maintenance
+          IMAGES=(
+            "redis:latest"
+            "rabbitmq:management"
+            "kong:2.8.1"
+            "supabase/gotrue:v2.170.0"
+            "supabase/postgres:15.8.1.049"
+          )
+
+          # Check if any cached tar files exist
+          if ls ~/docker-cache/*.tar 1> /dev/null 2>&1; then
+            echo "Docker cache found, loading images in parallel..."
+            for image in "${IMAGES[@]}"; do
+              filename=$(echo "$image" | tr ':/' '--')
+              if [ -f ~/docker-cache/${filename}.tar ]; then
+                echo "Loading $image..."
+                docker load -i ~/docker-cache/${filename}.tar || echo "Warning: Failed to load $image from cache" &
+              fi
+            done
+            wait
+            echo "All cached images loaded"
+          else
+            echo "No Docker cache found, pulling images in parallel..."
+            for image in "${IMAGES[@]}"; do
+              docker pull "$image" &
+            done
+            wait
+
+            # Only save cache on main branches (not PRs) to avoid cache pollution
+            if [[ "${{ github.ref }}" == "refs/heads/master" ]] || [[ "${{ github.ref }}" == "refs/heads/dev" ]]; then
+              echo "Saving Docker images to cache in parallel..."
+              for image in "${IMAGES[@]}"; do
+                filename=$(echo "$image" | tr ':/' '--')
+                echo "Saving $image..."
+                docker save -o ~/docker-cache/${filename}.tar "$image" || echo "Warning: Failed to save $image" &
+              done
+              wait
+              echo "Docker image cache saved"
+            else
+              echo "Skipping cache save for PR/feature branch"
+            fi
+          fi
+
+          echo "Docker images ready for use"
+
       - name: Run docker compose
         run: |
           docker compose -f ../docker-compose.yml --profile local --profile deps_backend up -d
@@ -104,9 +165,9 @@ jobs:
       - name: Wait for services to be ready
         run: |
           echo "Waiting for rest_server to be ready..."
-          timeout 60 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
+          timeout 30 sh -c 'until curl -f http://localhost:8006/health 2>/dev/null; do sleep 2; done' || echo "Rest server health check timeout, continuing..."
           echo "Waiting for database to be ready..."
-          timeout 60 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."
+          timeout 30 sh -c 'until docker compose -f ../docker-compose.yml exec -T db pg_isready -U postgres 2>/dev/null; do sleep 2; done' || echo "Database ready check timeout, continuing..."

       - name: Generate API queries
         run: pnpm generate:api:force

.github/workflows/platform-security-ci.yml (new file, 146 lines)

@@ -0,0 +1,146 @@
+name: AutoGPT Platform - Backend Security CI
+
+# This workflow runs ClamAV-dependent security tests.
+# It only runs on merge to master to avoid the 3-5 minute ClamAV startup time on every PR.
+
+on:
+  workflow_dispatch:
+  push:
+    branches: [master]
+    paths:
+      - "autogpt_platform/backend/**/file*.py"
+      - "autogpt_platform/backend/**/scan*.py"
+      - "autogpt_platform/backend/**/virus*.py"
+      - "autogpt_platform/backend/**/media*.py"
+      - ".github/workflows/platform-backend-security-ci.yml"
+
+concurrency:
+  group: ${{ format('backend-security-ci-{0}', github.sha) }}
+  cancel-in-progress: false
+
+defaults:
+  run:
+    shell: bash
+    working-directory: autogpt_platform/backend
+
+jobs:
+  security-tests:
+    runs-on: ubuntu-latest
+    timeout-minutes: 15
+
+    services:
+      redis:
+        image: redis:latest
+        ports:
+          - 6379:6379
+      clamav:
+        image: clamav/clamav-debian:latest
+        ports:
+          - 3310:3310
+        env:
+          CLAMAV_NO_FRESHCLAMD: false
+          CLAMD_CONF_StreamMaxLength: 50M
+          CLAMD_CONF_MaxFileSize: 100M
+          CLAMD_CONF_MaxScanSize: 100M
+          CLAMD_CONF_MaxThreads: 4
+          CLAMD_CONF_ReadTimeout: 300
+        options: >-
+          --health-cmd "clamdscan --version || exit 1"
+          --health-interval 30s
+          --health-timeout 10s
+          --health-retries 5
+          --health-start-period 180s
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          submodules: true
+
+      - name: Set up Python 3.13
+        uses: actions/setup-python@v5
+        with:
+          python-version: "3.13"
+
+      - name: Setup Supabase
+        uses: supabase/setup-cli@v1
+        with:
+          version: 1.178.1
+
+      - name: Set up Python dependency cache
+        uses: actions/cache@v4
+        with:
+          path: ~/.cache/pypoetry
+          key: poetry-${{ runner.os }}-${{ hashFiles('autogpt_platform/backend/poetry.lock') }}
+
+      - name: Install Poetry
+        run: |
+          HEAD_POETRY_VERSION=$(python ../../.github/workflows/scripts/get_package_version_from_lockfile.py poetry)
+          echo "Using Poetry version ${HEAD_POETRY_VERSION}"
+          curl -sSL https://install.python-poetry.org | POETRY_VERSION=$HEAD_POETRY_VERSION python3 -
+
+      - name: Install Python dependencies
+        run: poetry install
+
+      - name: Generate Prisma Client
+        run: poetry run prisma generate
+
+      - id: supabase
+        name: Start Supabase
+        working-directory: .
+        run: |
+          supabase init
+          supabase start --exclude postgres-meta,realtime,storage-api,imgproxy,inbucket,studio,edge-runtime,logflare,vector,supavisor
+          supabase status -o env | sed 's/="/=/; s/"$//' >> $GITHUB_OUTPUT
+
+      - name: Wait for ClamAV to be ready
+        run: |
+          echo "Waiting for ClamAV daemon to start..."
+          max_attempts=60
+          attempt=0
+
+          until nc -z localhost 3310 || [ $attempt -eq $max_attempts ]; do
+            echo "ClamAV is unavailable - sleeping (attempt $((attempt+1))/$max_attempts)"
+            sleep 5
+            attempt=$((attempt+1))
+          done
+
+          if [ $attempt -eq $max_attempts ]; then
+            echo "ClamAV failed to start after $((max_attempts*5)) seconds"
+            exit 1
+          fi
+
+          echo "ClamAV is ready!"
+
+      - name: Run Database Migrations
+        run: poetry run prisma migrate dev --name updates
+        env:
+          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
+
+      - name: Run security-related tests
+        run: |
+          poetry run pytest -v \
+            backend/util/virus_scanner_test.py \
+            backend/util/file_test.py \
+            backend/server/v2/store/media_test.py \
+            -x
+        env:
+          DATABASE_URL: ${{ steps.supabase.outputs.DB_URL }}
+          DIRECT_URL: ${{ steps.supabase.outputs.DB_URL }}
+          SUPABASE_URL: ${{ steps.supabase.outputs.API_URL }}
+          SUPABASE_SERVICE_ROLE_KEY: ${{ steps.supabase.outputs.SERVICE_ROLE_KEY }}
+          JWT_VERIFY_KEY: ${{ steps.supabase.outputs.JWT_SECRET }}
+          REDIS_HOST: "localhost"
+          REDIS_PORT: "6379"
+          ENCRYPTION_KEY: "dvziYgz0KSK8FENhju0ZYi8-fRTfAdlz6YLhdB_jhNw="
+          CLAMAV_SERVICE_HOST: "localhost"
+          CLAMAV_SERVICE_PORT: "3310"
+          CLAMAV_SERVICE_ENABLED: "true"
+
+    env:
+      CI: true
+      PLAIN_OUTPUT: True
+      RUN_ENV: local
+      PORT: 8080
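
In the "Start Supabase" step above, the `sed 's/="/=/; s/"$//'` pipe unquotes the `supabase status -o env` output so each line becomes a plain `KEY=value` pair suitable for `$GITHUB_OUTPUT`. A quick Python illustration of the same transformation; the sample value is invented:

```python
import re

def unquote(line: str) -> str:
    """Equivalent of `sed 's/="/=/; s/"$//'`: drop the quotes around the value."""
    return re.sub(r'"$', "", line.replace('="', "=", 1))

print(unquote('DB_URL="postgresql://localhost:54322/postgres"'))
# -> DB_URL=postgresql://localhost:54322/postgres
```
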
@@ -31,8 +31,26 @@ from backend.util.exceptions import NotFoundError

 logger = logging.getLogger(__name__)

-config = backend.server.v2.chat.config.ChatConfig()
-client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+# Lazy initialization to avoid import-time errors when API keys are not set
+_config: backend.server.v2.chat.config.ChatConfig | None = None
+_client: AsyncOpenAI | None = None
+
+
+def get_config() -> backend.server.v2.chat.config.ChatConfig:
+    """Get the chat config, creating it on first access."""
+    global _config
+    if _config is None:
+        _config = backend.server.v2.chat.config.ChatConfig()
+    return _config
+
+
+def get_client() -> AsyncOpenAI:
+    """Get the OpenAI client, creating it on first access."""
+    global _client
+    if _client is None:
+        config = get_config()
+        _client = AsyncOpenAI(api_key=config.api_key, base_url=config.base_url)
+    return _client
+
+
 async def create_chat_session(

@@ -130,6 +148,7 @@ async def stream_chat_completion(
         f"new message_count={len(session.messages)}"
     )

+    config = get_config()
     if len(session.messages) > config.max_context_messages:
         raise ValueError(f"Max messages exceeded: {config.max_context_messages}")

@@ -345,6 +364,8 @@ async def _stream_chat_chunks(
         SSE formatted JSON response objects

     """
+    config = get_config()
+    client = get_client()
     model = config.model

     logger.info("Starting pure chat stream")
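
The chat-service change above swaps module-level construction for lazy singletons, so importing the module no longer fails when credentials are absent. A self-contained sketch of the pattern; the class name and env var are illustrative, not the AutoGPT ones:

```python
import os


class Config:
    def __init__(self) -> None:
        # Raises KeyError if the key is unset - previously this ran at import time
        self.api_key = os.environ["EXAMPLE_API_KEY"]


_config: Config | None = None


def get_config() -> Config:
    """Create the config on first call and reuse it afterwards."""
    global _config
    if _config is None:
        _config = Config()
    return _config

# Importing this module never raises; only the first get_config() call does,
# and only in code paths that actually need the config.
```
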
@@ -135,6 +135,9 @@ filterwarnings = [
     "ignore:'audioop' is deprecated:DeprecationWarning:discord.player",
     "ignore:invalid escape sequence:DeprecationWarning:tweepy.api",
 ]
+# Exclude security tests that require ClamAV - these run in platform-backend-security-ci.yml
+testpaths = ["backend"]
+addopts = "--ignore=backend/util/virus_scanner_test.py --ignore=backend/util/file_test.py --ignore=backend/server/v2/store/media_test.py"

 [tool.ruff]
 target-version = "py310"