Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-01-08 14:53:53 -05:00.
### Summary Performance optimization for the platform's store and creator functionality by adding targeted database indexes and implementing materialized views to reduce query execution time. ### Changes 🏗️ **Database Performance Optimizations:** - Added strategic database indexes for `StoreListing`, `StoreListingVersion`, `StoreListingReview`, `AgentGraphExecution`, and `Profile` tables - Implemented materialized views (`mv_agent_run_counts`, `mv_review_stats`) to cache expensive aggregation queries - Optimized `StoreAgent` and `Creator` views to use materialized views and improved query patterns - Added automated refresh function with 15-minute scheduling for materialized views (when pg_cron extension is available) **Key Performance Improvements:** - Filtered indexes on approved store listings to speed up marketplace queries - GIN index on categories for faster category-based searches - Composite indexes for common query patterns (e.g., listing + version lookups) - Pre-computed agent run counts and review statistics to eliminate expensive aggregations ### Checklist 📋 #### For code changes: - [x] I have clearly listed my changes in the PR description - [x] I have made a test plan - [x] I have tested my changes according to the test plan: - [x] Verified migration runs successfully without errors - [x] Confirmed materialized views are created and populated correctly - [x] Tested StoreAgent and Creator view queries return expected results - [x] Validated automatic refresh function works properly - [x] Confirmed rollback migration successfully removes all changes #### For configuration changes: - [x] `.env.example` is updated or already compatible with my changes - [x] `docker-compose.yml` is updated or already compatible with my changes - [x] I have included a list of my configuration changes in the PR description (under **Changes**) **Note:** No configuration changes were required as this is purely a database schema optimization.
126 lines
4.4 KiB
Python
126 lines
4.4 KiB
Python
import os
|
|
import subprocess
|
|
import sys
|
|
import time
|
|
|
|
|
|
def wait_for_postgres(max_retries=5, delay=5):
    """Poll the dockerized Postgres service until it accepts connections.

    Runs ``pg_isready`` inside the ``db`` compose service up to
    ``max_retries`` times, sleeping ``delay`` seconds after each failed
    probe.

    Args:
        max_retries: Maximum number of readiness probes to attempt.
        delay: Seconds to wait between failed probes.

    Returns:
        True once Postgres reports "accepting connections"; False when
        every attempt has been exhausted.
    """
    probe_cmd = [
        "docker",
        "compose",
        "-f",
        "docker-compose.test.yaml",
        "--env-file",
        "../.env",
        "exec",
        "db",
        "pg_isready",
        "-U",
        "postgres",
        "-d",
        "postgres",
    ]
    for _attempt in range(max_retries):
        try:
            probe = subprocess.run(
                probe_cmd,
                check=True,
                capture_output=True,
                text=True,
            )
        except subprocess.CalledProcessError:
            # pg_isready exits non-zero while the server is still starting.
            print(f"PostgreSQL is not ready yet. Retrying in {delay} seconds...")
            time.sleep(delay)
        else:
            if "accepting connections" in probe.stdout:
                print("PostgreSQL is ready.")
                return True
    print("Failed to connect to PostgreSQL.")
    return False
|
|
|
|
|
|
def run_command(command, check=True):
    """Execute *command* with subprocess, aborting the script on failure.

    Args:
        command: Argument list passed straight to ``subprocess.run``.
        check: When True, a non-zero exit status raises
            ``CalledProcessError``, which is reported and converted into
            ``sys.exit(1)``.
    """
    try:
        subprocess.run(command, check=check)
    except subprocess.CalledProcessError as err:
        # Surface the failing command, then stop the whole script.
        print(f"Command failed: {err}")
        sys.exit(1)
|
|
|
|
|
|
def _load_env_file(dotenv_path):
    """Parse KEY=VALUE pairs from a .env file into a dict.

    Blank lines, comment lines (even with leading whitespace), and
    malformed lines without an ``=`` are skipped instead of raising
    ValueError, unlike a naive ``split("=", 1)`` unpack.

    Args:
        dotenv_path: Path to the .env file; a missing file yields {}.

    Returns:
        Mapping of keys to their raw (unquoted, unstripped) values.
    """
    values = {}
    if os.path.exists(dotenv_path):
        with open(dotenv_path) as f:
            for raw_line in f:
                line = raw_line.strip()
                if not line or line.startswith("#") or "=" not in line:
                    continue
                key, value = line.split("=", 1)
                # NOTE: values are kept verbatim; surrounding quotes (if
                # any) are intentionally NOT stripped to match prior use.
                values[key] = value
    return values


def test():
    """Run the test suite against an isolated dockerized Postgres.

    Starts the test database via docker compose, points Prisma and pytest
    at it through explicit DATABASE_URL/DIRECT_URL overrides, and always
    tears the container down afterwards — even if migrations or the test
    run raise.

    Exits the process with pytest's return code (or 1 on setup failure).
    """
    compose_base = ["docker", "compose", "-f", "docker-compose.test.yaml"]

    # Start PostgreSQL with Docker Compose.
    run_command(compose_base + ["--env-file", "../.env", "up", "-d"])

    if not wait_for_postgres():
        run_command(compose_base + ["down"])
        sys.exit(1)

    # IMPORTANT: Set test database environment variables to prevent
    # accidentally resetting the developer's local database.
    #
    # This script spins up a separate test database container using
    # docker-compose.test.yaml. We explicitly set DATABASE_URL and
    # DIRECT_URL to point at that container so that:
    # 1. prisma migrate reset only affects the test database
    # 2. Tests run against the test database, not the developer's local one
    # 3. Any database operations during testing are isolated from dev data
    #
    # Without this, a developer with DATABASE_URL pointing at their
    # development database would have their local data wiped by a test run!
    test_env = os.environ.copy()

    # Load database configuration from the .env file into a LOCAL dict.
    # (Previously these values were written into os.environ, mutating the
    # process-global environment this script is explicitly trying to keep
    # isolated.)
    dotenv_path = os.path.join(os.path.dirname(__file__), "../.env")
    file_env = _load_env_file(dotenv_path)

    def _cfg(key, default):
        # .env file values take precedence over the inherited environment,
        # matching the original load-then-getenv behavior.
        return file_env.get(key, os.environ.get(key, default))

    db_user = _cfg("POSTGRES_USER", "postgres")
    db_pass = _cfg("POSTGRES_PASSWORD", "postgres")
    db_name = _cfg("POSTGRES_DB", "postgres")
    db_port = _cfg("POSTGRES_PORT", "5432")

    # Construct the test database URL — always points at the test container.
    test_env["DATABASE_URL"] = (
        f"postgresql://{db_user}:{db_pass}@localhost:{db_port}/{db_name}"
    )
    test_env["DIRECT_URL"] = test_env["DATABASE_URL"]

    test_env["DB_PORT"] = db_port
    test_env["DB_NAME"] = db_name
    test_env["DB_PASS"] = db_pass
    test_env["DB_USER"] = db_user

    try:
        # Reset the test database to a clean state. Safe because
        # DATABASE_URL was pinned to the test container above. check=False:
        # reset may fail harmlessly on a brand-new (empty) database.
        subprocess.run(
            ["prisma", "migrate", "reset", "--force", "--skip-seed"],
            env=test_env,
            check=False,
        )
        # Then apply migrations to bring the schema up to date.
        subprocess.run(["prisma", "migrate", "deploy"], env=test_env, check=True)

        # Run the tests with the test-database environment so every DB
        # connection in the suite uses the isolated container.
        result = subprocess.run(
            ["pytest"] + sys.argv[1:], env=test_env, check=False
        )
        exit_code = result.returncode
    finally:
        # Always tear down the container, even if migrations or pytest
        # raised — previously a deploy failure leaked the test container.
        run_command(compose_base + ["down"])

    sys.exit(exit_code)
|