Merge branch 'swiftyos/open-1993-add-rsc-and-mocking-of-api-to-storybooks' into swiftyos/open-1920-marketplace-home-components

This commit is contained in:
SwiftyOS
2024-11-06 10:10:17 +01:00
18 changed files with 2777 additions and 754 deletions

View File

@@ -0,0 +1,32 @@
import fastapi
import fastapi.responses
# In-memory mock of the Store API so the marketplace frontend and
# Storybook can be developed against a stable, deterministic payload.
store_router = fastapi.APIRouter()


# Route paths must begin with "/" — Starlette rejects routed paths that
# don't (was "agents", which fails at router registration).
@store_router.get("/agents", tags=["store"])
def get_agents(
    featured: bool = False,
    top: bool = False,
    categories: str = "",
    page: int = 1,
    page_size: int = 20,
):
    """Return a fixed list of store agents.

    The filter/pagination parameters are accepted (and now optional, so a
    bare ``GET /agents`` no longer returns 422) but are currently ignored
    by this mock implementation.

    Returns:
        JSONResponse containing two hard-coded agent entries.
    """
    return fastapi.responses.JSONResponse(
        content=[
            {
                "agentName": "Super SEO Optimizer",
                "agentImage": "https://ddz4ak4pa3d19.cloudfront.net/cache/cc/11/cc1172271dcf723a34f488a3344e82b2.jpg",
                "creatorName": "AI Labs",
                "description": "Boost your website's search engine rankings with our advanced AI-powered SEO optimization tool.",
                "runs": 100000,
                "rating": 4.9,
                "featured": True,
            },
            {
                "agentName": "Content Wizard",
                "agentImage": "https://upload.wikimedia.org/wikipedia/commons/c/c5/Big_buck_bunny_poster_big.jpg",
                "creatorName": "WriteRight Inc.",
                "description": "Generate high-quality, engaging content for your blog, social media, or marketing campaigns.",
                "runs": 75000,
                "rating": 4.7,
                "featured": True,
            },
        ]
    )

View File

@@ -0,0 +1,137 @@
-- CreateEnum
-- Lifecycle states of a store listing submission.
-- (fixed: first value was misspelled 'DAFT' — keep the Prisma schema's
-- enum definition in sync with this rename)
CREATE TYPE "SubmissionStatus" AS ENUM ('DRAFT', 'PENDING', 'APPROVED', 'REJECTED');

-- CreateTable
-- Public creator profile attached to a platform user.
CREATE TABLE "Profile" (
    "id" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "userId" TEXT NOT NULL,
    "username" TEXT NOT NULL,
    "description" TEXT NOT NULL,
    "links" TEXT[],
    "avatarUrl" TEXT,

    CONSTRAINT "Profile_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- One marketplace listing per (agent, owner); versions hang off it.
-- NOTE(review): unlike StoreListingReview below, this id has no
-- DEFAULT gen_random_uuid() — confirm ids are generated by the
-- application layer.
CREATE TABLE "StoreListing" (
    "id" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "isDeleted" BOOLEAN NOT NULL DEFAULT false,
    "isApproved" BOOLEAN NOT NULL DEFAULT false,
    "agentId" TEXT NOT NULL,
    "agentVersion" INTEGER NOT NULL,
    "owningUserId" TEXT NOT NULL,

    CONSTRAINT "StoreListing_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- Immutable snapshot of a listing's marketing content per agent version.
CREATE TABLE "StoreListingVersion" (
    "id" UUID NOT NULL,
    "version" INTEGER NOT NULL DEFAULT 1,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "agentId" TEXT NOT NULL,
    "agentVersion" INTEGER NOT NULL,
    "slug" TEXT NOT NULL,
    "name" TEXT NOT NULL,
    "videoUrl" TEXT,
    "imageUrls" TEXT[],
    "description" TEXT NOT NULL,
    "categories" TEXT[],
    "isFeatured" BOOLEAN NOT NULL DEFAULT false,
    "isDeleted" BOOLEAN NOT NULL DEFAULT false,
    "isAvailable" BOOLEAN NOT NULL DEFAULT true,
    "isApproved" BOOLEAN NOT NULL DEFAULT false,
    "storeListingId" UUID,

    CONSTRAINT "StoreListingVersion_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- User review of a specific listing version.
CREATE TABLE "StoreListingReview" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "storeListingVersionId" UUID NOT NULL,
    "reviewByUserId" TEXT NOT NULL,
    "score" INTEGER NOT NULL,
    "comments" TEXT,

    CONSTRAINT "StoreListingReview_pkey" PRIMARY KEY ("id")
);

-- CreateTable
-- Moderation workflow record for a submitted listing version.
CREATE TABLE "StoreListingSubmission" (
    "id" UUID NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "storeListingId" UUID NOT NULL,
    "storeListingVersionId" UUID NOT NULL,
    "reviewerId" TEXT NOT NULL,
    -- NOTE(review): "Status" breaks the camelCase convention used by
    -- every other column here — consider renaming to "status" in the
    -- schema before this ships.
    "Status" "SubmissionStatus" NOT NULL DEFAULT 'PENDING',
    "reviewComments" TEXT,

    CONSTRAINT "StoreListingSubmission_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "Profile_username_key" ON "Profile"("username");

-- CreateIndex
CREATE INDEX "Profile_username_idx" ON "Profile"("username");

-- CreateIndex
CREATE INDEX "StoreListing_isApproved_idx" ON "StoreListing"("isApproved");

-- CreateIndex
CREATE INDEX "StoreListing_agentId_idx" ON "StoreListing"("agentId");

-- CreateIndex
CREATE INDEX "StoreListing_owningUserId_idx" ON "StoreListing"("owningUserId");

-- CreateIndex
CREATE INDEX "StoreListingVersion_agentId_agentVersion_isApproved_idx" ON "StoreListingVersion"("agentId", "agentVersion", "isApproved");

-- CreateIndex
CREATE UNIQUE INDEX "StoreListingVersion_agentId_agentVersion_key" ON "StoreListingVersion"("agentId", "agentVersion");

-- CreateIndex
CREATE INDEX "StoreListingSubmission_storeListingId_idx" ON "StoreListingSubmission"("storeListingId");

-- CreateIndex
CREATE INDEX "StoreListingSubmission_Status_idx" ON "StoreListingSubmission"("Status");

-- AddForeignKey
ALTER TABLE "Profile" ADD CONSTRAINT "Profile_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListing" ADD CONSTRAINT "StoreListing_agentId_agentVersion_fkey" FOREIGN KEY ("agentId", "agentVersion") REFERENCES "AgentGraph"("id", "version") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListing" ADD CONSTRAINT "StoreListing_owningUserId_fkey" FOREIGN KEY ("owningUserId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingVersion" ADD CONSTRAINT "StoreListingVersion_agentId_agentVersion_fkey" FOREIGN KEY ("agentId", "agentVersion") REFERENCES "AgentGraph"("id", "version") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingVersion" ADD CONSTRAINT "StoreListingVersion_storeListingId_fkey" FOREIGN KEY ("storeListingId") REFERENCES "StoreListing"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingReview" ADD CONSTRAINT "StoreListingReview_storeListingVersionId_fkey" FOREIGN KEY ("storeListingVersionId") REFERENCES "StoreListingVersion"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingReview" ADD CONSTRAINT "StoreListingReview_reviewByUserId_fkey" FOREIGN KEY ("reviewByUserId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingSubmission" ADD CONSTRAINT "StoreListingSubmission_storeListingId_fkey" FOREIGN KEY ("storeListingId") REFERENCES "StoreListing"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingSubmission" ADD CONSTRAINT "StoreListingSubmission_storeListingVersionId_fkey" FOREIGN KEY ("storeListingVersionId") REFERENCES "StoreListingVersion"("id") ON DELETE CASCADE ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "StoreListingSubmission" ADD CONSTRAINT "StoreListingSubmission_reviewerId_fkey" FOREIGN KEY ("reviewerId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

View File

@@ -0,0 +1,57 @@
BEGIN;

-- "StoreAgent": denormalized read model for the marketplace storefront.
-- One row per approved, non-deleted store listing, combining the latest
-- listing version with the creator profile, review stats and run counts.
CREATE VIEW "StoreAgent" AS
WITH ReviewStats AS (
    -- Review count and average score per listing, aggregated across ALL
    -- versions of the listing (not just the latest one selected below).
    SELECT
        sl."id" AS "storeListingId",
        COUNT(sr.id) AS review_count,
        AVG(CAST(sr.score AS DECIMAL)) AS avg_rating
    FROM "StoreListing" sl
    JOIN "StoreListingVersion" slv ON slv."storeListingId" = sl."id"
    JOIN "StoreListingReview" sr ON sr."storeListingVersionId" = slv.id
    WHERE sl."isDeleted" = FALSE
    GROUP BY sl."id"
),
AgentRuns AS (
    -- Total execution count per agent graph.
    SELECT "agentGraphId", COUNT(*) AS run_count
    FROM "AgentGraphExecution"
    GROUP BY "agentGraphId"
)
SELECT
    sl."updatedAt" AS "lastUpdated",
    slv.version AS "version",
    sl."agentId" AS "agentId",
    a.name AS "agentName",
    a."version" AS "agentVersion",
    p.username AS "creatorName",
    p."avatarUrl" AS "avatarSrc",
    slv."isFeatured",
    slv.slug,
    slv.name,
    slv.description,
    slv."videoUrl" AS "videoUrl",
    -- Never NULL in the view: missing image lists become empty arrays.
    COALESCE(slv."imageUrls", ARRAY[]::TEXT[]) AS "imageUrls",
    slv.categories,
    -- Listings with no executions / no reviews report 0 instead of NULL.
    COALESCE(ar.run_count, 0) AS runs,
    CAST(COALESCE(rs.avg_rating, 0.0) AS DOUBLE PRECISION) AS rating
FROM "StoreListing" sl
JOIN "AgentGraph" a ON sl."agentId" = a.id AND sl."agentVersion" = a."version"
-- LEFT JOIN: a listing keeps its row even if the owner has no profile.
LEFT JOIN "Profile" p ON sl."owningUserId" = p."userId"
-- Most recently updated version of each listing.
-- NOTE(review): this picks the newest version regardless of its
-- isDeleted / isApproved / isAvailable flags — confirm that is intended.
LEFT JOIN LATERAL (
    SELECT slv.*
    FROM "StoreListingVersion" slv
    WHERE slv."storeListingId" = sl.id
    ORDER BY slv."updatedAt" DESC
    LIMIT 1
) slv ON TRUE
LEFT JOIN ReviewStats rs ON sl.id = rs."storeListingId"
LEFT JOIN AgentRuns ar ON a.id = ar."agentGraphId"
WHERE sl."isDeleted" = FALSE
  AND sl."isApproved" = TRUE;

COMMIT;

View File

@@ -0,0 +1,360 @@
import argparse
import re
from collections import defaultdict
def indent_block(block: str, indent_level: int = 4) -> str:
    """Prefix every non-blank line of ``block`` with ``indent_level`` spaces.

    Whitespace-only lines are left untouched so no trailing indentation is
    introduced on visually empty lines.
    """
    prefix = " " * indent_level
    result = []
    for line in block.splitlines():
        result.append(prefix + line if line.strip() else line)
    return "\n".join(result)
def wrap_table_with_indexes_and_constraints(
    table_block: str, indexes: list, constraints: list
) -> str:
    """Bundle a CREATE TABLE with its indexes and constraints in one
    guarded DO block that only runs when the table does not yet exist
    in the current schema.
    """
    # Make sure the table definition is terminated before embedding it.
    if not table_block.strip().endswith(");"):
        table_block = table_block.rstrip() + "\n);"

    statements = [table_block, *indexes, *constraints]
    body = "\n\n".join(indent_block(stmt.strip(), 8) for stmt in statements)

    match = re.search(r'CREATE TABLE "([^"]+)"', table_block)
    table_name = match.group(1) if match else None

    guarded = f"""
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_tables WHERE tablename = '{table_name}' AND schemaname = CURRENT_SCHEMA()) THEN
{body}
    END IF;
END $$;"""
    return guarded.strip()
def wrap_standalone_block(block: str, block_type: str, condition: str) -> str:
    """Guard a single SQL statement with an existence check.

    Args:
        block: the SQL statement to wrap.
        block_type: system catalog to probe (e.g. ``pg_indexes``).
        condition: WHERE predicate identifying the object in that catalog.

    Returns:
        A DO block that executes ``block`` only when the object is absent
        from the current schema.
    """
    guarded = f"""
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM {block_type} WHERE {condition} AND schemaname = CURRENT_SCHEMA()) THEN
{indent_block(block.strip(), 8)}
    END IF;
END $$;"""
    return guarded.strip()
def topological_sort(nodes, edges):
    """Deterministic topological ordering of ``nodes``.

    Args:
        nodes: set of node names.
        edges: dict mapping a node to the set of nodes it depends on.

    Returns:
        A list of all nodes in which every dependency precedes its
        dependents; ties are broken alphabetically so the result is
        stable across runs.

    Raises:
        Exception: if the dependency graph contains a cycle.
    """
    in_degree = defaultdict(int)
    dependents = defaultdict(set)  # dep -> nodes that depend on it
    for n in nodes:
        in_degree[n] = 0
    for n, deps in edges.items():
        for dep in deps:
            dependents[dep].add(n)
            in_degree[n] += 1

    # Keep the ready queue sorted so output is deterministic.
    ready = sorted(n for n in nodes if in_degree[n] == 0)
    order = []
    while ready:
        current = ready.pop(0)
        order.append(current)
        for nxt in sorted(dependents[current]):
            in_degree[nxt] -= 1
            if in_degree[nxt] == 0:
                ready.append(nxt)
                ready.sort()

    if len(order) != len(nodes):
        raise Exception("Cycle detected in dependency graph")
    return order
def detect_cycles_and_remove_edges(nodes, edges, edge_to_constraint):
    """
    Detect cycles in the dependency graph and remove edges to break cycles.

    Args:
        nodes: set of all node (table) names.
        edges: dict mapping a node to the set of nodes it depends on.
        edge_to_constraint: maps a (source, referenced) edge to the SQL
            constraint statement that created it.

    Returns:
        (sorted_nodes, deferred_constraints): a topological ordering of the
        nodes after cycle-breaking, and the constraint statements whose
        edges were removed (to be re-emitted at the end of the output).
    """
    edges_copy = {node: set(deps) for node, deps in edges.items()}  # Copy edges
    deferred_constraints = []
    removed_edges = set()  # NOTE(review): collected but never returned or read
    while True:
        try:
            sorted_nodes = topological_sort(nodes, edges_copy)
            break  # If topological sort succeeds, exit the loop
        except Exception:
            # If a cycle is detected, find cycles and remove one edge from each
            cycles = find_cycles(edges_copy)
            if not cycles:
                raise Exception("Cycle detected but no cycles found in graph.")
            for cycle in cycles:
                if len(cycle) >= 2:
                    # Remove the edge from the last node to the first node in the cycle.
                    # NOTE(review): find_cycles returns SCCs, which are not
                    # necessarily simple cycles — if (u, v) is not an actual
                    # edge nothing is removed this pass, and in theory the
                    # loop could spin without progress; confirm inputs can't
                    # trigger that.
                    u = cycle[-1]
                    v = cycle[0]
                    edge = (u, v)
                    if edge in edge_to_constraint:
                        deferred_constraints.append(edge_to_constraint[edge])
                    if v in edges_copy[u]:
                        edges_copy[u].remove(v)
                    removed_edges.add(edge)
                else:
                    # Cycle of length 1 (self-loop), remove it
                    node = cycle[0]
                    edges_copy[node].remove(node)
                    edge = (node, node)
                    if edge in edge_to_constraint:
                        deferred_constraints.append(edge_to_constraint[edge])
                    removed_edges.add(edge)
    return sorted_nodes, deferred_constraints
def find_cycles(edges):
    """
    Find cycles in the graph using Tarjan's algorithm.

    Args:
        edges: dict mapping a node to the iterable of nodes it points to.

    Returns:
        A list of "cycles", where each entry is a list of nodes. Each entry
        is a strongly connected component of size > 1 (or a self-loop),
        which is not necessarily a simple cycle.
    """
    index = 0           # next DFS discovery index
    index_stack = []    # Tarjan's node stack
    lowlinks = {}       # node -> lowest discovery index reachable from it
    index_dict = {}     # node -> discovery index
    on_stack = set()
    cycles = []

    def strongconnect(node):
        # Recursive DFS step of Tarjan's SCC algorithm.
        # NOTE(review): recursion depth grows with graph size; fine for
        # migration-sized graphs, an iterative version would be needed for
        # very large inputs.
        nonlocal index
        index_dict[node] = index
        lowlinks[node] = index
        index += 1
        index_stack.append(node)
        on_stack.add(node)

        for neighbor in edges.get(node, []):
            if neighbor not in index_dict:
                strongconnect(neighbor)
                lowlinks[node] = min(lowlinks[node], lowlinks[neighbor])
            elif neighbor in on_stack:
                lowlinks[node] = min(lowlinks[node], index_dict[neighbor])

        # If node is a root node, pop the stack and generate a SCC
        # (Strongly Connected Component)
        if lowlinks[node] == index_dict[node]:
            scc = []
            while True:
                w = index_stack.pop()
                on_stack.remove(w)
                scc.append(w)
                if w == node:
                    break
            if len(scc) > 1 or (len(scc) == 1 and node in edges.get(node, [])):
                # It's a cycle
                cycles.append(scc)

    for node in edges:
        if node not in index_dict:
            strongconnect(node)
    return cycles
def process_sql_file(input_file: str, output_file: str):
    """
    Rewrite an SQL migration so it can be applied as one ordered script:

    - Leave ENUM definitions at the top (tables may use the types).
    - Emit each table followed by the indexes and foreign-key constraints
      that relate to it.
    - Order tables so every table is created before a foreign key
      references it.
    - Break foreign-key cycles by deferring the offending constraints to
      the end of the file.

    Args:
        input_file: path of the SQL migration to read.
        output_file: path the processed SQL is written to.
    """
    with open(input_file, "r") as infile:
        sql_content = infile.read()

    # Split the SQL file by semicolons into blocks.
    # NOTE(review): a plain split breaks on semicolons inside string
    # literals or $$-quoted bodies — fine for simple generated
    # migrations, but worth keeping in mind.
    blocks = sql_content.split(";")

    # Separate blocks into enums, tables, indexes, and constraints.
    enums = []
    tables = {}                 # table name -> CREATE TABLE block
    standalone_indexes = []     # indexes whose target table couldn't be parsed
    standalone_constraints = [] # non-table-resolvable constraints
    table_related_indexes = {}      # table name -> [CREATE INDEX blocks]
    table_related_constraints = {}  # table name -> [FK constraint blocks]
    table_dependencies = defaultdict(set)  # table -> tables it references
    edge_to_constraint = {}     # (source, referenced) -> constraint SQL

    # Classify each block.
    # NOTE(review): dependency tracking assumes CREATE TABLE statements
    # appear before the FK constraints that reference them, since
    # `referenced_table in tables` is checked while scanning.
    for block in blocks:
        block = block.strip()
        if not block:
            continue
        block += ";"
        if "CREATE TYPE" in block:
            enum_name_match = re.search(r'CREATE TYPE "([^"]+)"', block)
            enum_name = enum_name_match.group(1) if enum_name_match else None
            if not enum_name:
                continue
            # enums.append(
            #     wrap_standalone_block(block, "pg_type", f"typname = '{enum_name}'")
            # )
            enums.append(block)
        elif "CREATE TABLE" in block:
            table_name_match = re.search(r'CREATE TABLE "([^"]+)"', block)
            table_name = table_name_match.group(1) if table_name_match else None
            if not table_name:
                continue
            tables[table_name] = block
        elif "CREATE INDEX" in block or "CREATE UNIQUE INDEX" in block:
            table_match = re.search(r'ON "([^"]+)"', block)
            if table_match:
                table_name = table_match.group(1)
                table_related_indexes.setdefault(table_name, []).append(block)
            else:
                index_name_match = re.search(r'CREATE (UNIQUE )?INDEX "([^"]+)"', block)
                index_name = index_name_match.group(2) if index_name_match else None
                if not index_name:
                    continue
                # standalone_indexes.append(
                #     wrap_standalone_block(
                #         block, "pg_indexes", f"indexname = '{index_name}'"
                #     )
                # )
                standalone_indexes.append(block)
        elif "ADD CONSTRAINT" in block and "FOREIGN KEY" in block:
            table_match = re.search(r'ALTER TABLE "([^"]+)"', block)
            if table_match:
                source_table = table_match.group(1)
                table_related_constraints.setdefault(source_table, []).append(block)
                # Extract the referenced table to build the dependency edge.
                ref_table_match = re.search(r'REFERENCES "([^"]+)"', block)
                if ref_table_match:
                    referenced_table = ref_table_match.group(1)
                    # Only tables created in this file constrain ordering;
                    # pre-existing tables (e.g. "User") are always available.
                    if referenced_table in tables:
                        table_dependencies[source_table].add(referenced_table)
                        edge_to_constraint[(source_table, referenced_table)] = block
            else:
                constraint_name_match = re.search(r'ADD CONSTRAINT "([^"]+)"', block)
                constraint_name = (
                    constraint_name_match.group(1) if constraint_name_match else None
                )
                if not constraint_name:
                    continue
                # standalone_constraints.append(
                #     wrap_standalone_block(
                #         block, "pg_constraint", f"conname = '{constraint_name}'"
                #     )
                # )
                standalone_constraints.append(block)
        else:
            print(f"Unhandled block: {block}")

    all_table_names = set(tables.keys())

    # Break FK cycles, then order tables so references point backwards.
    try:
        sorted_tables, deferred_constraints = detect_cycles_and_remove_edges(
            all_table_names, table_dependencies, edge_to_constraint
        )
    except Exception as e:
        print(f"Error: {str(e)}")
        return

    # Deferred constraints are emitted at the very end, so drop them from
    # the per-table constraint lists to avoid duplicates.
    for constraint in deferred_constraints:
        table_match = re.search(r'ALTER TABLE "([^"]+)"', constraint)
        if table_match:
            source_table = table_match.group(1)
            if constraint in table_related_constraints.get(source_table, []):
                table_related_constraints[source_table].remove(constraint)

    final_sql = "BEGIN;\n\n"

    # Enums first: table columns may use the types.
    if enums:
        final_sql += "\n\n".join(enums) + "\n\n"

    # Each table with its related indexes and constraints.
    for table_name in sorted_tables:
        table_block = tables[table_name]
        final_sql += "-" * 100 + "\n"
        final_sql += f"-- Table: {table_name}\n"
        final_sql += "-" * 100 + "\n\n"
        related_indexes = table_related_indexes.get(table_name, [])
        related_constraints = table_related_constraints.get(table_name, [])
        # The IF-NOT-EXISTS wrapping is currently disabled:
        # final_sql += (
        #     wrap_table_with_indexes_and_constraints(
        #         table_block, related_indexes, related_constraints
        #     )
        #     + "\n\n"
        # )
        final_sql += table_block + "\n\n"
        # FIX: previously the per-table indexes and constraints were
        # collected but never written once the wrapping above was disabled,
        # so the processed migration silently dropped every CREATE INDEX
        # and FOREIGN KEY statement.
        if related_indexes:
            final_sql += "\n\n".join(related_indexes) + "\n\n"
        if related_constraints:
            final_sql += "\n\n".join(related_constraints) + "\n\n"

    # Indexes and constraints that could not be tied to a table.
    if standalone_indexes:
        final_sql += "\n\n".join(standalone_indexes) + "\n\n"
    if standalone_constraints:
        final_sql += "\n\n".join(standalone_constraints) + "\n\n"

    # Deferred foreign-key constraints (cycle breakers) go last, when all
    # of the tables involved already exist.
    if deferred_constraints:
        final_sql += "-" * 100 + "\n\n"
        final_sql += "-- Deferred Foreign Key Constraints (Cyclic Dependencies)\n"
        final_sql += "-" * 100 + "\n\n"
        final_sql += "\n\n".join(deferred_constraints) + "\n\n"

    final_sql = final_sql.strip() + "\n\n"
    final_sql += "COMMIT;"

    with open(output_file, "w") as outfile:
        outfile.write(final_sql)

    print(f"Processed SQL written to {output_file}")
if __name__ == "__main__":
    # CLI entry point: rewrite an SQL migration file into a
    # dependency-ordered script (see process_sql_file for details).
    parser = argparse.ArgumentParser(description="Process SQL migration files.")
    parser.add_argument("input_file", help="The input SQL migration file name.")
    parser.add_argument("output_file", help="The desired output file name.")
    args = parser.parse_args()
    process_sql_file(args.input_file, args.output_file)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,165 @@
# Combined compose stack: AGPT platform services plus a self-hosted
# Supabase stack, all attached to the same two networks so they can
# reach each other by service name.
networks:
  app-network:
    name: app-network
  shared-network:
    name: shared-network

volumes:
  db-config:

# Network attachment shared by every AGPT service (merged in via `<<:`).
x-agpt-services:
  &agpt-services
  networks:
    - app-network
    - shared-network

# Network attachment shared by every Supabase service.
x-supabase-services:
  &supabase-services
  networks:
    - app-network
    - shared-network

services:
  # AGPT services
  migrate:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: migrate
  redis:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: redis
  rest_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: rest_server
  executor:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: executor
  websocket_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: websocket_server
  market:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: market
  market-migrations:
    <<: *agpt-services
    extends:
      file: ./docker-compose.platform.yml
      service: market-migrations
  # frontend:
  #   <<: *agpt-services
  #   extends:
  #     file: ./docker-compose.platform.yml
  #     service: frontend

  # Supabase services
  studio:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: studio
  kong:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: kong
  auth:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: auth
    environment:
      # Auto-confirm new accounts so no mail server is needed locally.
      GOTRUE_MAILER_AUTOCONFIRM: true
  rest:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: rest
  realtime:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: realtime
  storage:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: storage
  imgproxy:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: imgproxy
  meta:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: meta
  functions:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: functions
  analytics:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: analytics
  db:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: db
  vector:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: vector

  # Aggregator service (enabled with the `local` profile): depends on the
  # whole stack, then exits immediately (busybox /bin/true) once all
  # dependencies are up.
  deps:
    <<: *supabase-services
    profiles:
      - local
    image: busybox
    command: /bin/true
    depends_on:
      - studio
      - kong
      - auth
      - rest
      - realtime
      - storage
      - imgproxy
      - meta
      - functions
      - analytics
      - db
      - vector
      - redis

View File

@@ -51,6 +51,7 @@ services:
file: ./docker-compose.platform.yml
service: websocket_server
# Agent Store v2 will be integrated with the rest service
market:
<<: *agpt-services
extends:
@@ -70,12 +71,14 @@ services:
# service: frontend
# Supabase services
studio:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: studio
# Uncomment this if you want to use the UI
# studio:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: studio
# Required for auth api routing
kong:
<<: *supabase-services
extends:
@@ -90,42 +93,43 @@ services:
environment:
GOTRUE_MAILER_AUTOCONFIRM: true
rest:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: rest
# rest:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: rest
realtime:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: realtime
# realtime:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: realtime
storage:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: storage
# storage:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: storage
imgproxy:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: imgproxy
# imgproxy:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: imgproxy
meta:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: meta
# meta:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: meta
functions:
<<: *supabase-services
extends:
file: ./supabase/docker/docker-compose.yml
service: functions
# functions:
# <<: *supabase-services
# extends:
# file: ./supabase/docker/docker-compose.yml
# service: functions
# Required for Auth
analytics:
<<: *supabase-services
extends:
@@ -137,6 +141,8 @@ services:
extends:
file: ./supabase/docker/docker-compose.yml
service: db
# Required for db
vector:
<<: *supabase-services
extends:

View File

@@ -9,6 +9,9 @@ const config: StorybookConfig = {
"@storybook/addon-essentials",
"@storybook/addon-interactions",
],
features: {
experimentalRSC: true,
},
framework: {
name: "@storybook/nextjs",
options: {},

View File

@@ -1,8 +1,16 @@
import type { Preview } from "@storybook/react";
import { initialize, mswLoader } from 'msw-storybook-addon';
import "../src/app/globals.css";
// Initialize MSW
initialize();
const preview: Preview = {
parameters: {
nextjs: {
appDirectory: true,
},
controls: {
matchers: {
color: /(background|color)$/i,
@@ -10,6 +18,7 @@ const preview: Preview = {
},
},
},
loaders: [mswLoader],
};
export default preview;

View File

@@ -56,7 +56,8 @@
"dotenv": "^16.4.5",
"embla-carousel-react": "^8.3.0",
"framer-motion": "^11.11.9",
"lucide-react": "^0.454.0",
"geist": "^1.3.1",
"lucide-react": "^0.407.0",
"moment": "^2.30.1",
"negotiator": "^1.0.0",
"next": "^14.2.13",
@@ -97,8 +98,10 @@
"chromatic": "^11.12.5",
"concurrently": "^9.0.1",
"eslint": "^8",
"eslint-config-next": "15.0.2",
"eslint-plugin-storybook": "^0.11.0",
"eslint-config-next": "14.2.4",
"eslint-plugin-storybook": "^0.9.0",
"msw": "^2.5.2",
"msw-storybook-addon": "^2.0.3",
"postcss": "^8",
"prettier": "^3.3.3",
"prettier-plugin-tailwindcss": "^0.6.8",
@@ -106,5 +109,10 @@
"tailwindcss": "^3.4.14",
"typescript": "^5"
},
"packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e"
"packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e",
"msw": {
"workerDirectory": [
"public"
]
}
}

View File

@@ -0,0 +1,293 @@
/* eslint-disable */
/* tslint:disable */
/**
* Mock Service Worker.
* @see https://github.com/mswjs/msw
* - Please do NOT modify this file.
* - Please do NOT serve this file on production.
*/
const PACKAGE_VERSION = '2.5.2'
const INTEGRITY_CHECKSUM = '07a8241b182f8a246a7cd39894799a9e'
const IS_MOCKED_RESPONSE = Symbol('isMockedResponse')
const activeClientIds = new Set()
self.addEventListener('install', function () {
self.skipWaiting()
})
self.addEventListener('activate', function (event) {
event.waitUntil(self.clients.claim())
})
self.addEventListener('message', async function (event) {
const clientId = event.source.id
if (!clientId || !self.clients) {
return
}
const client = await self.clients.get(clientId)
if (!client) {
return
}
const allClients = await self.clients.matchAll({
type: 'window',
})
switch (event.data) {
case 'KEEPALIVE_REQUEST': {
sendToClient(client, {
type: 'KEEPALIVE_RESPONSE',
})
break
}
case 'INTEGRITY_CHECK_REQUEST': {
sendToClient(client, {
type: 'INTEGRITY_CHECK_RESPONSE',
payload: {
packageVersion: PACKAGE_VERSION,
checksum: INTEGRITY_CHECKSUM,
},
})
break
}
case 'MOCK_ACTIVATE': {
activeClientIds.add(clientId)
sendToClient(client, {
type: 'MOCKING_ENABLED',
payload: {
client: {
id: client.id,
frameType: client.frameType,
},
},
})
break
}
case 'MOCK_DEACTIVATE': {
activeClientIds.delete(clientId)
break
}
case 'CLIENT_CLOSED': {
activeClientIds.delete(clientId)
const remainingClients = allClients.filter((client) => {
return client.id !== clientId
})
// Unregister itself when there are no more clients
if (remainingClients.length === 0) {
self.registration.unregister()
}
break
}
}
})
self.addEventListener('fetch', function (event) {
const { request } = event
// Bypass navigation requests.
if (request.mode === 'navigate') {
return
}
// Opening the DevTools triggers the "only-if-cached" request
// that cannot be handled by the worker. Bypass such requests.
if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
return
}
// Bypass all requests when there are no active clients.
// Prevents the self-unregistered worked from handling requests
// after it's been deleted (still remains active until the next reload).
if (activeClientIds.size === 0) {
return
}
// Generate unique request ID.
const requestId = crypto.randomUUID()
event.respondWith(handleRequest(event, requestId))
})
async function handleRequest(event, requestId) {
const client = await resolveMainClient(event)
const response = await getResponse(event, client, requestId)
// Send back the response clone for the "response:*" life-cycle events.
// Ensure MSW is active and ready to handle the message, otherwise
// this message will pend indefinitely.
if (client && activeClientIds.has(client.id)) {
;(async function () {
const responseClone = response.clone()
sendToClient(
client,
{
type: 'RESPONSE',
payload: {
requestId,
isMockedResponse: IS_MOCKED_RESPONSE in response,
type: responseClone.type,
status: responseClone.status,
statusText: responseClone.statusText,
body: responseClone.body,
headers: Object.fromEntries(responseClone.headers.entries()),
},
},
[responseClone.body],
)
})()
}
return response
}
// Resolve the main client for the given event.
// Client that issues a request doesn't necessarily equal the client
// that registered the worker. It's with the latter the worker should
// communicate with during the response resolving phase.
async function resolveMainClient(event) {
const client = await self.clients.get(event.clientId)
if (activeClientIds.has(event.clientId)) {
return client
}
if (client?.frameType === 'top-level') {
return client
}
const allClients = await self.clients.matchAll({
type: 'window',
})
return allClients
.filter((client) => {
// Get only those clients that are currently visible.
return client.visibilityState === 'visible'
})
.find((client) => {
// Find the client ID that's recorded in the
// set of clients that have registered the worker.
return activeClientIds.has(client.id)
})
}
async function getResponse(event, client, requestId) {
const { request } = event
// Clone the request because it might've been already used
// (i.e. its body has been read and sent to the client).
const requestClone = request.clone()
function passthrough() {
const headers = Object.fromEntries(requestClone.headers.entries())
// Remove internal MSW request header so the passthrough request
// complies with any potential CORS preflight checks on the server.
// Some servers forbid unknown request headers.
delete headers['x-msw-intention']
return fetch(requestClone, { headers })
}
// Bypass mocking when the client is not active.
if (!client) {
return passthrough()
}
// Bypass initial page load requests (i.e. static assets).
// The absence of the immediate/parent client in the map of the active clients
// means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet
// and is not ready to handle requests.
if (!activeClientIds.has(client.id)) {
return passthrough()
}
// Notify the client that a request has been intercepted.
const requestBuffer = await request.arrayBuffer()
const clientMessage = await sendToClient(
client,
{
type: 'REQUEST',
payload: {
id: requestId,
url: request.url,
mode: request.mode,
method: request.method,
headers: Object.fromEntries(request.headers.entries()),
cache: request.cache,
credentials: request.credentials,
destination: request.destination,
integrity: request.integrity,
redirect: request.redirect,
referrer: request.referrer,
referrerPolicy: request.referrerPolicy,
body: requestBuffer,
keepalive: request.keepalive,
},
},
[requestBuffer],
)
switch (clientMessage.type) {
case 'MOCK_RESPONSE': {
return respondWithMock(clientMessage.data)
}
case 'PASSTHROUGH': {
return passthrough()
}
}
return passthrough()
}
function sendToClient(client, message, transferrables = []) {
return new Promise((resolve, reject) => {
const channel = new MessageChannel()
channel.port1.onmessage = (event) => {
if (event.data && event.data.error) {
return reject(event.data.error)
}
resolve(event.data)
}
client.postMessage(
message,
[channel.port2].concat(transferrables.filter(Boolean)),
)
})
}
async function respondWithMock(response) {
  // A status of 0 cannot be given to the Response constructor: it only
  // occurs when the client mocked "Response.error()", so reproduce a
  // network error response directly instead.
  if (response.status === 0) {
    return Response.error()
  }

  const mocked = new Response(response.body, response)

  // Tag the response so consumers can distinguish mocked responses
  // from real network responses.
  Reflect.defineProperty(mocked, IS_MOCKED_RESPONSE, {
    enumerable: true,
    value: true,
  })

  return mocked
}

View File

@@ -10,6 +10,10 @@ import TallyPopupSimple from "@/components/TallyPopup";
import { GoogleAnalytics } from "@next/third-parties/google";
import { Toaster } from "@/components/ui/toaster";
// Import Fonts
import { GeistSans } from 'geist/font/sans';
import { GeistMono } from 'geist/font/mono';
const inter = Inter({ subsets: ["latin"] });
export const metadata: Metadata = {

View File

@@ -10,16 +10,31 @@ const meta = {
center: true,
padding: 0,
},
nextjs: {
appDirectory: true,
navigation: {
pathname: '/search',
query: {
searchTerm: ''
}
}
}
},
tags: ["autodocs"],
argTypes: {
onSearch: { action: "searched" },
placeholder: { control: "text" },
backgroundColor: { control: "text" },
iconColor: { control: "text" },
textColor: { control: "text" },
placeholderColor: { control: "text" },
},
decorators: [
(Story) => (
<div className="w-full max-w-screen-lg mx-auto p-4">
<Story />
</div>
),
],
} satisfies Meta<typeof SearchBar>;
export default meta;
@@ -27,17 +42,15 @@ type Story = StoryObj<typeof meta>;
export const Default: Story = {
args: {
onSearch: (query: string) => console.log(`Searched: ${query}`),
placeholder: 'Search for tasks like "optimise SEO"',
},
};
export const CustomStyles: Story = {
args: {
onSearch: (query: string) => console.log(`Searched: ${query}`),
placeholder: "Enter your search query",
backgroundColor: "bg-blue-100",
iconColor: "text-blue-500",
iconColor: "text-blue-500",
textColor: "text-blue-700",
placeholderColor: "text-blue-400",
},
@@ -45,7 +58,6 @@ export const CustomStyles: Story = {
export const WithInteraction: Story = {
args: {
onSearch: (query: string) => console.log(`Searched: ${query}`),
placeholder: "Type and press Enter",
},
play: async ({ canvasElement }) => {
@@ -61,7 +73,6 @@ export const WithInteraction: Story = {
export const EmptySubmit: Story = {
args: {
onSearch: (query: string) => console.log(`Searched: ${query}`),
placeholder: "Empty submit test",
},
play: async ({ canvasElement }) => {

View File

@@ -1,11 +1,11 @@
"use client";
import * as React from "react";
import { useRouter } from "next/navigation";
import { MagnifyingGlassIcon } from "@radix-ui/react-icons";
interface SearchBarProps {
onSearch: (query: string) => void;
placeholder?: string;
backgroundColor?: string;
iconColor?: string;
@@ -15,22 +15,25 @@ interface SearchBarProps {
/** SearchBar component for user input and search functionality. */
export const SearchBar: React.FC<SearchBarProps> = ({
onSearch,
placeholder = 'Search for tasks like "optimise SEO"',
backgroundColor = "bg-neutral-100",
iconColor = "text-[#646464]",
textColor = "text-[#707070]",
placeholderColor = "text-[#707070]",
}) => {
const [searchQuery, setSearchQuery] = React.useState("");
const router = useRouter();
const handleInputChange = (event: React.ChangeEvent<HTMLInputElement>) => {
setSearchQuery(event.target.value);
};
const [searchQuery, setSearchQuery] = React.useState("");
const handleSubmit = (event: React.FormEvent<HTMLFormElement>) => {
event.preventDefault();
onSearch(searchQuery);
console.log(searchQuery);
if (searchQuery.trim()) {
// Encode the search term and navigate to the desired path
const encodedTerm = encodeURIComponent(searchQuery);
router.push(`/search?searchTerm=${encodedTerm}`);
}
};
return (
@@ -40,15 +43,15 @@ export const SearchBar: React.FC<SearchBarProps> = ({
data-testid="store-search-bar"
>
<div
className={`h-12 px-4 py-2 md:h-[4.5rem] md:px-6 md:py-[0.625rem] ${backgroundColor} flex items-center gap-2 rounded-full md:gap-5`}
className={`h-12 px-4 py-2 md:h-[4.5rem] md:px-6 md:py-1 ${backgroundColor} flex items-center gap-2 rounded-full md:gap-5`}
>
<MagnifyingGlassIcon className={`h-5 w-5 md:h-7 md:w-7 ${iconColor}`} />
<input
type="text"
value={searchQuery}
onChange={handleInputChange}
onChange={(e) => setSearchQuery(e.target.value)}
placeholder={placeholder}
className={`flex-grow border-none bg-transparent ${textColor} font-neue text-lg font-normal leading-[2.25rem] tracking-tight md:text-xl placeholder:${placeholderColor} focus:outline-none`}
className={`flex-grow border-none bg-transparent ${textColor} font-['Geist'] text-lg font-normal leading-[2.25rem] tracking-tight md:text-xl placeholder:${placeholderColor} focus:outline-none`}
data-testid="store-search-input"
/>
</div>

View File

@@ -1,16 +1,19 @@
"use client"
import * as React from "react";
import { SearchBar } from "@/components/agptui/SearchBar";
import { FilterChips } from "@/components/agptui/FilterChips";
import { useRouter } from "next/navigation";
interface HeroSectionProps {
onSearch: (query: string) => void;
onFilterChange: (selectedFilters: string[]) => void;
}
export const HeroSection: React.FC = () => {
const router = useRouter();
function onFilterChange(selectedFilters: string[]) {
const encodedTerm = encodeURIComponent(selectedFilters.join(", "));
router.push(`/search?searchTerm=${encodedTerm}`);
}
export const HeroSection: React.FC<HeroSectionProps> = ({
onSearch,
onFilterChange,
}) => {
return (
<div className="mb-2 mt-8 flex flex-col items-center justify-center px-4 sm:mb-4 sm:mt-12 sm:px-6 md:mb-6 md:mt-16 lg:my-24 lg:px-8 xl:my-16">
<div className="w-full max-w-3xl lg:max-w-4xl xl:max-w-5xl">
@@ -33,7 +36,7 @@ export const HeroSection: React.FC<HeroSectionProps> = ({
Bringing you AI agents designed by thinkers from around the world
</div>
<div className="mb-4 sm:mb-5 md:mb-6">
<SearchBar onSearch={onSearch} />
<SearchBar />
</div>
<div>
<div className="flex justify-center">
@@ -46,6 +49,7 @@ export const HeroSection: React.FC<HeroSectionProps> = ({
"Lorem ipsum",
]}
onFilterChange={onFilterChange}
multiSelect={false}
/>
</div>
</div>

View File

@@ -58,10 +58,6 @@ export const Page: React.FC<PageProps> = ({
featuredCreators,
menuItemGroups,
}) => {
const handleSearch = (query: string) => {
console.log("Search query:", query);
// Implement search functionality
};
const handleFilterChange = (selectedFilters: string[]) => {
console.log("Selected filters:", selectedFilters);
@@ -95,7 +91,6 @@ export const Page: React.FC<PageProps> = ({
/>
<main className="px-4">
<HeroSection
onSearch={handleSearch}
onFilterChange={handleFilterChange}
/>
<FeaturedSection

File diff suppressed because it is too large Load Diff