Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-01-12 00:28:31 -05:00
Compare commits: 54 commits (updated-do...go)
| SHA1 |
|---|
| 5e0dad5a96 |
| f598ba27b6 |
| 286202cc66 |
| 55eb917162 |
| c2843eecfa |
| 3971fbd800 |
| e04838feb5 |
| 3ef1f6e380 |
| 705c63b801 |
| da007e3a22 |
| 9e18c26e65 |
| 30e3d65711 |
| b9c26b1a6b |
| 220a127e51 |
| 004e49edb1 |
| bf21bb1fa5 |
| 530ddf2c34 |
| 33ee2f2ee5 |
| ae1d410b65 |
| 196a5d6b59 |
| 82c1249d33 |
| 9389a30298 |
| 9b58faeeb6 |
| 9cb55c5ac0 |
| eb1df12ce8 |
| 12f40596b3 |
| 3af26a9379 |
| 4041ed3e33 |
| ee78653425 |
| 76c5a27044 |
| c6aba70dd4 |
| 470e7036b9 |
| f6608754aa |
| bd70ab00e0 |
| 2c940b381a |
| 03b30ebf5b |
| 0d8c2a820e |
| 5ce562e11f |
| fcf2247c20 |
| 7326ee1221 |
| 70c7a3b1f3 |
| 6bdab5b777 |
| 5f5e31ac19 |
| 9e71b658d6 |
| b7b23d68b4 |
| 630f401cee |
| 94fbcfb501 |
| 8102f78030 |
| b1eb259bb3 |
| c738eb3bc6 |
| 60fca5c5f0 |
| bba9836735 |
| ad76bd1300 |
| fa16c207e0 |
.github/PULL_REQUEST_TEMPLATE.md (vendored): 30 changed lines
@@ -6,18 +6,26 @@
<!-- Concisely describe all of the changes made in this pull request: -->

### Testing 🔍
> [!NOTE]
Only for the new autogpt platform, currently in rnd/

### PR Quality Scorecard ✨

<!--
Please make sure your changes have been tested and are in good working condition.
Here is a list of our critical paths, if you need some inspiration on what and how to test:
Check out our contribution guide:
https://github.com/Significant-Gravitas/AutoGPT/wiki/Contributing

1. Avoid duplicate work, issues, PRs etc.
2. Also consider contributing something other than code; see the [contribution guide]
   for options.
3. Clearly explain your changes.
4. Avoid making unnecessary changes, especially if they're purely based on personal
   preferences. Doing so is the maintainers' job. ;-)
-->

- Create from scratch and execute an agent with at least 3 blocks
- Import an agent from file upload, and confirm it executes correctly
- Upload agent to marketplace
- Import an agent from marketplace and confirm it executes correctly
- Edit an agent from monitor, and confirm it executes correctly
- [x] Have you used the PR description template?   `+2 pts`
- [ ] Is your pull request atomic, focusing on a single change?   `+5 pts`
- [ ] Have you linked the GitHub issue(s) that this PR addresses?   `+5 pts`
- [ ] Have you documented your changes clearly and comprehensively?   `+5 pts`
- [ ] Have you changed or added a feature?   `-4 pts`
- [ ] Have you added/updated corresponding documentation?   `+4 pts`
- [ ] Have you added/updated corresponding integration tests?   `+5 pts`
- [ ] Have you changed the behavior of AutoGPT?   `-5 pts`
- [ ] Have you also run `agbenchmark` to verify that these changes do not regress performance?   `+10 pts`
.github/workflows/autogpt-server-docker.yml (vendored): 41 changed lines
@@ -1,41 +0,0 @@
name: AutoGPT Server Docker Build & Push

on:
  push:
    branches: [ updated-docker-ci ]
    paths:
      - '**'

defaults:
  run:
    shell: bash
    working-directory: AutoGPT

env:
  PROJECT_ID: agpt-dev
  IMAGE_NAME: agpt-server-dev
  REGION: us-central1

jobs:
  build-and-push:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2

      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v0.2.1
        with:
          project_id: ${{ env.PROJECT_ID }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
          export_default_credentials: true

      - name: Configure Docker
        run: gcloud auth configure-docker ${{ env.REGION }}-docker.pkg.dev

      - name: Build Docker image
        run: docker build -t ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }} -f rnd/autogpt_server/Dockerfile .

      - name: Push Docker image
        run: docker push ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }}
.gitmodules (vendored): 3 changed lines
@@ -1,6 +1,3 @@
[submodule "forge/tests/vcr_cassettes"]
    path = forge/tests/vcr_cassettes
    url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes
[submodule "rnd/supabase"]
    path = rnd/supabase
    url = https://github.com/supabase/supabase.git
@@ -185,7 +185,7 @@ If you don't know which to choose, you can safely go with OpenAI*.
1. Get your Groq API key from [Settings > API keys][groq/api-keys]
2. Open `.env`
3. Find the line that says `GROQ_API_KEY=`
4. Insert your Groq API Key directly after = without quotes or spaces:
4. Insert your Anthropic API Key directly after = without quotes or spaces:
   ```ini
   GROQ_API_KEY=gsk_xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
   ```
@@ -102,14 +102,21 @@ poetry run prisma generate

Without running this command, the necessary Python modules (prisma.models) won't be available, leading to a `ModuleNotFoundError`.

### Running the server
### Running the server without Docker

To run the server, you can run the following commands in the same terminal you ran the `poetry install` command:

```bash
cp supabase/docker/.env.example .env
docker compose -f docker-compose.combined.yml build
docker compose -f docker-compose.combined.yml up -d
poetry run app
```

### Running the server within Docker

To run the server, you can run the following commands in the same terminal you ran the `poetry install` command:

```bash
docker compose build
docker compose up
```

In the other terminal from autogpt_builder, you can run the following command to start the frontend:
@@ -14,40 +14,21 @@ Welcome to the AutoGPT Platform - a powerful system for creating and running AI

To run the AutoGPT Platform, follow these steps:

1. Clone this repository to your local machine.
2. Navigate to rnd/supabase
2. Navigate to the project directory.
3. Run the following command:
   ```
   git submodule update --init --recursive
   ```
4. Navigate back to rnd (cd ..)
5. Run the following command:
   ```
   cp supabase/docker/.env.example .env
   ```
6. Run the following command:
   ```
   docker compose -f docker-compose.combined.yml up -d
   docker compose up -d
   ```
   This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode.
7. Navigate to rnd/autogpt_builder.
8. Run the following command:
   ```
   cp .env.example .env.local
   ```
9. Run the following command:
   ```
   yarn dev
   ```
   This command will start all the necessary services defined in the `docker-compose.yml` file in detached mode.

### Docker Compose Commands

Here are some useful Docker Compose commands for managing your AutoGPT Platform:

- `docker compose -f docker-compose.combined.yml up -d`: Start the services in detached mode.
- `docker compose -f docker-compose.combined.yml stop`: Stop the running services without removing them.
- `docker compose up -d`: Start the services in detached mode.
- `docker compose stop`: Stop the running services without removing them.
- `docker compose rm`: Remove stopped service containers.
- `docker compose build`: Build or rebuild services.
- `docker compose down`: Stop and remove containers, networks, and volumes.
@@ -1,12 +1,12 @@
NEXT_PUBLIC_AUTH_CALLBACK_URL=http://localhost:8006/auth/callback
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8005/api/v1/market

## Supabase credentials

NEXT_PUBLIC_SUPABASE_URL=http://localhost:8000
NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
## YOU ONLY NEED THEM IF YOU WANT TO USE SUPABASE USER AUTHENTICATION
## If you're using self-hosted version then you most likely don't need to set this
# NEXT_PUBLIC_SUPABASE_URL=your-project-url
# NEXT_PUBLIC_SUPABASE_ANON_KEY=your-anon-key

## OAuth Callback URL
## This should be {domain}/auth/callback
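The frontend reads these `NEXT_PUBLIC_*` variables with localhost fallbacks, as the marketplace page diffs further down show. A minimal sketch of that pattern; the helper names are illustrative, not from the repo:

```ts
// Hypothetical helpers mirroring the `process.env.X || "http://localhost:..."`
// fallback pattern used in the marketplace pages changed below in this diff.
export function getMarketplaceUrl(): string {
  return (
    process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
    "http://localhost:8005/api/v1/market" // default port per the updated .env.example
  );
}

export function getServerUrl(): string {
  return process.env.NEXT_PUBLIC_AGPT_SERVER_URL || "http://localhost:8000/api";
}
```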
rnd/autogpt_builder/.gitignore (vendored): 3 changed lines
@@ -34,6 +34,3 @@ yarn-error.log*

# typescript
*.tsbuildinfo
next-env.d.ts

# Sentry Config File
.env.sentry-build-plugin
@@ -1,4 +1,3 @@
import { withSentryConfig } from "@sentry/nextjs";
import dotenv from "dotenv";

// Load environment variables

@@ -29,56 +28,4 @@ const nextConfig = {
  },
};

export default withSentryConfig(nextConfig, {
  // For all available options, see:
  // https://github.com/getsentry/sentry-webpack-plugin#options

  org: "significant-gravitas",
  project: "builder",

  // Only print logs for uploading source maps in CI
  silent: !process.env.CI,

  // For all available options, see:
  // https://docs.sentry.io/platforms/javascript/guides/nextjs/manual-setup/

  // Upload a larger set of source maps for prettier stack traces (increases build time)
  widenClientFileUpload: true,

  // Automatically annotate React components to show their full name in breadcrumbs and session replay
  reactComponentAnnotation: {
    enabled: true,
  },

  // Route browser requests to Sentry through a Next.js rewrite to circumvent ad-blockers.
  // This can increase your server load as well as your hosting bill.
  // Note: Check that the configured route will not match with your Next.js middleware, otherwise reporting of client-
  // side errors will fail.
  tunnelRoute: "/monitoring",

  // Hides source maps from generated client bundles
  hideSourceMaps: true,

  // Automatically tree-shake Sentry logger statements to reduce bundle size
  disableLogger: true,

  // Enables automatic instrumentation of Vercel Cron Monitors. (Does not yet work with App Router route handlers.)
  // See the following for more information:
  // https://docs.sentry.io/product/crons/
  // https://vercel.com/docs/cron-jobs
  automaticVercelMonitors: true,

  async headers() {
    return [
      {
        source: "/:path*",
        headers: [
          {
            key: "Document-Policy",
            value: "js-profiling",
          },
        ],
      },
    ];
  },
});
export default nextConfig;
@@ -27,7 +27,6 @@
    "@radix-ui/react-switch": "^1.1.0",
    "@radix-ui/react-toast": "^1.2.1",
    "@radix-ui/react-tooltip": "^1.1.2",
    "@sentry/nextjs": "^8",
    "@supabase/ssr": "^0.4.0",
    "@supabase/supabase-js": "^2.45.0",
    "@tanstack/react-table": "^8.20.5",
@@ -1,57 +0,0 @@
// This file configures the initialization of Sentry on the client.
// The config you add here will be used whenever a user loads a page in their browser.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";

Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  // Add optional integrations for additional features
  integrations: [
    Sentry.replayIntegration(),
    Sentry.httpClientIntegration(),
    Sentry.replayCanvasIntegration(),
    Sentry.reportingObserverIntegration(),
    Sentry.browserProfilingIntegration(),
    // Sentry.feedbackIntegration({
    //   // Additional SDK configuration goes in here, for example:
    //   colorScheme: "system",
    // }),
  ],

  // Define how likely traces are sampled. Adjust this value in production, or use tracesSampler for greater control.
  tracesSampleRate: 1,

  // Set `tracePropagationTargets` to control for which URLs trace propagation should be enabled
  tracePropagationTargets: [
    "localhost",
    /^https:\/\/dev\-builder\.agpt\.co\/api/,
  ],

  beforeSend(event, hint) {
    // Check if it is an exception, and if so, show the report dialog
    if (event.exception && event.event_id) {
      Sentry.showReportDialog({ eventId: event.event_id });
    }
    return event;
  },

  // Define how likely Replay events are sampled.
  // This sets the sample rate to be 10%. You may want this to be 100% while
  // in development and sample at a lower rate in production
  replaysSessionSampleRate: 0.1,

  // Define how likely Replay events are sampled when an error occurs.
  replaysOnErrorSampleRate: 1.0,

  // Setting this option to true will print useful information to the console while you're setting up Sentry.
  debug: false,

  // Set profilesSampleRate to 1.0 to profile every transaction.
  // Since profilesSampleRate is relative to tracesSampleRate,
  // the final profiling rate can be computed as tracesSampleRate * profilesSampleRate
  // For example, a tracesSampleRate of 0.5 and profilesSampleRate of 0.5 would
  // result in 25% of transactions being profiled (0.5*0.5=0.25)
  profilesSampleRate: 1.0,
});
@@ -1,16 +0,0 @@
// This file configures the initialization of Sentry for edge features (middleware, edge routes, and so on).
// The config you add here will be used whenever one of the edge features is loaded.
// Note that this config is unrelated to the Vercel Edge Runtime and is also required when running locally.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";

Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  // Define how likely traces are sampled. Adjust this value in production, or use tracesSampler for greater control.
  tracesSampleRate: 1,

  // Setting this option to true will print useful information to the console while you're setting up Sentry.
  debug: false,
});
@@ -1,23 +0,0 @@
// This file configures the initialization of Sentry on the server.
// The config you add here will be used whenever the server handles a request.
// https://docs.sentry.io/platforms/javascript/guides/nextjs/

import * as Sentry from "@sentry/nextjs";
// import { NodeProfilingIntegration } from "@sentry/profiling-node";

Sentry.init({
  dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288",

  // Define how likely traces are sampled. Adjust this value in production, or use tracesSampler for greater control.
  tracesSampleRate: 1,

  // Setting this option to true will print useful information to the console while you're setting up Sentry.
  debug: false,

  // Integrations
  integrations: [
    Sentry.anrIntegration(),
    // NodeProfilingIntegration,
    // Sentry.fsIntegration(),
  ],
});
@@ -1,27 +0,0 @@
"use client";

import * as Sentry from "@sentry/nextjs";
import NextError from "next/error";
import { useEffect } from "react";

export default function GlobalError({
  error,
}: {
  error: Error & { digest?: string };
}) {
  useEffect(() => {
    Sentry.captureException(error);
  }, [error]);

  return (
    <html>
      <body>
        {/* `NextError` is the default Next.js error page component. Its type
            definition requires a `statusCode` prop. However, since the App Router
            does not expose status codes for errors, we simply pass 0 to render a
            generic error message. */}
        <NextError statusCode={0} />
      </body>
    </html>
  );
}
@@ -3,7 +3,6 @@ import { revalidatePath } from "next/cache";
import { redirect } from "next/navigation";
import { createServerClient } from "@/lib/supabase/server";
import { z } from "zod";
import * as Sentry from "@sentry/nextjs";

const loginFormSchema = z.object({
  email: z.string().email().min(2).max(64),
@@ -11,54 +10,45 @@ const loginFormSchema = z.object({
});

export async function login(values: z.infer<typeof loginFormSchema>) {
  return await Sentry.withServerActionInstrumentation("login", {}, async () => {
    const supabase = createServerClient();
  const supabase = createServerClient();

    if (!supabase) {
      redirect("/error");
    }
  if (!supabase) {
    redirect("/error");
  }

    // We are sure that the values are of the correct type because zod validates the form
    const { data, error } = await supabase.auth.signInWithPassword(values);
  // We are sure that the values are of the correct type because zod validates the form
  const { data, error } = await supabase.auth.signInWithPassword(values);

    if (error) {
      return error.message;
    }
  if (error) {
    return error.message;
  }

    if (data.session) {
      await supabase.auth.setSession(data.session);
    }
  if (data.session) {
    await supabase.auth.setSession(data.session);
  }

    revalidatePath("/", "layout");
    redirect("/profile");
  });
  revalidatePath("/", "layout");
  redirect("/profile");
}

export async function signup(values: z.infer<typeof loginFormSchema>) {
  "use server";
  return await Sentry.withServerActionInstrumentation(
    "signup",
    {},
    async () => {
      const supabase = createServerClient();
  const supabase = createServerClient();

      if (!supabase) {
        redirect("/error");
      }
  if (!supabase) {
    redirect("/error");
  }

      // We are sure that the values are of the correct type because zod validates the form
      const { data, error } = await supabase.auth.signUp(values);
  // We are sure that the values are of the correct type because zod validates the form
  const { data, error } = await supabase.auth.signUp(values);

      if (error) {
        return error.message;
      }
  if (error) {
    return error.message;
  }

      if (data.session) {
        await supabase.auth.setSession(data.session);
      }
  if (data.session) {
    await supabase.auth.setSession(data.session);
  }

      revalidatePath("/", "layout");
      redirect("/profile");
    },
  );
  revalidatePath("/", "layout");
  redirect("/profile");
}
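The pattern removed here (and in the marketplace actions further down) wraps each Next.js server action in Sentry instrumentation. A minimal before/after sketch of that shape; `doWork` is a hypothetical stand-in for the real action body, while the wrapper call matches the signature used in the diff above:

```ts
import * as Sentry from "@sentry/nextjs";

// Before: the action body runs inside Sentry's server-action instrumentation.
export async function instrumentedAction(input: string) {
  return await Sentry.withServerActionInstrumentation("myAction", {}, async () => {
    return doWork(input); // hypothetical helper standing in for the real body
  });
}

// After: the same body, unwrapped, as in the login/signup diff above.
export async function plainAction(input: string) {
  return doWork(input);
}

async function doWork(input: string): Promise<string> {
  return input.toUpperCase();
}
```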
@@ -7,7 +7,7 @@ import AgentDetailContent from "@/components/marketplace/AgentDetailContent";
async function getAgentDetails(id: string): Promise<AgentDetailResponse> {
  const apiUrl =
    process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
    "http://localhost:8015/api/v1/market";
    "http://localhost:8001/api/v1/market";
  const api = new MarketplaceAPI(apiUrl);
  try {
    console.log(`Fetching agent details for id: ${id}`);

@@ -185,7 +185,7 @@ const Pagination: React.FC<{
const Marketplace: React.FC = () => {
  const apiUrl =
    process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
    "http://localhost:8015/api/v1/market";
    "http://localhost:8001/api/v1/market";
  const api = useMemo(() => new MarketplaceAPI(apiUrl), [apiUrl]);

  const [searchValue, setSearchValue] = useState("");
@@ -1,32 +0,0 @@
"use client";

import { useState, useEffect } from "react";
import { Button } from "@/components/ui/button";
import { IconRefresh } from "@/components/ui/icons";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";

export default function CreditButton() {
  const [credit, setCredit] = useState<number | null>(null);
  const api = new AutoGPTServerAPI();

  const fetchCredit = async () => {
    const response = await api.getUserCredit();
    setCredit(response.credits);
  };
  useEffect(() => {
    fetchCredit();
  }, [api]);

  return (
    credit !== null && (
      <Button
        onClick={fetchCredit}
        variant="outline"
        className="flex items-center space-x-2 text-muted-foreground"
      >
        <span>Credits: {credit}</span>
        <IconRefresh />
      </Button>
    )
  );
}
@@ -16,7 +16,6 @@ import {
  Category,
  NodeExecutionResult,
  BlockUIType,
  BlockCost,
} from "@/lib/autogpt-server-api/types";
import { beautifyString, cn, setNestedProperty } from "@/lib/utils";
import { Button } from "@/components/ui/button";
@@ -46,7 +45,6 @@ export type ConnectionData = Array<{

export type CustomNodeData = {
  blockType: string;
  blockCosts: BlockCost[];
  title: string;
  description: string;
  categories: Category[];
@@ -523,18 +521,6 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
    );
  });

  const inputValues = data.hardcodedValues;
  const blockCost =
    data.blockCosts &&
    data.blockCosts.find((cost) =>
      Object.entries(cost.cost_filter).every(
        // Undefined, null, or empty values are considered equal
        ([key, value]) =>
          value === inputValues[key] || (!value && !inputValues[key]),
      ),
    );
  console.debug(`Block cost ${inputValues}|${data.blockCosts}=${blockCost}`);

  return (
    <div
      className={`${data.uiType === BlockUIType.NOTE ? "w-[300px]" : "w-[500px]"} ${blockClasses} ${errorClass} ${statusClass} ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : "bg-white"}`}
@@ -576,11 +562,6 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
        )}
      </div>
    </div>
    {blockCost && (
      <div className="p-3 text-right font-semibold">
        Cost: {blockCost.cost_amount} / {blockCost.cost_type}
      </div>
    )}
    {data.uiType !== BlockUIType.NOTE ? (
      <div className="flex items-start justify-between p-3">
        <div>
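The cost matcher deleted above has a subtle rule: a `cost_filter` entry matches when it strictly equals the node's hardcoded input value, or when both sides are "empty" (undefined, null, or `""`). A standalone sketch with simplified types; the `BlockCost` shape is reduced to the fields the matcher touches:

```ts
type BlockCost = {
  cost_amount: number;
  cost_type: string;
  cost_filter: Record<string, unknown>;
};

// Restatement of the matcher removed from CustomNode: a cost applies when every
// cost_filter entry equals the corresponding input value, where undefined, null,
// and "" are all treated as "no value" and considered equal to each other.
function findBlockCost(
  blockCosts: BlockCost[],
  inputValues: Record<string, unknown>,
): BlockCost | undefined {
  return blockCosts.find((cost) =>
    Object.entries(cost.cost_filter).every(
      ([key, value]) =>
        value === inputValues[key] || (!value && !inputValues[key]),
    ),
  );
}

// Example: a cost filtered on { model: "gpt-4" } only matches nodes whose
// hardcoded "model" input is "gpt-4"; an empty filter matches any node.
const costs: BlockCost[] = [
  { cost_amount: 3, cost_type: "run", cost_filter: { model: "gpt-4" } },
  { cost_amount: 1, cost_type: "run", cost_filter: {} },
];
console.log(findBlockCost(costs, { model: "gpt-4" })?.cost_amount); // 3
console.log(findBlockCost(costs, { model: "llama" })?.cost_amount); // 1
```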
@@ -27,7 +27,7 @@ import "@xyflow/react/dist/style.css";
import { CustomNode } from "./CustomNode";
import "./flow.css";
import { Link } from "@/lib/autogpt-server-api";
import { getTypeColor, filterBlocksByType } from "@/lib/utils";
import { getTypeColor } from "@/lib/utils";
import { history } from "./history";
import { CustomEdge } from "./CustomEdge";
import ConnectionLine from "./ConnectionLine";
@@ -36,19 +36,14 @@ import { SaveControl } from "@/components/edit/control/SaveControl";
import { BlocksControl } from "@/components/edit/control/BlocksControl";
import {
  IconPlay,
  IconUndo2,
  IconRedo2,
  IconSquare,
  IconOutput,
  IconUndo2,
} from "@/components/ui/icons";
import { startTutorial } from "./tutorial";
import useAgentGraph from "@/hooks/useAgentGraph";
import { v4 as uuidv4 } from "uuid";
import { useRouter, usePathname, useSearchParams } from "next/navigation";
import { LogOut } from "lucide-react";
import RunnerUIWrapper, {
  RunnerUIWrapperRef,
} from "@/components/RunnerUIWrapper";

// This is for the history, this is the minimum distance a block must move before it is logged
// It helps to prevent spamming the history with small movements especially when pressing on a input in a block
@@ -106,8 +101,6 @@ const FlowEditor: React.FC<{
  // State to control if blocks menu should be pinned open
  const [pinBlocksPopover, setPinBlocksPopover] = useState(false);

  const runnerUIRef = useRef<RunnerUIWrapperRef>(null);

  useEffect(() => {
    const params = new URLSearchParams(window.location.search);

@@ -414,7 +407,6 @@ const FlowEditor: React.FC<{
      position: viewportCenter, // Set the position to the calculated viewport center
      data: {
        blockType: nodeType,
        blockCosts: nodeSchema.costs,
        title: `${nodeType} ${nodeId}`,
        description: nodeSchema.description,
        categories: nodeSchema.categories,
@@ -558,21 +550,9 @@ const FlowEditor: React.FC<{
      onClick: handleRedo,
    },
    {
      label: !savedAgent
        ? "Please save the agent to run"
        : !isRunning
          ? "Run"
          : "Stop",
      label: !isRunning ? "Run" : "Stop",
      icon: !isRunning ? <IconPlay /> : <IconSquare />,
      onClick: !isRunning
        ? () => runnerUIRef.current?.runOrOpenInput()
        : requestStopRun,
      disabled: !savedAgent,
    },
    {
      label: "Runner Output",
      icon: <LogOut size={18} strokeWidth={1.8} />,
      onClick: () => runnerUIRef.current?.openRunnerOutput(),
      onClick: !isRunning ? requestSaveAndRun : requestStopRun,
    },
  ];

@@ -608,21 +588,12 @@ const FlowEditor: React.FC<{
        <SaveControl
          agentMeta={savedAgent}
          onSave={(isTemplate) => requestSave(isTemplate ?? false)}
          agentDescription={agentDescription}
          onDescriptionChange={setAgentDescription}
          agentName={agentName}
          onNameChange={setAgentName}
        />
      </ControlPanel>
    </ReactFlow>
  </div>
  <RunnerUIWrapper
    ref={runnerUIRef}
    nodes={nodes}
    setNodes={setNodes}
    isRunning={isRunning}
    requestSaveAndRun={requestSaveAndRun}
  />
</FlowContext.Provider>
);
};
@@ -9,12 +9,9 @@ import {
  IconCircleUser,
  IconMenu,
  IconPackage2,
  IconRefresh,
  IconSquareActivity,
  IconWorkFlow,
} from "@/components/ui/icons";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
import CreditButton from "@/components/CreditButton";

export async function NavBar() {
  const isAvailable = Boolean(
@@ -99,8 +96,6 @@ export async function NavBar() {
        </a>
      </div>
      <div className="flex flex-1 items-center justify-end gap-4">
        {isAvailable && user && <CreditButton />}

        {isAvailable && !user && (
          <Link
            href="/login"
@@ -1,141 +0,0 @@
import React, {
  useState,
  useCallback,
  forwardRef,
  useImperativeHandle,
} from "react";
import RunnerInputUI from "./runner-ui/RunnerInputUI";
import RunnerOutputUI from "./runner-ui/RunnerOutputUI";
import { Node } from "@xyflow/react";
import { filterBlocksByType } from "@/lib/utils";
import { BlockIORootSchema } from "@/lib/autogpt-server-api/types";

interface RunnerUIWrapperProps {
  nodes: Node[];
  setNodes: React.Dispatch<React.SetStateAction<Node[]>>;
  isRunning: boolean;
  requestSaveAndRun: () => void;
}

export interface RunnerUIWrapperRef {
  openRunnerInput: () => void;
  openRunnerOutput: () => void;
  runOrOpenInput: () => void;
}

const RunnerUIWrapper = forwardRef<RunnerUIWrapperRef, RunnerUIWrapperProps>(
  ({ nodes, setNodes, isRunning, requestSaveAndRun }, ref) => {
    const [isRunnerInputOpen, setIsRunnerInputOpen] = useState(false);
    const [isRunnerOutputOpen, setIsRunnerOutputOpen] = useState(false);

    const getBlockInputsAndOutputs = useCallback(() => {
      const inputBlocks = filterBlocksByType(
        nodes,
        (node) => node.data.block_id === "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
      );

      const outputBlocks = filterBlocksByType(
        nodes,
        (node) => node.data.block_id === "363ae599-353e-4804-937e-b2ee3cef3da4",
      );

      const inputs = inputBlocks.map((node) => ({
        id: node.id,
        type: "input" as const,
        inputSchema: node.data.inputSchema as BlockIORootSchema,
        hardcodedValues: {
          name: (node.data.hardcodedValues as any).name || "",
          description: (node.data.hardcodedValues as any).description || "",
          value: (node.data.hardcodedValues as any).value,
          placeholder_values:
            (node.data.hardcodedValues as any).placeholder_values || [],
          limit_to_placeholder_values:
            (node.data.hardcodedValues as any).limit_to_placeholder_values ||
            false,
        },
      }));

      const outputs = outputBlocks.map((node) => ({
        id: node.id,
        type: "output" as const,
        outputSchema: node.data.outputSchema as BlockIORootSchema,
        hardcodedValues: {
          name: (node.data.hardcodedValues as any).name || "Output",
          description:
            (node.data.hardcodedValues as any).description ||
            "Output from the agent",
          value: (node.data.hardcodedValues as any).value,
        },
        result: (node.data.executionResults as any)?.at(-1)?.data?.output,
      }));

      return { inputs, outputs };
    }, [nodes]);

    const handleInputChange = useCallback(
      (nodeId: string, field: string, value: string) => {
        setNodes((nds) =>
          nds.map((node) => {
            if (node.id === nodeId) {
              return {
                ...node,
                data: {
                  ...node.data,
                  hardcodedValues: {
                    ...(node.data.hardcodedValues as any),
                    [field]: value,
                  },
                },
              };
            }
            return node;
          }),
        );
      },
      [setNodes],
    );

    const openRunnerInput = () => setIsRunnerInputOpen(true);
    const openRunnerOutput = () => setIsRunnerOutputOpen(true);

    const runOrOpenInput = () => {
      const { inputs } = getBlockInputsAndOutputs();
      if (inputs.length > 0) {
        openRunnerInput();
      } else {
        requestSaveAndRun();
      }
    };

    useImperativeHandle(ref, () => ({
      openRunnerInput,
      openRunnerOutput,
      runOrOpenInput,
    }));

    return (
      <>
        <RunnerInputUI
          isOpen={isRunnerInputOpen}
          onClose={() => setIsRunnerInputOpen(false)}
          blockInputs={getBlockInputsAndOutputs().inputs}
          onInputChange={handleInputChange}
          onRun={() => {
            setIsRunnerInputOpen(false);
            requestSaveAndRun();
          }}
          isRunning={isRunning}
        />
        <RunnerOutputUI
          isOpen={isRunnerOutputOpen}
          onClose={() => setIsRunnerOutputOpen(false)}
          blockOutputs={getBlockInputsAndOutputs().outputs}
        />
      </>
    );
  },
);

RunnerUIWrapper.displayName = "RunnerUIWrapper";

export default RunnerUIWrapper;
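`filterBlocksByType`, imported from `@/lib/utils` in the deleted component, is not itself part of this diff. A minimal sketch consistent with its call sites, using a simplified node shape instead of @xyflow/react's `Node` type:

```ts
// Minimal node shape for the sketch; the real code uses @xyflow/react's Node type.
type FlowNode = { id: string; data: Record<string, unknown> };

// Inferred from the call sites above: return the subset of nodes that satisfy
// the predicate.
function filterBlocksByType(
  nodes: FlowNode[],
  predicate: (node: FlowNode) => boolean,
): FlowNode[] {
  return nodes.filter(predicate);
}

// Usage mirroring the deleted component: select agent input blocks by block_id.
declare const nodes: FlowNode[];
const inputBlocks = filterBlocksByType(
  nodes,
  (node) => node.data.block_id === "c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
);
```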
@@ -9,7 +9,6 @@ import {
import FeaturedAgentsTable from "./FeaturedAgentsTable";
import { AdminAddFeaturedAgentDialog } from "./AdminAddFeaturedAgentDialog";
import { revalidatePath } from "next/cache";
import * as Sentry from "@sentry/nextjs";

export default async function AdminFeaturedAgentsControl({
  className,
@@ -56,15 +55,9 @@ export default async function AdminFeaturedAgentsControl({
  component: <Button>Remove</Button>,
  action: async (rows) => {
    "use server";
    return await Sentry.withServerActionInstrumentation(
      "removeFeaturedAgent",
      {},
      async () => {
        const all = rows.map((row) => removeFeaturedAgent(row.id));
        await Promise.all(all);
        revalidatePath("/marketplace");
      },
    );
    const all = rows.map((row) => removeFeaturedAgent(row.id));
    await Promise.all(all);
    revalidatePath("/marketplace");
  },
},
]}
@@ -1,23 +1,17 @@
"use server";
import AutoGPTServerAPI from "@/lib/autogpt-server-api";
import MarketplaceAPI from "@/lib/marketplace-api";
import { revalidatePath } from "next/cache";
import * as Sentry from "@sentry/nextjs";

export async function approveAgent(
  agentId: string,
  version: number,
  comment: string,
) {
  return await Sentry.withServerActionInstrumentation(
    "approveAgent",
    {},
    async () => {
      const api = new MarketplaceAPI();
      await api.approveAgentSubmission(agentId, version, comment);
      console.debug(`Approving agent ${agentId}`);
      revalidatePath("/marketplace");
    },
  );
  const api = new MarketplaceAPI();
  await api.approveAgentSubmission(agentId, version, comment);
  console.debug(`Approving agent ${agentId}`);
  revalidatePath("/marketplace");
}

export async function rejectAgent(
@@ -25,117 +19,67 @@ export async function rejectAgent(
  version: number,
  comment: string,
) {
  return await Sentry.withServerActionInstrumentation(
    "rejectAgent",
    {},
    async () => {
      const api = new MarketplaceAPI();
      await api.rejectAgentSubmission(agentId, version, comment);
      console.debug(`Rejecting agent ${agentId}`);
      revalidatePath("/marketplace");
    },
  );
  const api = new MarketplaceAPI();
  await api.rejectAgentSubmission(agentId, version, comment);
  console.debug(`Rejecting agent ${agentId}`);
  revalidatePath("/marketplace");
}

export async function getReviewableAgents() {
  return await Sentry.withServerActionInstrumentation(
    "getReviewableAgents",
    {},
    async () => {
      const api = new MarketplaceAPI();
      return api.getAgentSubmissions();
    },
  );
  const api = new MarketplaceAPI();
  return api.getAgentSubmissions();
}

export async function getFeaturedAgents(
  page: number = 1,
  pageSize: number = 10,
) {
  return await Sentry.withServerActionInstrumentation(
    "getFeaturedAgents",
    {},
    async () => {
      const api = new MarketplaceAPI();
      const featured = await api.getFeaturedAgents(page, pageSize);
      console.debug(`Getting featured agents ${featured.agents.length}`);
      return featured;
    },
  );
  const api = new MarketplaceAPI();
  const featured = await api.getFeaturedAgents(page, pageSize);
  console.debug(`Getting featured agents ${featured.agents.length}`);
  return featured;
}

export async function getFeaturedAgent(agentId: string) {
  return await Sentry.withServerActionInstrumentation(
    "getFeaturedAgent",
    {},
    async () => {
      const api = new MarketplaceAPI();
      const featured = await api.getFeaturedAgent(agentId);
      console.debug(`Getting featured agent ${featured.agentId}`);
      return featured;
    },
  );
  const api = new MarketplaceAPI();
  const featured = await api.getFeaturedAgent(agentId);
  console.debug(`Getting featured agent ${featured.agentId}`);
  return featured;
}

export async function addFeaturedAgent(
  agentId: string,
  categories: string[] = ["featured"],
) {
  return await Sentry.withServerActionInstrumentation(
    "addFeaturedAgent",
    {},
    async () => {
      const api = new MarketplaceAPI();
      await api.addFeaturedAgent(agentId, categories);
      console.debug(`Adding featured agent ${agentId}`);
      revalidatePath("/marketplace");
    },
  );
  const api = new MarketplaceAPI();
  await api.addFeaturedAgent(agentId, categories);
  console.debug(`Adding featured agent ${agentId}`);
  revalidatePath("/marketplace");
}

export async function removeFeaturedAgent(
  agentId: string,
  categories: string[] = ["featured"],
) {
  return await Sentry.withServerActionInstrumentation(
    "removeFeaturedAgent",
    {},
    async () => {
      const api = new MarketplaceAPI();
      await api.removeFeaturedAgent(agentId, categories);
      console.debug(`Removing featured agent ${agentId}`);
      revalidatePath("/marketplace");
    },
  );
  const api = new MarketplaceAPI();
  await api.removeFeaturedAgent(agentId, categories);
  console.debug(`Removing featured agent ${agentId}`);
  revalidatePath("/marketplace");
}

export async function getCategories() {
  return await Sentry.withServerActionInstrumentation(
    "getCategories",
    {},
    async () => {
      const api = new MarketplaceAPI();
      const categories = await api.getCategories();
      console.debug(
        `Getting categories ${categories.unique_categories.length}`,
      );
      return categories;
    },
  );
  const api = new MarketplaceAPI();
  const categories = await api.getCategories();
  console.debug(`Getting categories ${categories.unique_categories.length}`);
  return categories;
}

export async function getNotFeaturedAgents(
  page: number = 1,
  pageSize: number = 100,
) {
  return await Sentry.withServerActionInstrumentation(
    "getNotFeaturedAgents",
    {},
    async () => {
      const api = new MarketplaceAPI();
      const agents = await api.getNotFeaturedAgents(page, pageSize);
      console.debug(`Getting not featured agents ${agents.agents.length}`);
      return agents;
    },
  );
  const api = new MarketplaceAPI();
  const agents = await api.getNotFeaturedAgents(page, pageSize);
  console.debug(`Getting not featured agents ${agents.agents.length}`);
  return agents;
}
@@ -19,7 +19,6 @@ import React from "react";
export type Control = {
  icon: React.ReactNode;
  label: string;
  disabled?: boolean;
  onClick: () => void;
};

@@ -51,18 +50,15 @@ export const ControlPanel = ({
  {controls.map((control, index) => (
    <Tooltip key={index} delayDuration={500}>
      <TooltipTrigger asChild>
        <div>
          <Button
            variant="ghost"
            size="icon"
            onClick={() => control.onClick()}
            data-id={`control-button-${index}`}
            disabled={control.disabled || false}
          >
            {control.icon}
            <span className="sr-only">{control.label}</span>
          </Button>
        </div>
        <Button
          variant="ghost"
          size="icon"
          onClick={() => control.onClick()}
          data-id={`control-button-${index}`}
        >
          {control.icon}
          <span className="sr-only">{control.label}</span>
        </Button>
      </TooltipTrigger>
      <TooltipContent side="right">{control.label}</TooltipContent>
    </Tooltip>
@@ -18,8 +18,6 @@ import {

interface SaveControlProps {
  agentMeta: GraphMeta | null;
  agentName: string;
  agentDescription: string;
  onSave: (isTemplate: boolean | undefined) => void;
  onNameChange: (name: string) => void;
  onDescriptionChange: (description: string) => void;
@@ -37,9 +35,7 @@ interface SaveControlProps {
export const SaveControl = ({
  agentMeta,
  onSave,
  agentName,
  onNameChange,
  agentDescription,
  onDescriptionChange,
}: SaveControlProps) => {
  /**
@@ -79,7 +75,7 @@ export const SaveControl = ({
  id="name"
  placeholder="Enter your agent name"
  className="col-span-3"
  value={agentName}
  defaultValue={agentMeta?.name || ""}
  onChange={(e) => onNameChange(e.target.value)}
/>
<Label htmlFor="description">Description</Label>
@@ -87,21 +83,9 @@ export const SaveControl = ({
  id="description"
  placeholder="Your agent description"
  className="col-span-3"
  value={agentDescription}
  defaultValue={agentMeta?.description || ""}
  onChange={(e) => onDescriptionChange(e.target.value)}
/>
{agentMeta?.version && (
  <>
    <Label htmlFor="version">Version</Label>
    <Input
      id="version"
      placeholder="Version"
      className="col-span-3"
      value={agentMeta?.version || "-"}
      disabled
    />
  </>
)}
</div>
</CardContent>
<CardFooter className="flex flex-col items-stretch gap-2">
@@ -81,7 +81,7 @@ function convertGraphToReactFlow(graph: any): { nodes: Node[]; edges: Edge[] } {
async function installGraph(id: string): Promise<void> {
  const apiUrl =
    process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
    "http://localhost:8015/api/v1/market";
    "http://localhost:8001/api/v1/market";
  const api = new MarketplaceAPI(apiUrl);

  const serverAPIUrl = process.env.AGPT_SERVER_API_URL;
@@ -1,16 +1,9 @@
"use server";

import * as Sentry from "@sentry/nextjs";
import MarketplaceAPI, { AnalyticsEvent } from "@/lib/marketplace-api";

export async function makeAnalyticsEvent(event: AnalyticsEvent) {
  return await Sentry.withServerActionInstrumentation(
    "makeAnalyticsEvent",
    {},
    async () => {
      const apiUrl = process.env.AGPT_SERVER_API_URL;
      const api = new MarketplaceAPI();
      await api.makeAnalyticsEvent(event);
    },
  );
  const apiUrl = process.env.AGPT_SERVER_API_URL;
  const api = new MarketplaceAPI();
  await api.makeAnalyticsEvent(event);
}
@@ -380,7 +380,7 @@ const NodeKeyValueInput: FC<{
<Input
  type="text"
  placeholder="Value"
  defaultValue={value ?? ""}
  value={value ?? ""}
  onBlur={(e) =>
    updateKeyValuePairs(
      keyValuePairs.toSpliced(index, 1, {
@@ -563,7 +563,7 @@ const NodeStringInput: FC<{
<Input
  type="text"
  id={selfKey}
  defaultValue={schema.secret && value ? "********" : value}
  value={schema.secret && value ? "********" : value}
  readOnly={schema.secret}
  placeholder={
    schema?.placeholder || `Enter ${beautifyString(displayName)}`
@@ -658,7 +658,7 @@ const NodeNumberInput: FC<{
<Input
  type="number"
  id={selfKey}
  defaultValue={value}
  value={value}
  onBlur={(e) => handleInputChange(selfKey, parseFloat(e.target.value))}
  placeholder={
    schema.placeholder || `Enter ${beautifyString(displayName)}`
@@ -1,61 +0,0 @@
import React from "react";
import { Input } from "@/components/ui/input";
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from "@/components/ui/select";

interface InputBlockProps {
  id: string;
  name: string;
  description?: string;
  value: string;
  placeholder_values?: any[];
  onInputChange: (id: string, field: string, value: string) => void;
}

export function InputBlock({
  id,
  name,
  description,
  value,
  placeholder_values,
  onInputChange,
}: InputBlockProps) {
  return (
    <div className="space-y-1">
      <h3 className="text-base font-semibold">{name || "Unnamed Input"}</h3>
      {description && <p className="text-sm text-gray-600">{description}</p>}
      <div>
        {placeholder_values && placeholder_values.length > 1 ? (
          <Select
            onValueChange={(value) => onInputChange(id, "value", value)}
            value={value}
          >
            <SelectTrigger className="w-full">
              <SelectValue placeholder="Select a value" />
            </SelectTrigger>
            <SelectContent>
              {placeholder_values.map((placeholder, index) => (
                <SelectItem key={index} value={placeholder.toString()}>
                  {placeholder.toString()}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
        ) : (
          <Input
            id={`${id}-Value`}
            value={value}
            onChange={(e) => onInputChange(id, "value", e.target.value)}
            placeholder={placeholder_values?.[0]?.toString() || "Enter value"}
            className="w-full"
          />
        )}
      </div>
    </div>
  );
}
@@ -1,33 +0,0 @@
import React from "react";
import { ScrollArea } from "@/components/ui/scroll-area";
import { InputBlock } from "./RunnerInputBlock";
import { BlockInput } from "./RunnerInputUI";

interface InputListProps {
  blockInputs: BlockInput[];
  onInputChange: (nodeId: string, field: string, value: string) => void;
}

export function InputList({ blockInputs, onInputChange }: InputListProps) {
  return (
    <ScrollArea className="h-[20vh] overflow-auto pr-4 sm:h-[30vh] md:h-[40vh] lg:h-[50vh]">
      <div className="space-y-4">
        {blockInputs && blockInputs.length > 0 ? (
          blockInputs.map((block) => (
            <InputBlock
              key={block.id}
              id={block.id}
              name={block.hardcodedValues.name}
              description={block.hardcodedValues.description}
              value={block.hardcodedValues.value?.toString() || ""}
              placeholder_values={block.hardcodedValues.placeholder_values}
              onInputChange={onInputChange}
            />
          ))
        ) : (
          <p>No input blocks available.</p>
        )}
      </div>
    </ScrollArea>
  );
}
@@ -1,74 +0,0 @@
import React from "react";
import {
  Dialog,
  DialogContent,
  DialogHeader,
  DialogTitle,
  DialogDescription,
  DialogFooter,
} from "@/components/ui/dialog";
import { Button } from "@/components/ui/button";
import { BlockIORootSchema } from "@/lib/autogpt-server-api/types";
import { InputList } from "./RunnerInputList";

export interface BlockInput {
  id: string;
  inputSchema: BlockIORootSchema;
  hardcodedValues: {
    name: string;
    description: string;
    value: any;
    placeholder_values?: any[];
    limit_to_placeholder_values?: boolean;
  };
}

interface RunSettingsUiProps {
  isOpen: boolean;
  onClose: () => void;
  blockInputs: BlockInput[];
  onInputChange: (nodeId: string, field: string, value: string) => void;
  onRun: () => void;
  isRunning: boolean;
}

export function RunnerInputUI({
  isOpen,
  onClose,
  blockInputs,
  onInputChange,
  onRun,
  isRunning,
}: RunSettingsUiProps) {
  const handleRun = () => {
    onRun();
    onClose();
  };

  return (
    <Dialog open={isOpen} onOpenChange={onClose}>
      <DialogContent className="flex max-h-[80vh] flex-col overflow-hidden sm:max-w-[400px] md:max-w-[500px] lg:max-w-[600px]">
        <DialogHeader className="px-4 py-4">
          <DialogTitle className="text-2xl">Run Settings</DialogTitle>
          <DialogDescription className="mt-2 text-sm">
            Configure settings for running your agent.
          </DialogDescription>
        </DialogHeader>
        <div className="flex-grow overflow-y-auto px-4 py-4">
          <InputList blockInputs={blockInputs} onInputChange={onInputChange} />
        </div>
        <DialogFooter className="px-6 py-4">
          <Button
            onClick={handleRun}
            className="px-8 py-2 text-lg"
            disabled={isRunning}
          >
            {isRunning ? "Running..." : "Run"}
          </Button>
        </DialogFooter>
      </DialogContent>
    </Dialog>
  );
}

export default RunnerInputUI;
@@ -1,94 +0,0 @@
import React from "react";
import {
  Sheet,
  SheetContent,
  SheetHeader,
  SheetTitle,
  SheetDescription,
} from "@/components/ui/sheet";
import { ScrollArea } from "@/components/ui/scroll-area";
import { BlockIORootSchema } from "@/lib/autogpt-server-api/types";
import { Label } from "@/components/ui/label";
import { Textarea } from "@/components/ui/textarea";

interface BlockOutput {
  id: string;
  outputSchema: BlockIORootSchema;
  hardcodedValues: {
    name: string;
    description: string;
  };
  result?: any;
}

interface OutputModalProps {
  isOpen: boolean;
  onClose: () => void;
  blockOutputs: BlockOutput[];
}

const formatOutput = (output: any): string => {
  if (typeof output === "object") {
    try {
      return JSON.stringify(output, null, 2);
    } catch (error) {
      return `Error formatting output: ${(error as Error).message}`;
    }
  }
  return String(output);
};

export function RunnerOutputUI({
  isOpen,
  onClose,
  blockOutputs,
}: OutputModalProps) {
  return (
    <Sheet open={isOpen} onOpenChange={onClose}>
      <SheetContent
        side="right"
        className="flex h-full w-full flex-col overflow-hidden sm:max-w-[500px]"
      >
        <SheetHeader className="px-2 py-2">
          <SheetTitle className="text-xl">Run Outputs</SheetTitle>
          <SheetDescription className="mt-1 text-sm">
            View the outputs from your agent run.
          </SheetDescription>
        </SheetHeader>
        <div className="flex-grow overflow-y-auto px-2 py-2">
          <ScrollArea className="h-full overflow-auto pr-4">
            <div className="space-y-4">
              {blockOutputs && blockOutputs.length > 0 ? (
                blockOutputs.map((block) => (
                  <div key={block.id} className="space-y-1">
                    <Label className="text-base font-semibold">
                      {block.hardcodedValues.name || "Unnamed Output"}
                    </Label>

                    {block.hardcodedValues.description && (
                      <Label className="block text-sm text-gray-600">
                        {block.hardcodedValues.description}
                      </Label>
                    )}

                    <div className="rounded-md bg-gray-100 p-2">
                      <Textarea
                        readOnly
                        value={formatOutput(block.result ?? "No output yet")}
                        className="resize-none whitespace-pre-wrap break-words border-none bg-transparent text-sm"
                      />
                    </div>
                  </div>
                ))
              ) : (
                <p>No output blocks available.</p>
              )}
            </div>
          </ScrollArea>
        </div>
      </SheetContent>
    </Sheet>
  );
}

export default RunnerOutputUI;
@@ -1,4 +1,3 @@
import { sendGAEvent } from "@next/third-parties/google";
import Shepherd from "shepherd.js";
import "shepherd.js/dist/css/shepherd.css";

@@ -494,15 +493,6 @@ export const startTutorial = (
  localStorage.setItem("shepherd-tour", "completed"); // Optionally mark the tutorial as completed
});

for (const step of tour.steps) {
  step.on("show", () => {
    "use client";
    console.debug("sendTutorialStep");

    sendGAEvent("event", "tutorial_step_shown", { value: step.id });
  });
}

tour.on("cancel", () => {
  setPinBlocksPopover(false);
  localStorage.setItem("shepherd-tour", "canceled"); // Optionally mark the tutorial as canceled
@@ -264,43 +264,6 @@ export const IconCircleUser = createIcon((props) => (
  </svg>
));

/**
 * Refresh icon component.
 *
 * @component IconRefresh
 * @param {IconProps} props - The props object containing additional attributes and event handlers for the icon.
 * @returns {JSX.Element} - The refresh icon.
 *
 * @example
 * // Default usage this is the standard usage
 * <IconRefresh />
 *
 * @example
 * // With custom color and size these should be used sparingly and only when necessary
 * <IconRefresh className="text-primary" size="lg" />
 *
 * @example
 * // With custom size and onClick handler
 * <IconRefresh size="sm" onClick={handleOnClick} />
 */
export const IconRefresh = createIcon((props) => (
  <svg
    xmlns="http://www.w3.org/2000/svg"
    viewBox="0 0 24 24"
    fill="none"
    stroke="currentColor"
    strokeWidth="2"
    strokeLinecap="round"
    strokeLinejoin="round"
    {...props}
  >
    <polyline points="23 4 23 10 17 10" />
    <polyline points="1 20 1 14 7 14" />
    <path d="M3.51 9a9 9 0 0 1 14.136 -5.36L23 10" />
    <path d="M20.49 15a9 9 0 0 1 -14.136 5.36L1 14" />
  </svg>
));

/**
 * Menu icon component.
 *
@@ -6,7 +6,20 @@ export interface InputProps
  extends React.InputHTMLAttributes<HTMLInputElement> {}

const Input = React.forwardRef<HTMLInputElement, InputProps>(
  ({ className, type, ...props }, ref) => {
  ({ className, type, value, ...props }, ref) => {
    // This ref allows the `Input` component to be both controlled and uncontrolled.
    // The HTML value will only be updated if the value prop changes, but the user can still type in the input.
    ref = ref || React.createRef<HTMLInputElement>();
    React.useEffect(() => {
      if (
        ref &&
        ref.current &&
        ref.current.value !== value &&
        type !== "file"
      ) {
        ref.current.value = value;
      }
    }, [value, type, ref]);
    return (
      <input
        type={type}
@@ -16,6 +29,7 @@ const Input = React.forwardRef<HTMLInputElement, InputProps>(
        className,
      )}
      ref={ref}
      defaultValue={type !== "file" ? value : undefined}
      {...props}
    />
  );
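The `defaultValue` to `value` switch in node-input-components.tsx above relies on this hybrid behavior: the parent passes `value`, the effect syncs it into the DOM when it changes, yet keystrokes are not forced through React state on every change. A minimal usage sketch under those assumptions (the field name and handler are illustrative):

```tsx
import React, { useState } from "react";
import { Input } from "@/components/ui/input";

// Hypothetical consumer of the hybrid Input above: `value` is pushed down,
// but the parent only commits the typed value on blur, mirroring the
// onBlur-based node inputs changed earlier in this diff.
export function NumberField() {
  const [num, setNum] = useState(0);
  return (
    <Input
      type="number"
      value={num}
      // The input stays responsive while typing; state updates once on blur.
      onBlur={(e) => setNum(parseFloat(e.target.value))}
    />
  );
}
```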
@@ -16,7 +16,6 @@ import {
import { Connection, MarkerType } from "@xyflow/react";
import Ajv from "ajv";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { useRouter, useSearchParams, usePathname } from "next/navigation";

const ajv = new Ajv({ strict: false, allErrors: true });

@@ -25,11 +24,6 @@ export default function useAgentGraph(
  template?: boolean,
  passDataToBeads?: boolean,
) {
  const [router, searchParams, pathname] = [
    useRouter(),
    useSearchParams(),
    usePathname(),
  ];
  const [savedAgent, setSavedAgent] = useState<Graph | null>(null);
  const [agentDescription, setAgentDescription] = useState<string>("");
  const [agentName, setAgentName] = useState<string>("");
@@ -139,13 +133,12 @@ export default function useAgentGraph(
      id: node.id,
      type: "custom",
      position: {
        x: node?.metadata?.position?.x || 0,
        y: node?.metadata?.position?.y || 0,
        x: node.metadata.position.x,
        y: node.metadata.position.y,
      },
      data: {
        block_id: block.id,
        blockType: block.name,
        blockCosts: block.costs,
        categories: block.categories,
        description: block.description,
        title: `${block.name} ${node.id}`,
@@ -314,7 +307,7 @@ export default function useAgentGraph(

    (template ? api.getTemplate(flowID) : api.getGraph(flowID)).then(
      (graph) => {
        console.debug("Loading graph");
        console.log("Loading graph");
        loadGraph(graph);
      },
    );
@@ -645,59 +638,31 @@ export default function useAgentGraph(
      links: links,
    };

    // To avoid saving the same graph, we compare the payload with the saved agent.
    // Differences in IDs are ignored.
    const comparedPayload = {
      ...(({ id, ...rest }) => rest)(payload),
      nodes: payload.nodes.map(
        ({ id, data, input_nodes, output_nodes, ...rest }) => rest,
      ),
      links: payload.links.map(({ source_id, sink_id, ...rest }) => rest),
    };
    const comparedSavedAgent = {
      name: savedAgent?.name,
      description: savedAgent?.description,
      nodes: savedAgent?.nodes.map((v) => ({
        block_id: v.block_id,
        input_default: v.input_default,
        metadata: v.metadata,
      })),
      links: savedAgent?.links.map((v) => ({
        sink_name: v.sink_name,
        source_name: v.source_name,
      })),
    };

    let newSavedAgent = null;
    if (savedAgent && deepEquals(comparedPayload, comparedSavedAgent)) {
      console.warn("No need to save: Graph is the same as version on server");
      newSavedAgent = savedAgent;
    if (savedAgent && deepEquals(payload, savedAgent)) {
      console.debug(
        "No need to save: Graph is the same as version on server",
      );
      // Trigger state change
      setSavedAgent(savedAgent);
      return;
    } else {
      console.debug(
        "Saving new Graph version; old vs new:",
        comparedPayload,
        savedAgent,
        payload,
      );
      setNodesSyncedWithSavedAgent(false);

      newSavedAgent = savedAgent
        ? await (savedAgent.is_template
            ? api.updateTemplate(savedAgent.id, payload)
            : api.updateGraph(savedAgent.id, payload))
        : await (asTemplate
            ? api.createTemplate(payload)
            : api.createGraph(payload));

      console.debug("Response from the API:", newSavedAgent);
    }

    // Route the URL to the new flow ID if it's a new agent.
    if (!savedAgent) {
      const path = new URLSearchParams(searchParams);
      path.set("flowID", newSavedAgent.id);
      router.push(`${pathname}?${path.toString()}`);
      return;
    }
    setNodesSyncedWithSavedAgent(false);

    const newSavedAgent = savedAgent
      ? await (savedAgent.is_template
          ? api.updateTemplate(savedAgent.id, payload)
          : api.updateGraph(savedAgent.id, payload))
      : await (asTemplate
          ? api.createTemplate(payload)
          : api.createGraph(payload));
    console.debug("Response from the API:", newSavedAgent);

    // Update the node IDs on the frontend
    setSavedAgent(newSavedAgent);

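The "compare before save" logic above strips volatile fields before testing equality, so that freshly generated IDs do not force a spurious save. As a rough standalone sketch of the same idea (the type shapes and names here are illustrative, not the project's API):

// Hypothetical illustration of ID-insensitive change detection.
type Node = { id: string; block_id: string; input_default: unknown };
type Payload = { id?: string; name: string; nodes: Node[] };

function stripVolatile(p: Payload) {
  const { id, ...rest } = p;
  return { ...rest, nodes: p.nodes.map(({ id, ...n }) => n) };
}

function needsSave(current: Payload, saved: Payload): boolean {
  // Two payloads that differ only in IDs compare as equal here.
  // JSON comparison assumes consistent key order; a structural
  // comparison like deepEquals would be more robust.
  return (
    JSON.stringify(stripVolatile(current)) !==
    JSON.stringify(stripVolatile(saved))
  );
}
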
@@ -1,13 +0,0 @@
import * as Sentry from "@sentry/nextjs";

export async function register() {
  if (process.env.NEXT_RUNTIME === "nodejs") {
    await import("../sentry.server.config");
  }

  if (process.env.NEXT_RUNTIME === "edge") {
    await import("../sentry.edge.config");
  }
}

export const onRequestError = Sentry.captureRequestError;
@@ -1,321 +0,0 @@
import { SupabaseClient } from "@supabase/supabase-js";
import {
  Block,
  Graph,
  GraphCreatable,
  GraphUpdateable,
  GraphMeta,
  GraphExecuteResponse,
  NodeExecutionResult,
  User,
  AnalyticsMetrics,
  AnalyticsDetails,
} from "./types";

export default class BaseAutoGPTServerAPI {
  private baseUrl: string;
  private wsUrl: string;
  private webSocket: WebSocket | null = null;
  private wsConnecting: Promise<void> | null = null;
  private wsMessageHandlers: Record<string, Set<(data: any) => void>> = {};
  private supabaseClient: SupabaseClient | null = null;

  constructor(
    baseUrl: string = process.env.NEXT_PUBLIC_AGPT_SERVER_URL ||
      "http://localhost:8006/api",
    wsUrl: string = process.env.NEXT_PUBLIC_AGPT_WS_SERVER_URL ||
      "ws://localhost:8001/ws",
    supabaseClient: SupabaseClient | null = null,
  ) {
    this.baseUrl = baseUrl;
    this.wsUrl = wsUrl;
    this.supabaseClient = supabaseClient;
  }

  async createUser(): Promise<User> {
    return this._request("POST", "/auth/user", {});
  }

  async getUserCredit(): Promise<{ credits: number }> {
    return this._get(`/credits`);
  }

  async getBlocks(): Promise<Block[]> {
    return await this._get("/blocks");
  }

  async listGraphs(): Promise<GraphMeta[]> {
    return this._get("/graphs");
  }

  async listTemplates(): Promise<GraphMeta[]> {
    return this._get("/templates");
  }

  async getGraph(id: string, version?: number): Promise<Graph> {
    const query = version !== undefined ? `?version=${version}` : "";
    return this._get(`/graphs/${id}` + query);
  }

  async getTemplate(id: string, version?: number): Promise<Graph> {
    const query = version !== undefined ? `?version=${version}` : "";
    return this._get(`/templates/${id}` + query);
  }

  async getGraphAllVersions(id: string): Promise<Graph[]> {
    return this._get(`/graphs/${id}/versions`);
  }

  async getTemplateAllVersions(id: string): Promise<Graph[]> {
    return this._get(`/templates/${id}/versions`);
  }

  async createGraph(graphCreateBody: GraphCreatable): Promise<Graph>;
  async createGraph(
    fromTemplateID: string,
    templateVersion: number,
  ): Promise<Graph>;
  async createGraph(
    graphOrTemplateID: GraphCreatable | string,
    templateVersion?: number,
  ): Promise<Graph> {
    let requestBody: GraphCreateRequestBody;

    if (typeof graphOrTemplateID == "string") {
      if (templateVersion == undefined) {
        throw new Error("templateVersion not specified");
      }
      requestBody = {
        template_id: graphOrTemplateID,
        template_version: templateVersion,
      };
    } else {
      requestBody = { graph: graphOrTemplateID };
    }

    return this._request("POST", "/graphs", requestBody);
  }

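The createGraph overloads above accept either a full graph body or a template ID plus version, and the union-typed request body keeps the two call shapes apart on the wire. A hedged usage sketch (the client instance, graph literal, and IDs are placeholders for illustration only):

// Hypothetical usage of the two createGraph call shapes.
const api = new AutoGPTServerAPI();
// 1) Create from a graph body (shape abbreviated; cast only for illustration):
const fromBody = await api.createGraph({
  name: "My agent",
  description: "",
  nodes: [],
  links: [],
} as GraphCreatable);
// 2) Create from an existing template (ID and version are placeholders):
const fromTemplate = await api.createGraph("some-template-id", 1);
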
  async createTemplate(templateCreateBody: GraphCreatable): Promise<Graph> {
    const requestBody: GraphCreateRequestBody = { graph: templateCreateBody };
    return this._request("POST", "/templates", requestBody);
  }

  async updateGraph(id: string, graph: GraphUpdateable): Promise<Graph> {
    return await this._request("PUT", `/graphs/${id}`, graph);
  }

  async updateTemplate(id: string, template: GraphUpdateable): Promise<Graph> {
    return await this._request("PUT", `/templates/${id}`, template);
  }

  async setGraphActiveVersion(id: string, version: number): Promise<Graph> {
    return this._request("PUT", `/graphs/${id}/versions/active`, {
      active_graph_version: version,
    });
  }

  async executeGraph(
    id: string,
    inputData: { [key: string]: any } = {},
  ): Promise<GraphExecuteResponse> {
    return this._request("POST", `/graphs/${id}/execute`, inputData);
  }

  async listGraphRunIDs(
    graphID: string,
    graphVersion?: number,
  ): Promise<string[]> {
    const query =
      graphVersion !== undefined ? `?graph_version=${graphVersion}` : "";
    return this._get(`/graphs/${graphID}/executions` + query);
  }

  async getGraphExecutionInfo(
    graphID: string,
    runID: string,
  ): Promise<NodeExecutionResult[]> {
    return (await this._get(`/graphs/${graphID}/executions/${runID}`)).map(
      parseNodeExecutionResultTimestamps,
    );
  }

  async stopGraphExecution(
    graphID: string,
    runID: string,
  ): Promise<NodeExecutionResult[]> {
    return (
      await this._request("POST", `/graphs/${graphID}/executions/${runID}/stop`)
    ).map(parseNodeExecutionResultTimestamps);
  }

  async logMetric(metric: AnalyticsMetrics) {
    return this._request("POST", "/analytics/log_raw_metric", metric);
  }

  async logAnalytic(analytic: AnalyticsDetails) {
    return this._request("POST", "/analytics/log_raw_analytics", analytic);
  }

  private async _get(path: string) {
    return this._request("GET", path);
  }

  private async _request(
    method: "GET" | "POST" | "PUT" | "PATCH",
    path: string,
    payload?: { [key: string]: any },
  ) {
    if (method != "GET") {
      console.debug(`${method} ${path} payload:`, payload);
    }

    const token =
      (await this.supabaseClient?.auth.getSession())?.data.session
        ?.access_token || "";

    const response = await fetch(this.baseUrl + path, {
      method,
      headers:
        method != "GET"
          ? {
              "Content-Type": "application/json",
              Authorization: token ? `Bearer ${token}` : "",
            }
          : {
              Authorization: token ? `Bearer ${token}` : "",
            },
      body: JSON.stringify(payload),
    });
    const response_data = await response.json();

    if (!response.ok) {
      console.warn(
        `${method} ${path} returned non-OK response:`,
        response_data.detail,
        response,
      );
      throw new Error(`HTTP error ${response.status}! ${response_data.detail}`);
    }
    return response_data;
  }

  async connectWebSocket(): Promise<void> {
    this.wsConnecting ??= new Promise(async (resolve, reject) => {
      try {
        const token =
          (await this.supabaseClient?.auth.getSession())?.data.session
            ?.access_token || "";

        const wsUrlWithToken = `${this.wsUrl}?token=${token}`;
        this.webSocket = new WebSocket(wsUrlWithToken);

        this.webSocket.onopen = () => {
          console.debug("WebSocket connection established");
          resolve();
        };

        this.webSocket.onclose = (event) => {
          console.debug("WebSocket connection closed", event);
          this.webSocket = null;
        };

        this.webSocket.onerror = (error) => {
          console.error("WebSocket error:", error);
          reject(error);
        };

        this.webSocket.onmessage = (event) => {
          const message: WebsocketMessage = JSON.parse(event.data);
          if (message.method == "execution_event") {
            message.data = parseNodeExecutionResultTimestamps(message.data);
          }
          this.wsMessageHandlers[message.method]?.forEach((handler) =>
            handler(message.data),
          );
        };
      } catch (error) {
        console.error("Error connecting to WebSocket:", error);
        reject(error);
      }
    });
    return this.wsConnecting;
  }

  disconnectWebSocket() {
    if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) {
      this.webSocket.close();
    }
  }

  sendWebSocketMessage<M extends keyof WebsocketMessageTypeMap>(
    method: M,
    data: WebsocketMessageTypeMap[M],
    callCount = 0,
  ) {
    if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) {
      this.webSocket.send(JSON.stringify({ method, data }));
    } else {
      this.connectWebSocket().then(() => {
        callCount == 0
          ? this.sendWebSocketMessage(method, data, callCount + 1)
          : setTimeout(
              () => {
                this.sendWebSocketMessage(method, data, callCount + 1);
              },
              2 ** (callCount - 1) * 1000,
            );
      });
    }
  }

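Note the retry schedule in sendWebSocketMessage: the first retry after a reconnect is immediate (callCount == 0), and each later attempt is delayed by 2 ** (callCount - 1) seconds, i.e. 1 s, 2 s, 4 s, and so on. A minimal standalone sketch of the same exponential backoff idea (names are illustrative, not the project's API):

// Hypothetical illustration of send-with-exponential-backoff.
// A production version would also cap the number of attempts.
function sendWithBackoff(send: () => boolean, attempt = 0): void {
  if (send()) return; // delivered
  const delayMs = attempt === 0 ? 0 : 2 ** (attempt - 1) * 1000; // 0, 1s, 2s, 4s, ...
  setTimeout(() => sendWithBackoff(send, attempt + 1), delayMs);
}
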
  onWebSocketMessage<M extends keyof WebsocketMessageTypeMap>(
    method: M,
    handler: (data: WebsocketMessageTypeMap[M]) => void,
  ): () => void {
    this.wsMessageHandlers[method] ??= new Set();
    this.wsMessageHandlers[method].add(handler);

    // Return detacher
    return () => this.wsMessageHandlers[method].delete(handler);
  }

  subscribeToExecution(graphId: string) {
    this.sendWebSocketMessage("subscribe", { graph_id: graphId });
  }
}

/* *** UTILITY TYPES *** */

type GraphCreateRequestBody =
  | {
      template_id: string;
      template_version: number;
    }
  | {
      graph: GraphCreatable;
    };

type WebsocketMessageTypeMap = {
  subscribe: { graph_id: string };
  execution_event: NodeExecutionResult;
};

type WebsocketMessage = {
  [M in keyof WebsocketMessageTypeMap]: {
    method: M;
    data: WebsocketMessageTypeMap[M];
  };
}[keyof WebsocketMessageTypeMap];

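The WebsocketMessage type above maps every key of WebsocketMessageTypeMap to a { method, data } pair and then indexes by all keys, producing a discriminated union: checking message.method narrows message.data to the matching payload type. A small sketch of how that narrowing plays out (the handleMessage consumer is illustrative):

// Hypothetical consumer showing the narrowing behaviour.
function handleMessage(message: WebsocketMessage) {
  if (message.method === "execution_event") {
    // Here message.data is a NodeExecutionResult.
    console.log(message.data.end_time);
  } else {
    // Here message.data is { graph_id: string }.
    console.log(message.data.graph_id);
  }
}
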
/* *** HELPER FUNCTIONS *** */

function parseNodeExecutionResultTimestamps(result: any): NodeExecutionResult {
  return {
    ...result,
    add_time: new Date(result.add_time),
    queue_time: result.queue_time ? new Date(result.queue_time) : undefined,
    start_time: result.start_time ? new Date(result.start_time) : undefined,
    end_time: result.end_time ? new Date(result.end_time) : undefined,
  };
}
@@ -1,14 +1,305 @@
import { createClient } from "../supabase/client";
import BaseAutoGPTServerAPI from "./baseClient";
import {
  Block,
  Graph,
  GraphCreatable,
  GraphUpdateable,
  GraphMeta,
  GraphExecuteResponse,
  NodeExecutionResult,
  User,
} from "./types";

export default class AutoGPTServerAPI {
  private baseUrl: string;
  private wsUrl: string;
  private webSocket: WebSocket | null = null;
  private wsConnecting: Promise<void> | null = null;
  private wsMessageHandlers: Record<string, Set<(data: any) => void>> = {};
  private supabaseClient = createClient();

export default class AutoGPTServerAPI extends BaseAutoGPTServerAPI {
  constructor(
    baseUrl: string = process.env.NEXT_PUBLIC_AGPT_SERVER_URL ||
      "http://localhost:8006/api",
      "http://localhost:8000/api",
    wsUrl: string = process.env.NEXT_PUBLIC_AGPT_WS_SERVER_URL ||
      "ws://localhost:8001/ws",
  ) {
    const supabaseClient = createClient();
    super(baseUrl, wsUrl, supabaseClient);
    this.baseUrl = baseUrl;
    this.wsUrl = wsUrl;
  }

  async createUser(): Promise<User> {
    return this._request("POST", "/auth/user", {});
  }

  async getBlocks(): Promise<Block[]> {
    return await this._get("/blocks");
  }

  async listGraphs(): Promise<GraphMeta[]> {
    return this._get("/graphs");
  }

  async listTemplates(): Promise<GraphMeta[]> {
    return this._get("/templates");
  }

  async getGraph(id: string, version?: number): Promise<Graph> {
    const query = version !== undefined ? `?version=${version}` : "";
    return this._get(`/graphs/${id}` + query);
  }

  async getTemplate(id: string, version?: number): Promise<Graph> {
    const query = version !== undefined ? `?version=${version}` : "";
    return this._get(`/templates/${id}` + query);
  }

  async getGraphAllVersions(id: string): Promise<Graph[]> {
    return this._get(`/graphs/${id}/versions`);
  }

  async getTemplateAllVersions(id: string): Promise<Graph[]> {
    return this._get(`/templates/${id}/versions`);
  }

  async createGraph(graphCreateBody: GraphCreatable): Promise<Graph>;
  async createGraph(
    fromTemplateID: string,
    templateVersion: number,
  ): Promise<Graph>;
  async createGraph(
    graphOrTemplateID: GraphCreatable | string,
    templateVersion?: number,
  ): Promise<Graph> {
    let requestBody: GraphCreateRequestBody;

    if (typeof graphOrTemplateID == "string") {
      if (templateVersion == undefined) {
        throw new Error("templateVersion not specified");
      }
      requestBody = {
        template_id: graphOrTemplateID,
        template_version: templateVersion,
      };
    } else {
      requestBody = { graph: graphOrTemplateID };
    }

    return this._request("POST", "/graphs", requestBody);
  }

  async createTemplate(templateCreateBody: GraphCreatable): Promise<Graph> {
    const requestBody: GraphCreateRequestBody = { graph: templateCreateBody };
    return this._request("POST", "/templates", requestBody);
  }

  async updateGraph(id: string, graph: GraphUpdateable): Promise<Graph> {
    return await this._request("PUT", `/graphs/${id}`, graph);
  }

  async updateTemplate(id: string, template: GraphUpdateable): Promise<Graph> {
    return await this._request("PUT", `/templates/${id}`, template);
  }

  async setGraphActiveVersion(id: string, version: number): Promise<Graph> {
    return this._request("PUT", `/graphs/${id}/versions/active`, {
      active_graph_version: version,
    });
  }

  async executeGraph(
    id: string,
    inputData: { [key: string]: any } = {},
  ): Promise<GraphExecuteResponse> {
    return this._request("POST", `/graphs/${id}/execute`, inputData);
  }

  async listGraphRunIDs(
    graphID: string,
    graphVersion?: number,
  ): Promise<string[]> {
    const query =
      graphVersion !== undefined ? `?graph_version=${graphVersion}` : "";
    return this._get(`/graphs/${graphID}/executions` + query);
  }

  async getGraphExecutionInfo(
    graphID: string,
    runID: string,
  ): Promise<NodeExecutionResult[]> {
    return (await this._get(`/graphs/${graphID}/executions/${runID}`)).map(
      parseNodeExecutionResultTimestamps,
    );
  }

  async stopGraphExecution(
    graphID: string,
    runID: string,
  ): Promise<NodeExecutionResult[]> {
    return (
      await this._request("POST", `/graphs/${graphID}/executions/${runID}/stop`)
    ).map(parseNodeExecutionResultTimestamps);
  }

  private async _get(path: string) {
    return this._request("GET", path);
  }

  private async _request(
    method: "GET" | "POST" | "PUT" | "PATCH",
    path: string,
    payload?: { [key: string]: any },
  ) {
    if (method != "GET") {
      console.debug(`${method} ${path} payload:`, payload);
    }

    const token =
      (await this.supabaseClient?.auth.getSession())?.data.session
        ?.access_token || "";

    const response = await fetch(this.baseUrl + path, {
      method,
      headers:
        method != "GET"
          ? {
              "Content-Type": "application/json",
              Authorization: token ? `Bearer ${token}` : "",
            }
          : {
              Authorization: token ? `Bearer ${token}` : "",
            },
      body: JSON.stringify(payload),
    });
    const response_data = await response.json();

    if (!response.ok) {
      console.warn(
        `${method} ${path} returned non-OK response:`,
        response_data.detail,
        response,
      );
      throw new Error(`HTTP error ${response.status}! ${response_data.detail}`);
    }
    return response_data;
  }

  async connectWebSocket(): Promise<void> {
    this.wsConnecting ??= new Promise(async (resolve, reject) => {
      try {
        const token =
          (await this.supabaseClient?.auth.getSession())?.data.session
            ?.access_token || "";

        const wsUrlWithToken = `${this.wsUrl}?token=${token}`;
        this.webSocket = new WebSocket(wsUrlWithToken);

        this.webSocket.onopen = () => {
          console.debug("WebSocket connection established");
          resolve();
        };

        this.webSocket.onclose = (event) => {
          console.debug("WebSocket connection closed", event);
          this.webSocket = null;
        };

        this.webSocket.onerror = (error) => {
          console.error("WebSocket error:", error);
          reject(error);
        };

        this.webSocket.onmessage = (event) => {
          const message: WebsocketMessage = JSON.parse(event.data);
          if (message.method == "execution_event") {
            message.data = parseNodeExecutionResultTimestamps(message.data);
          }
          this.wsMessageHandlers[message.method]?.forEach((handler) =>
            handler(message.data),
          );
        };
      } catch (error) {
        console.error("Error connecting to WebSocket:", error);
        reject(error);
      }
    });
    return this.wsConnecting;
  }

  disconnectWebSocket() {
    if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) {
      this.webSocket.close();
    }
  }

  sendWebSocketMessage<M extends keyof WebsocketMessageTypeMap>(
    method: M,
    data: WebsocketMessageTypeMap[M],
    callCount = 0,
  ) {
    if (this.webSocket && this.webSocket.readyState === WebSocket.OPEN) {
      this.webSocket.send(JSON.stringify({ method, data }));
    } else {
      this.connectWebSocket().then(() => {
        callCount == 0
          ? this.sendWebSocketMessage(method, data, callCount + 1)
          : setTimeout(
              () => {
                this.sendWebSocketMessage(method, data, callCount + 1);
              },
              2 ** (callCount - 1) * 1000,
            );
      });
    }
  }

  onWebSocketMessage<M extends keyof WebsocketMessageTypeMap>(
    method: M,
    handler: (data: WebsocketMessageTypeMap[M]) => void,
  ): () => void {
    this.wsMessageHandlers[method] ??= new Set();
    this.wsMessageHandlers[method].add(handler);

    // Return detacher
    return () => this.wsMessageHandlers[method].delete(handler);
  }

  subscribeToExecution(graphId: string) {
    this.sendWebSocketMessage("subscribe", { graph_id: graphId });
  }
}

/* *** UTILITY TYPES *** */

type GraphCreateRequestBody =
  | {
      template_id: string;
      template_version: number;
    }
  | {
      graph: GraphCreatable;
    };

type WebsocketMessageTypeMap = {
  subscribe: { graph_id: string };
  execution_event: NodeExecutionResult;
};

type WebsocketMessage = {
  [M in keyof WebsocketMessageTypeMap]: {
    method: M;
    data: WebsocketMessageTypeMap[M];
  };
}[keyof WebsocketMessageTypeMap];

/* *** HELPER FUNCTIONS *** */

function parseNodeExecutionResultTimestamps(result: any): NodeExecutionResult {
  return {
    ...result,
    add_time: new Date(result.add_time),
    queue_time: result.queue_time ? new Date(result.queue_time) : undefined,
    start_time: result.start_time ? new Date(result.start_time) : undefined,
    end_time: result.end_time ? new Date(result.end_time) : undefined,
  };
}

@@ -1,14 +0,0 @@
import { createServerClient } from "../supabase/server";
import BaseAutoGPTServerAPI from "./baseClient";

export default class AutoGPTServerAPIServerSide extends BaseAutoGPTServerAPI {
  constructor(
    baseUrl: string = process.env.NEXT_PUBLIC_AGPT_SERVER_URL ||
      "http://localhost:8006/api",
    wsUrl: string = process.env.NEXT_PUBLIC_AGPT_WS_SERVER_URL ||
      "ws://localhost:8001/ws",
  ) {
    const supabaseClient = createServerClient();
    super(baseUrl, wsUrl, supabaseClient);
  }
}
@@ -5,18 +5,6 @@ export type Category = {
  description: string;
};

export enum BlockCostType {
  RUN = "run",
  BYTE = "byte",
  SECOND = "second",
}

export type BlockCost = {
  cost_amount: number;
  cost_type: BlockCostType;
  cost_filter: { [key: string]: any };
};

export type Block = {
  id: string;
  name: string;
@@ -26,7 +14,6 @@ export type Block = {
  outputSchema: BlockIORootSchema;
  staticOutput: boolean;
  uiType: BlockUIType;
  costs: BlockCost[];
};

export type BlockIORootSchema = {
@@ -203,15 +190,3 @@ export enum BlockUIType {
  OUTPUT = "Output",
  NOTE = "Note",
}

export type AnalyticsMetrics = {
  metric_name: string;
  metric_value: number;
  data_string: string;
};

export type AnalyticsDetails = {
  type: string;
  data: { [key: string]: any };
  index: string;
};

@@ -17,7 +17,7 @@ export default class MarketplaceAPI {

  constructor(
    baseUrl: string = process.env.NEXT_PUBLIC_AGPT_MARKETPLACE_URL ||
      "http://localhost:8015/api/v1/market",
      "http://localhost:8001/api/v1/market",
  ) {
    this.baseUrl = baseUrl;
  }
@@ -46,7 +46,7 @@ export default class MarketplaceAPI {
    pageSize: number = 10,
  ): Promise<AgentListResponse> {
    return this._get(
      `/top-downloads/agents?page=${page}&page_size=${pageSize}`,
      `agents/top-downloads?page=${page}&page_size=${pageSize}`,
    );
  }

@@ -54,7 +54,7 @@ export default class MarketplaceAPI {
    page: number = 1,
    pageSize: number = 10,
  ): Promise<AgentListResponse> {
    return this._get(`/featured/agents?page=${page}&page_size=${pageSize}`);
    return this._get(`/agents/featured?page=${page}&page_size=${pageSize}`);
  }

  async searchAgents(

@@ -7,7 +7,6 @@ export function createClient() {
      process.env.NEXT_PUBLIC_SUPABASE_ANON_KEY!,
    );
  } catch (error) {
    console.error("error creating client", error);
    return null;
  }
}

@@ -24,16 +24,15 @@ export function deepEquals(x: any, y: any): boolean {
  const ok = Object.keys,
    tx = typeof x,
    ty = typeof y;

  const res =
  return (
    x &&
    y &&
    tx === ty &&
    (tx === "object"
      ? ok(x).length === ok(y).length &&
        ok(x).every((key) => deepEquals(x[key], y[key]))
      : x === y);
  return res;
      : x === y)
  );
}

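deepEquals above does a structural comparison: for objects it requires the same key count and recursively equal values, otherwise it falls back to ===. A few hedged usage examples (illustrative calls, not from the codebase):

deepEquals({ a: 1, b: { c: 2 } }, { b: { c: 2 }, a: 1 }); // true: key order does not matter
deepEquals([1, 2, 3], [1, 2, 3]);                         // true: arrays compare element-wise
deepEquals({ a: 1 }, { a: 1, b: undefined });             // false: different key counts
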
/** Get tailwind text color class from type name */
@@ -185,7 +184,7 @@ export const categoryColorMap: Record<string, string> = {
  SEARCH: "bg-blue-300/[.7]",
  BASIC: "bg-purple-300/[.7]",
  INPUT: "bg-cyan-300/[.7]",
  OUTPUT: "bg-red-300/[.7]",
  OUTPUT: "bg-brown-300/[.7]",
  LOGIC: "bg-teal-300/[.7]",
};

@@ -195,10 +194,3 @@ export function getPrimaryCategoryColor(categories: Category[]): string {
  }
  return categoryColorMap[categories[0].category] || "bg-gray-300/[.7]";
}

export function filterBlocksByType<T>(
  blocks: T[],
  predicate: (block: T) => boolean,
): T[] {
  return blocks.filter(predicate);
}

@@ -1,23 +1,16 @@
import { redirect } from "next/navigation";
import getServerUser from "@/hooks/getServerUser";
import React from "react";
import * as Sentry from "@sentry/nextjs";

export async function withRoleAccess(allowedRoles: string[]) {
  "use server";
  return await Sentry.withServerActionInstrumentation(
    "withRoleAccess",
    {},
    async () => {
      return async function <T extends React.ComponentType<any>>(Component: T) {
        const { user, role, error } = await getServerUser();
  return async function <T extends React.ComponentType<any>>(Component: T) {
    const { user, role, error } = await getServerUser();

      if (error || !user || !role || !allowedRoles.includes(role)) {
        redirect("/unauthorized");
      }
    if (error || !user || !role || !allowedRoles.includes(role)) {
      redirect("/unauthorized");
    }

      return Component;
      };
    },
  );
    return Component;
  };
}

File diff suppressed because it is too large
@@ -7,7 +7,7 @@ from pydantic import BaseModel, Field, SecretStr, field_serializer
class _BaseCredentials(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid4()))
    provider: str
    title: Optional[str]
    title: str

    @field_serializer("*")
    def dump_secret_strings(value: Any, _info):
@@ -18,8 +18,6 @@ class _BaseCredentials(BaseModel):

class OAuth2Credentials(_BaseCredentials):
    type: Literal["oauth2"] = "oauth2"
    username: Optional[str]
    """Username of the third-party service user that these credentials belong to"""
    access_token: SecretStr
    access_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the access token expires (if at all)"""

@@ -1,7 +1,7 @@
DB_USER=agpt_user
DB_PASS=pass123
DB_NAME=agpt_local
DB_PORT=5433
DB_PORT=5432
DATABASE_URL="postgresql://${DB_USER}:${DB_PASS}@localhost:${DB_PORT}/${DB_NAME}"
PRISMA_SCHEMA="postgres/schema.prisma"

@@ -9,13 +9,10 @@ REDIS_HOST=localhost
REDIS_PORT=6379
REDIS_PASSWORD=password

ENABLE_AUTH=false
ENABLE_CREDIT=false
AUTH_ENABLED=false
APP_ENV="local"
PYRO_HOST=localhost
SENTRY_DSN=
# This is needed when ENABLE_AUTH is true
SUPABASE_JWT_SECRET=

## ===== OPTIONAL API KEYS ===== ##

3
rnd/autogpt_server/.vscode/settings.json
vendored
@@ -1,6 +1,3 @@
{
  "python.analysis.typeCheckingMode": "basic",
  "python.testing.pytestArgs": ["test"],
  "python.testing.unittestEnabled": false,
  "python.testing.pytestEnabled": true
}

@@ -17,10 +17,6 @@ ENV POETRY_VERSION=1.8.3 \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"

# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools

RUN pip3 install poetry

# Copy and install dependencies
@@ -45,10 +41,6 @@ ENV POETRY_VERSION=1.8.3 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"


# Upgrade pip and setuptools to fix security vulnerabilities
RUN pip3 install --upgrade pip setuptools

# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11

@@ -14,8 +14,7 @@ class ReadCsvBlock(Block):
        skip_columns: list[str] = []

    class Output(BlockSchema):
        row: dict[str, str]
        all_data: list[dict[str, str]]
        data: dict[str, str]

    def __init__(self):
        super().__init__(
@@ -28,15 +27,8 @@ class ReadCsvBlock(Block):
                "contents": "a, b, c\n1,2,3\n4,5,6",
            },
            test_output=[
                ("row", {"a": "1", "b": "2", "c": "3"}),
                ("row", {"a": "4", "b": "5", "c": "6"}),
                (
                    "all_data",
                    [
                        {"a": "1", "b": "2", "c": "3"},
                        {"a": "4", "b": "5", "c": "6"},
                    ],
                ),
                ("data", {"a": "1", "b": "2", "c": "3"}),
                ("data", {"a": "4", "b": "5", "c": "6"}),
            ],
        )

@@ -61,7 +53,8 @@ class ReadCsvBlock(Block):
        for _ in range(input_data.skip_rows):
            next(reader)

        def process_row(row):
        # join the data with the header
        for row in reader:
            data = {}
            for i, value in enumerate(row):
                if i not in input_data.skip_columns:
@@ -69,12 +62,4 @@ class ReadCsvBlock(Block):
                    data[header[i]] = value.strip() if input_data.strip else value
                else:
                    data[str(i)] = value.strip() if input_data.strip else value
            return data

        all_data = []
        for row in reader:
            processed_row = process_row(row)
            all_data.append(processed_row)
            yield "row", processed_row

        yield "all_data", all_data
            yield "data", data

@@ -1,7 +1,6 @@
import logging
from enum import Enum
from json import JSONDecodeError
from typing import Any, List, NamedTuple
from typing import List, NamedTuple

import anthropic
import ollama
@@ -25,7 +24,6 @@ LlmApiKeys = {
class ModelMetadata(NamedTuple):
    provider: str
    context_window: int
    cost_factor: int


class LlmModel(str, Enum):
@@ -57,29 +55,26 @@ class LlmModel(str, Enum):


MODEL_METADATA = {
    LlmModel.GPT4O_MINI: ModelMetadata("openai", 128000, cost_factor=10),
    LlmModel.GPT4O: ModelMetadata("openai", 128000, cost_factor=12),
    LlmModel.GPT4_TURBO: ModelMetadata("openai", 128000, cost_factor=11),
    LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385, cost_factor=8),
    LlmModel.CLAUDE_3_5_SONNET: ModelMetadata("anthropic", 200000, cost_factor=14),
    LlmModel.CLAUDE_3_HAIKU: ModelMetadata("anthropic", 200000, cost_factor=13),
    LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192, cost_factor=6),
    LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192, cost_factor=9),
    LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768, cost_factor=7),
    LlmModel.GEMMA_7B: ModelMetadata("groq", 8192, cost_factor=6),
    LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192, cost_factor=7),
    LlmModel.LLAMA3_1_405B: ModelMetadata("groq", 8192, cost_factor=10),
    # Limited to 16k during preview
    LlmModel.LLAMA3_1_70B: ModelMetadata("groq", 131072, cost_factor=15),
    LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072, cost_factor=13),
    LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192, cost_factor=7),
    LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192, cost_factor=11),
    LlmModel.GPT4O_MINI: ModelMetadata("openai", 128000),
    LlmModel.GPT4O: ModelMetadata("openai", 128000),
    LlmModel.GPT4_TURBO: ModelMetadata("openai", 128000),
    LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385),
    LlmModel.CLAUDE_3_5_SONNET: ModelMetadata("anthropic", 200000),
    LlmModel.CLAUDE_3_HAIKU: ModelMetadata("anthropic", 200000),
    LlmModel.LLAMA3_8B: ModelMetadata("groq", 8192),
    LlmModel.LLAMA3_70B: ModelMetadata("groq", 8192),
    LlmModel.MIXTRAL_8X7B: ModelMetadata("groq", 32768),
    LlmModel.GEMMA_7B: ModelMetadata("groq", 8192),
    LlmModel.GEMMA2_9B: ModelMetadata("groq", 8192),
    LlmModel.LLAMA3_1_405B: ModelMetadata(
        "groq", 8192
    ),  # Limited to 16k during preview
    LlmModel.LLAMA3_1_70B: ModelMetadata("groq", 131072),
    LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
    LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
    LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
}

for model in LlmModel:
    if model not in MODEL_METADATA:
        raise ValueError(f"Missing MODEL_METADATA metadata for model: {model}")


class AIStructuredResponseGeneratorBlock(Block):
    class Input(BlockSchema):
@@ -94,7 +89,7 @@ class AIStructuredResponseGeneratorBlock(Block):
        )

    class Output(BlockSchema):
        response: dict[str, Any]
        response: dict[str, str]
        error: str

    def __init__(self):
@@ -140,33 +135,16 @@ class AIStructuredResponseGeneratorBlock(Block):
            )
            return response.choices[0].message.content or ""
        elif provider == "anthropic":
            system_messages = [p["content"] for p in prompt if p["role"] == "system"]
            sysprompt = " ".join(system_messages)

            messages = []
            last_role = None
            for p in prompt:
                if p["role"] in ["user", "assistant"]:
                    if p["role"] != last_role:
                        messages.append({"role": p["role"], "content": p["content"]})
                        last_role = p["role"]
                    else:
                        # If the role is the same as the last one, combine the content
                        messages[-1]["content"] += "\n" + p["content"]

            sysprompt = "".join([p["content"] for p in prompt if p["role"] == "system"])
            usrprompt = [p for p in prompt if p["role"] == "user"]
            client = anthropic.Anthropic(api_key=api_key)
            try:
                response = client.messages.create(
                    model=model.value,
                    max_tokens=4096,
                    system=sysprompt,
                    messages=messages,
                )
                return response.content[0].text if response.content else ""
            except anthropic.APIError as e:
                error_message = f"Anthropic API error: {str(e)}"
                logger.error(error_message)
                raise ValueError(error_message)
            response = client.messages.create(
                model=model.value,
                max_tokens=4096,
                system=sysprompt,
                messages=usrprompt,  # type: ignore
            )
            return response.content[0].text if response.content else ""
        elif provider == "groq":
            client = Groq(api_key=api_key)
            response_format = {"type": "json_object"} if json_format else None
@@ -217,16 +195,14 @@ class AIStructuredResponseGeneratorBlock(Block):

        prompt.append({"role": "user", "content": input_data.prompt})

        def parse_response(resp: str) -> tuple[dict[str, Any], str | None]:
        def parse_response(resp: str) -> tuple[dict[str, str], str | None]:
            try:
                parsed = json.loads(resp)
                if not isinstance(parsed, dict):
                    return {}, f"Expected a dictionary, but got {type(parsed)}"
                miss_keys = set(input_data.expected_format.keys()) - set(parsed.keys())
                if miss_keys:
                    return parsed, f"Missing keys: {miss_keys}"
                return parsed, None
            except JSONDecodeError as e:
            except Exception as e:
                return {}, f"JSON decode error: {e}"

        logger.info(f"LLM request: {prompt}")
@@ -250,16 +226,7 @@ class AIStructuredResponseGeneratorBlock(Block):
        if input_data.expected_format:
            parsed_dict, parsed_error = parse_response(response_text)
            if not parsed_error:
                yield "response", {
                    k: (
                        json.loads(v)
                        if isinstance(v, str)
                        and v.startswith("[")
                        and v.endswith("]")
                        else (", ".join(v) if isinstance(v, list) else v)
                    )
                    for k, v in parsed_dict.items()
                }
                yield "response", {k: str(v) for k, v in parsed_dict.items()}
                return
        else:
            yield "response", {"response": response_text}
@@ -334,7 +301,7 @@ class AITextGeneratorBlock(Block):
            yield "error", str(e)


class AITextSummarizerBlock(Block):
class TextSummarizerBlock(Block):
    class Input(BlockSchema):
        text: str
        model: LlmModel = LlmModel.GPT4_TURBO
@@ -352,8 +319,8 @@ class AITextSummarizerBlock(Block):
            id="c3d4e5f6-7g8h-9i0j-1k2l-m3n4o5p6q7r8",
            description="Utilize a Large Language Model (LLM) to summarize a long text.",
            categories={BlockCategory.AI, BlockCategory.TEXT},
            input_schema=AITextSummarizerBlock.Input,
            output_schema=AITextSummarizerBlock.Output,
            input_schema=TextSummarizerBlock.Input,
            output_schema=TextSummarizerBlock.Output,
            test_input={"text": "Lorem ipsum..." * 100},
            test_output=("summary", "Final summary of a long text"),
            test_mock={
@@ -445,7 +412,7 @@ class AITextSummarizerBlock(Block):
        else:
            # If combined summaries are still too long, recursively summarize
            return self._run(
                AITextSummarizerBlock.Input(
                TextSummarizerBlock.Input(
                    text=combined_text,
                    api_key=input_data.api_key,
                    model=input_data.model,

@@ -1,264 +0,0 @@
import random
from collections import defaultdict
from enum import Enum
from typing import Any, Dict, List, Optional, Union

from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import SchemaField


class SamplingMethod(str, Enum):
    RANDOM = "random"
    SYSTEMATIC = "systematic"
    TOP = "top"
    BOTTOM = "bottom"
    STRATIFIED = "stratified"
    WEIGHTED = "weighted"
    RESERVOIR = "reservoir"
    CLUSTER = "cluster"


class DataSamplingBlock(Block):
    class Input(BlockSchema):
        data: Union[Dict[str, Any], List[Union[dict, List[Any]]]] = SchemaField(
            description="The dataset to sample from. Can be a single dictionary, a list of dictionaries, or a list of lists.",
            placeholder="{'id': 1, 'value': 'a'} or [{'id': 1, 'value': 'a'}, {'id': 2, 'value': 'b'}, ...]",
        )
        sample_size: int = SchemaField(
            description="The number of samples to take from the dataset.",
            placeholder="10",
            default=10,
        )
        sampling_method: SamplingMethod = SchemaField(
            description="The method to use for sampling.",
            default=SamplingMethod.RANDOM,
        )
        accumulate: bool = SchemaField(
            description="Whether to accumulate data before sampling.",
            default=False,
        )
        random_seed: Optional[int] = SchemaField(
            description="Seed for random number generator (optional).",
            default=None,
        )
        stratify_key: Optional[str] = SchemaField(
            description="Key to use for stratified sampling (required for stratified sampling).",
            default=None,
        )
        weight_key: Optional[str] = SchemaField(
            description="Key to use for weighted sampling (required for weighted sampling).",
            default=None,
        )
        cluster_key: Optional[str] = SchemaField(
            description="Key to use for cluster sampling (required for cluster sampling).",
            default=None,
        )

    class Output(BlockSchema):
        sampled_data: List[Union[dict, List[Any]]] = SchemaField(
            description="The sampled subset of the input data."
        )
        sample_indices: List[int] = SchemaField(
            description="The indices of the sampled data in the original dataset."
        )

    def __init__(self):
        super().__init__(
            id="4a448883-71fa-49cf-91cf-70d793bd7d87",
            description="This block samples data from a given dataset using various sampling methods.",
            categories={BlockCategory.LOGIC},
            input_schema=DataSamplingBlock.Input,
            output_schema=DataSamplingBlock.Output,
            test_input={
                "data": [
                    {"id": i, "value": chr(97 + i), "group": i % 3} for i in range(10)
                ],
                "sample_size": 3,
                "sampling_method": SamplingMethod.STRATIFIED,
                "accumulate": False,
                "random_seed": 42,
                "stratify_key": "group",
            },
            test_output=[
                (
                    "sampled_data",
                    [
                        {"id": 0, "value": "a", "group": 0},
                        {"id": 1, "value": "b", "group": 1},
                        {"id": 8, "value": "i", "group": 2},
                    ],
                ),
                ("sample_indices", [0, 1, 8]),
            ],
        )
        self.accumulated_data = []

    def run(self, input_data: Input) -> BlockOutput:
        if input_data.accumulate:
            if isinstance(input_data.data, dict):
                self.accumulated_data.append(input_data.data)
            elif isinstance(input_data.data, list):
                self.accumulated_data.extend(input_data.data)
            else:
                raise ValueError(f"Unsupported data type: {type(input_data.data)}")

            # If we don't have enough data yet, return without sampling
            if len(self.accumulated_data) < input_data.sample_size:
                return

            data_to_sample = self.accumulated_data
        else:
            # If not accumulating, use the input data directly
            data_to_sample = (
                input_data.data
                if isinstance(input_data.data, list)
                else [input_data.data]
            )

        if input_data.random_seed is not None:
            random.seed(input_data.random_seed)

        data_size = len(data_to_sample)

        if input_data.sample_size > data_size:
            raise ValueError(
                f"Sample size ({input_data.sample_size}) cannot be larger than the dataset size ({data_size})."
            )

        indices = []

        if input_data.sampling_method == SamplingMethod.RANDOM:
            indices = random.sample(range(data_size), input_data.sample_size)
        elif input_data.sampling_method == SamplingMethod.SYSTEMATIC:
            step = data_size // input_data.sample_size
            start = random.randint(0, step - 1)
            indices = list(range(start, data_size, step))[: input_data.sample_size]
        elif input_data.sampling_method == SamplingMethod.TOP:
            indices = list(range(input_data.sample_size))
        elif input_data.sampling_method == SamplingMethod.BOTTOM:
            indices = list(range(data_size - input_data.sample_size, data_size))
        elif input_data.sampling_method == SamplingMethod.STRATIFIED:
            if not input_data.stratify_key:
                raise ValueError(
                    "Stratify key must be provided for stratified sampling."
                )
            strata = defaultdict(list)
            for i, item in enumerate(data_to_sample):
                if isinstance(item, dict):
                    strata_value = item.get(input_data.stratify_key)
                elif hasattr(item, input_data.stratify_key):
                    strata_value = getattr(item, input_data.stratify_key)
                else:
                    raise ValueError(
                        f"Stratify key '{input_data.stratify_key}' not found in item {item}"
                    )

                if strata_value is None:
                    raise ValueError(
                        f"Stratify value for key '{input_data.stratify_key}' is None"
                    )

                strata[str(strata_value)].append(i)

            # Calculate the number of samples to take from each stratum
            stratum_sizes = {
                k: max(1, int(len(v) / data_size * input_data.sample_size))
                for k, v in strata.items()
            }

            # Adjust sizes to ensure we get exactly sample_size samples
            while sum(stratum_sizes.values()) != input_data.sample_size:
                if sum(stratum_sizes.values()) < input_data.sample_size:
                    stratum_sizes[
                        max(stratum_sizes, key=lambda k: stratum_sizes[k])
                    ] += 1
                else:
                    stratum_sizes[
                        max(stratum_sizes, key=lambda k: stratum_sizes[k])
                    ] -= 1

            for stratum, size in stratum_sizes.items():
                indices.extend(random.sample(strata[stratum], size))
        elif input_data.sampling_method == SamplingMethod.WEIGHTED:
            if not input_data.weight_key:
                raise ValueError("Weight key must be provided for weighted sampling.")
            weights = []
            for item in data_to_sample:
                if isinstance(item, dict):
                    weight = item.get(input_data.weight_key)
                elif hasattr(item, input_data.weight_key):
                    weight = getattr(item, input_data.weight_key)
                else:
                    raise ValueError(
                        f"Weight key '{input_data.weight_key}' not found in item {item}"
                    )

                if weight is None:
                    raise ValueError(
                        f"Weight value for key '{input_data.weight_key}' is None"
                    )
                try:
                    weights.append(float(weight))
                except ValueError:
                    raise ValueError(
                        f"Weight value '{weight}' cannot be converted to a number"
                    )

            if not weights:
                raise ValueError(
                    f"No valid weights found using key '{input_data.weight_key}'"
                )

            indices = random.choices(
                range(data_size), weights=weights, k=input_data.sample_size
            )
        elif input_data.sampling_method == SamplingMethod.RESERVOIR:
            indices = list(range(input_data.sample_size))
            for i in range(input_data.sample_size, data_size):
                j = random.randint(0, i)
                if j < input_data.sample_size:
                    indices[j] = i
        elif input_data.sampling_method == SamplingMethod.CLUSTER:
            if not input_data.cluster_key:
                raise ValueError("Cluster key must be provided for cluster sampling.")
            clusters = defaultdict(list)
            for i, item in enumerate(data_to_sample):
                if isinstance(item, dict):
                    cluster_value = item.get(input_data.cluster_key)
                elif hasattr(item, input_data.cluster_key):
                    cluster_value = getattr(item, input_data.cluster_key)
                else:
                    raise TypeError(
                        f"Item {item} does not have the cluster key '{input_data.cluster_key}'"
                    )

                clusters[str(cluster_value)].append(i)

            # Randomly select clusters until we have enough samples
            selected_clusters = []
            while (
                sum(len(clusters[c]) for c in selected_clusters)
                < input_data.sample_size
            ):
                available_clusters = [c for c in clusters if c not in selected_clusters]
                if not available_clusters:
                    break
                selected_clusters.append(random.choice(available_clusters))

            for cluster in selected_clusters:
                indices.extend(clusters[cluster])

            # If we have more samples than needed, randomly remove some
            if len(indices) > input_data.sample_size:
                indices = random.sample(indices, input_data.sample_size)
        else:
            raise ValueError(f"Unknown sampling method: {input_data.sampling_method}")

        sampled_data = [data_to_sample[i] for i in indices]

        # Clear accumulated data after sampling if accumulation is enabled
        if input_data.accumulate:
            self.accumulated_data = []

        yield "sampled_data", sampled_data
        yield "sample_indices", indices
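The RESERVOIR branch above is classic reservoir sampling: keep the first k indices, then give the i-th item (i >= k) a k/(i+1) chance of replacing a random slot, which leaves every item with equal probability k/n. A minimal standalone sketch of the same algorithm (illustrative names, not the project's API):

// Hypothetical illustration of reservoir sampling over indices.
function reservoirIndices(n: number, k: number): number[] {
  const indices = Array.from({ length: k }, (_, i) => i);
  for (let i = k; i < n; i++) {
    const j = Math.floor(Math.random() * (i + 1)); // uniform in [0, i]
    if (j < k) indices[j] = i; // replace a reservoir slot with probability k/(i+1)
  }
  return indices;
}
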
@@ -1,43 +0,0 @@
import logging

import prisma.types

logger = logging.getLogger(__name__)


async def log_raw_analytics(
    user_id: str,
    type: str,
    data: dict,
    data_index: str,
):
    details = await prisma.models.AnalyticsDetails.prisma().create(
        data={
            "userId": user_id,
            "type": type,
            "data": prisma.Json(data),
            "dataIndex": data_index,
        }
    )
    return details


async def log_raw_metric(
    user_id: str,
    metric_name: str,
    metric_value: float,
    data_string: str,
):
    if metric_value < 0:
        raise ValueError("metric_value must be non-negative")

    result = await prisma.models.AnalyticsMetrics.prisma().create(
        data={
            "value": metric_value,
            "analyticMetric": metric_name,
            "userId": user_id,
            "dataString": data_string,
        },
    )

    return result
@@ -1,274 +0,0 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import Any, Optional, Type
|
||||
|
||||
import prisma.errors
|
||||
from prisma import Json
|
||||
from prisma.enums import UserBlockCreditType
|
||||
from prisma.models import UserBlockCredit
|
||||
from pydantic import BaseModel
|
||||
|
||||
from autogpt_server.blocks.llm import (
|
||||
MODEL_METADATA,
|
||||
AIConversationBlock,
|
||||
AIStructuredResponseGeneratorBlock,
|
||||
AITextGeneratorBlock,
|
||||
AITextSummarizerBlock,
|
||||
LlmModel,
|
||||
)
|
||||
from autogpt_server.blocks.talking_head import CreateTalkingAvatarVideoBlock
|
||||
from autogpt_server.data.block import Block, BlockInput
|
||||
from autogpt_server.util.settings import Config
|
||||
|
||||
|
||||
class BlockCostType(str, Enum):
|
||||
RUN = "run" # cost X credits per run
|
||||
BYTE = "byte" # cost X credits per byte
|
||||
SECOND = "second" # cost X credits per second
|
||||
|
||||
|
||||
class BlockCost(BaseModel):
|
||||
cost_amount: int
|
||||
cost_filter: BlockInput
|
||||
cost_type: BlockCostType
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
cost_amount: int,
|
||||
cost_type: BlockCostType = BlockCostType.RUN,
|
||||
cost_filter: Optional[BlockInput] = None,
|
||||
**data: Any,
|
||||
) -> None:
|
||||
super().__init__(
|
||||
cost_amount=cost_amount,
|
||||
cost_filter=cost_filter or {},
|
||||
cost_type=cost_type,
|
||||
**data,
|
||||
)
|
||||
|
||||
|
||||
llm_cost = [
|
||||
BlockCost(
|
||||
cost_type=BlockCostType.RUN,
|
||||
cost_filter={
|
||||
"model": model,
|
||||
"api_key": None, # Running LLM with user own API key is free.
|
||||
},
|
||||
cost_amount=metadata.cost_factor,
|
||||
)
|
||||
for model, metadata in MODEL_METADATA.items()
|
||||
] + [
|
||||
BlockCost(
|
||||
# Default cost is running LlmModel.GPT4O.
|
||||
cost_amount=MODEL_METADATA[LlmModel.GPT4O].cost_factor,
|
||||
cost_filter={"api_key": None},
|
||||
),
|
||||
]
|
||||
|
||||
BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
|
||||
AIConversationBlock: llm_cost,
|
||||
AITextGeneratorBlock: llm_cost,
|
||||
AIStructuredResponseGeneratorBlock: llm_cost,
|
||||
AITextSummarizerBlock: llm_cost,
|
||||
CreateTalkingAvatarVideoBlock: [
|
||||
BlockCost(cost_amount=15, cost_filter={"api_key": None})
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
class UserCreditBase(ABC):
|
||||
def __init__(self, num_user_credits_refill: int):
|
||||
self.num_user_credits_refill = num_user_credits_refill
|
||||
|
||||
@abstractmethod
|
||||
async def get_or_refill_credit(self, user_id: str) -> int:
|
||||
"""
|
||||
Get the current credit for the user and refill if no transaction has been made in the current cycle.
|
||||
|
||||
Returns:
|
||||
int: The current credit for the user.
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def spend_credits(
|
||||
self,
|
||||
user_id: str,
|
||||
user_credit: int,
|
||||
block: Block,
|
||||
input_data: BlockInput,
|
||||
data_size: float,
|
||||
run_time: float,
|
||||
) -> int:
|
||||
"""
|
||||
Spend the credits for the user based on the block usage.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
user_credit (int): The current credit for the user.
|
||||
block (Block): The block that is being used.
|
||||
input_data (BlockInput): The input data for the block.
|
||||
data_size (float): The size of the data being processed.
|
||||
run_time (float): The time taken to run the block.
|
||||
|
||||
Returns:
|
||||
int: amount of credit spent
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
"""
|
||||
Top up the credits for the user.
|
||||
|
||||
Args:
|
||||
user_id (str): The user ID.
|
||||
amount (int): The amount to top up.
|
||||
"""
|
||||
pass
|
||||
|
||||
|
||||
class UserCredit(UserCreditBase):
|
||||
async def get_or_refill_credit(self, user_id: str) -> int:
|
||||
cur_time = self.time_now()
|
||||
cur_month = cur_time.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
|
||||
nxt_month = cur_month.replace(month=cur_month.month + 1)
|
||||
|
||||
user_credit = await UserBlockCredit.prisma().group_by(
|
||||
by=["userId"],
|
||||
sum={"amount": True},
|
||||
where={
|
||||
"userId": user_id,
|
||||
"createdAt": {"gte": cur_month, "lt": nxt_month},
|
||||
"isActive": True,
|
||||
},
|
||||
)
|
||||
|
||||
if user_credit:
|
||||
credit_sum = user_credit[0].get("_sum") or {}
|
||||
return credit_sum.get("amount", 0)
|
||||
|
||||
key = f"MONTHLY-CREDIT-TOP-UP-{cur_month}"
|
||||
|
||||
try:
|
||||
await UserBlockCredit.prisma().create(
|
||||
data={
|
||||
"amount": self.num_user_credits_refill,
|
||||
"type": UserBlockCreditType.TOP_UP,
|
||||
"userId": user_id,
|
||||
"transactionKey": key,
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
except prisma.errors.UniqueViolationError:
|
||||
pass # Already refilled this month
|
||||
|
||||
return self.num_user_credits_refill
|
||||
|
||||
@staticmethod
|
||||
def time_now():
|
||||
return datetime.now(timezone.utc)
|
||||
|
||||
@staticmethod
|
||||
def _block_usage_cost(
|
||||
block: Block,
|
||||
input_data: BlockInput,
|
||||
data_size: float,
|
||||
run_time: float,
|
||||
) -> tuple[int, BlockInput]:
|
||||
block_costs = BLOCK_COSTS.get(type(block))
|
||||
if not block_costs:
|
||||
return 0, {}
|
||||
|
||||
for block_cost in block_costs:
|
||||
if all(
|
||||
# None, [], {}, "", are considered the same value.
|
||||
input_data.get(k) == b or (not input_data.get(k) and not b)
|
||||
for k, b in block_cost.cost_filter.items()
|
||||
):
|
||||
if block_cost.cost_type == BlockCostType.RUN:
|
||||
return block_cost.cost_amount, block_cost.cost_filter
|
||||
|
||||
if block_cost.cost_type == BlockCostType.SECOND:
|
||||
return (
|
||||
int(run_time * block_cost.cost_amount),
|
||||
block_cost.cost_filter,
|
||||
)
|
||||
|
||||
if block_cost.cost_type == BlockCostType.BYTE:
|
||||
return (
|
||||
int(data_size * block_cost.cost_amount),
|
||||
block_cost.cost_filter,
|
||||
)
|
||||
|
||||
return 0, {}
|
||||
|
||||
async def spend_credits(
|
||||
self,
|
||||
user_id: str,
|
||||
user_credit: int,
|
||||
block: Block,
|
||||
input_data: BlockInput,
|
||||
data_size: float,
|
||||
run_time: float,
|
||||
validate_balance: bool = True,
|
||||
) -> int:
|
||||
cost, matching_filter = self._block_usage_cost(
|
||||
block=block, input_data=input_data, data_size=data_size, run_time=run_time
|
||||
)
|
||||
if cost <= 0:
|
||||
return 0
|
||||
|
||||
if validate_balance and user_credit < cost:
|
||||
raise ValueError(f"Insufficient credit: {user_credit} < {cost}")
|
||||
|
||||
await UserBlockCredit.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"amount": -cost,
|
||||
"type": UserBlockCreditType.USAGE,
|
||||
"blockId": block.id,
|
||||
"metadata": Json(
|
||||
{
|
||||
"block": block.name,
|
||||
"input": matching_filter,
|
||||
}
|
||||
),
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
return cost
|
||||
|
||||
async def top_up_credits(self, user_id: str, amount: int):
|
||||
await UserBlockCredit.prisma().create(
|
||||
data={
|
||||
"userId": user_id,
|
||||
"amount": amount,
|
||||
"type": UserBlockCreditType.TOP_UP,
|
||||
"createdAt": self.time_now(),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DisabledUserCredit(UserCreditBase):
|
||||
async def get_or_refill_credit(self, *args, **kwargs) -> int:
|
||||
return 0
|
||||
|
||||
async def spend_credits(self, *args, **kwargs) -> int:
|
||||
return 0
|
||||
|
||||
async def top_up_credits(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
def get_user_credit_model() -> UserCreditBase:
|
||||
config = Config()
|
||||
if config.enable_credit.lower() == "true":
|
||||
return UserCredit(config.num_user_credits_refill)
|
||||
else:
|
||||
return DisabledUserCredit(0)
|
||||
|
||||
|
||||
def get_block_costs() -> dict[str, list[BlockCost]]:
|
||||
return {block().id: costs for block, costs in BLOCK_COSTS.items()}
|
||||
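Editor's note (not part of the diff): a BLOCK_COSTS entry applies when every key in its cost_filter matches the block's input, with falsy values (None, "", [], {}) treated as equivalent — that is how cost_filter={"api_key": None} charges only runs that don't bring their own key. A minimal sketch of that matching rule, with hypothetical input values:

    cost_filter = {"api_key": None}
    input_data = {"api_key": "", "prompt": "hi"}  # "" and None compare as equal here
    matches = all(
        input_data.get(k) == v or (not input_data.get(k) and not v)
        for k, v in cost_filter.items()
    )
    assert matches  # a RUN-type cost would then charge cost_amount once per execution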
@@ -1,9 +1,9 @@
from collections import defaultdict
from datetime import datetime, timezone
from enum import Enum
from multiprocessing import Manager
from typing import Any, Generic, TypeVar

from prisma.enums import AgentExecutionStatus
from prisma.models import (
    AgentGraphExecution,
    AgentNodeExecution,
@@ -21,14 +21,12 @@ from autogpt_server.util import json, mock


class GraphExecution(BaseModel):
    user_id: str
    graph_exec_id: str
    graph_id: str
    start_node_execs: list["NodeExecution"]
    graph_id: str


class NodeExecution(BaseModel):
    user_id: str
    graph_exec_id: str
    graph_id: str
    node_exec_id: str
@@ -36,7 +34,13 @@ class NodeExecution(BaseModel):
    data: BlockInput


ExecutionStatus = AgentExecutionStatus
class ExecutionStatus(str, Enum):
    INCOMPLETE = "INCOMPLETE"
    QUEUED = "QUEUED"
    RUNNING = "RUNNING"
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"


T = TypeVar("T")

@@ -144,7 +148,6 @@ async def create_graph_execution(
        data={
            "agentGraphId": graph_id,
            "agentGraphVersion": graph_version,
            "executionStatus": ExecutionStatus.QUEUED,
            "AgentNodeExecutions": {
                "create": [  # type: ignore
                    {
@@ -256,20 +259,10 @@ async def upsert_execution_output(
    )


async def update_graph_execution_start_time(graph_exec_id: str):
    await AgentGraphExecution.prisma().update(
        where={"id": graph_exec_id},
        data={
            "executionStatus": ExecutionStatus.RUNNING,
            "startedAt": datetime.now(tz=timezone.utc),
        },
    )


async def update_graph_execution_stats(graph_exec_id: str, stats: dict[str, Any]):
    await AgentGraphExecution.prisma().update(
        where={"id": graph_exec_id},
        data={"executionStatus": ExecutionStatus.COMPLETED, "stats": json.dumps(stats)},
        data={"stats": json.dumps(stats)},
    )
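Editor's note (not part of the diff): one side of the hunk above replaces the hand-written ExecutionStatus enum with an alias of the Prisma-generated AgentExecutionStatus, so existing call sites keep working while the set of values is owned by the database schema. A sketch of the aliasing, assuming the generated enum:

    from prisma.enums import AgentExecutionStatus

    ExecutionStatus = AgentExecutionStatus  # call sites still use ExecutionStatus.QUEUED etc.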
@@ -17,10 +17,8 @@ if TYPE_CHECKING:
from autogpt_server.blocks.basic import AgentInputBlock
from autogpt_server.data import db
from autogpt_server.data.block import Block, BlockData, BlockInput, get_block
from autogpt_server.data.credit import get_user_credit_model
from autogpt_server.data.execution import (
    ExecutionQueue,
    ExecutionResult,
    ExecutionStatus,
    GraphExecution,
    NodeExecution,
@@ -47,41 +45,25 @@ from autogpt_server.util.type import convert
logger = logging.getLogger(__name__)


class LogMetadata:
    def __init__(
        self,
        user_id: str,
        graph_eid: str,
        graph_id: str,
        node_eid: str,
        node_id: str,
        block_name: str,
    ):
        self.metadata = {
            "component": "ExecutionManager",
            "user_id": user_id,
            "graph_eid": graph_eid,
            "graph_id": graph_id,
            "node_eid": node_eid,
            "node_id": node_id,
            "block_name": block_name,
        }
        self.prefix = f"[ExecutionManager|uid:{user_id}|gid:{graph_id}|nid:{node_id}]|geid:{graph_eid}|nid:{node_eid}|{block_name}]"
def get_log_metadata(
    graph_eid: str,
    graph_id: str,
    node_eid: str,
    node_id: str,
    block_name: str,
) -> dict:
    return {
        "component": "ExecutionManager",
        "graph_eid": graph_eid,
        "graph_id": graph_id,
        "node_eid": node_eid,
        "node_id": node_id,
        "block_name": block_name,
    }

    def info(self, msg: str, **extra):
        logger.info(msg, extra={"json_fields": {**self.metadata, **extra}})

    def warning(self, msg: str, **extra):
        logger.warning(msg, extra={"json_fields": {**self.metadata, **extra}})

    def error(self, msg: str, **extra):
        logger.error(msg, extra={"json_fields": {**self.metadata, **extra}})

    def debug(self, msg: str, **extra):
        logger.debug(msg, extra={"json_fields": {**self.metadata, **extra}})

    def exception(self, msg: str, **extra):
        logger.exception(msg, extra={"json_fields": {**self.metadata, **extra}})
def get_log_prefix(graph_eid: str, node_eid: str, block_name: str = "-"):
    return f"[ExecutionManager][graph-eid-{graph_eid}|node-eid-{node_eid}|{block_name}]"


T = TypeVar("T")
@@ -107,7 +89,6 @@ def execute_node(
    Returns:
        The subsequent node to be enqueued, or None if there is no subsequent node.
    """
    user_id = data.user_id
    graph_exec_id = data.graph_exec_id
    graph_id = data.graph_id
    node_exec_id = data.node_exec_id
@@ -118,10 +99,9 @@ def execute_node(
    def wait(f: Coroutine[Any, Any, T]) -> T:
        return loop.run_until_complete(f)

    def update_execution(status: ExecutionStatus) -> ExecutionResult:
    def update_execution(status: ExecutionStatus):
        exec_update = wait(update_execution_status(node_exec_id, status))
        api_client.send_execution_update(exec_update.model_dump())
        return exec_update

    node = wait(get_node(node_id))

@@ -131,35 +111,43 @@ def execute_node(
        return

    # Sanity check: validate the execution input.
    log_metadata = LogMetadata(
        user_id=user_id,
    log_metadata = get_log_metadata(
        graph_eid=graph_exec_id,
        graph_id=graph_id,
        node_eid=node_exec_id,
        node_id=node_id,
        block_name=node_block.name,
    )
    prefix = get_log_prefix(
        graph_eid=graph_exec_id,
        node_eid=node_exec_id,
        block_name=node_block.name,
    )
    input_data, error = validate_exec(node, data.data, resolve_input=False)
    if input_data is None:
        log_metadata.error(f"Skip execution, input validation error: {error}")
        logger.error(
            f"{prefix} Skip execution, input validation error",
            extra={"json_fields": {**log_metadata, "error": error}},
        )
        return

    # Execute the node
    input_data_str = json.dumps(input_data)
    input_size = len(input_data_str)
    log_metadata.info("Executed node with input", input=input_data_str)
    logger.info(
        f"{prefix} Executed node with input",
        extra={"json_fields": {**log_metadata, "input": input_data_str}},
    )
    update_execution(ExecutionStatus.RUNNING)
    user_credit = get_user_credit_model()

    output_size = 0
    try:
        credit = wait(user_credit.get_or_refill_credit(user_id))
        if credit < 0:
            raise ValueError(f"Insufficient credit: {credit}")

        for output_name, output_data in node_block.execute(input_data):
            output_size += len(json.dumps(output_data))
            log_metadata.info("Node produced output", output_name=output_data)
            logger.info(
                f"{prefix} Node produced output",
                extra={"json_fields": {**log_metadata, output_name: output_data}},
            )
            wait(upsert_execution_output(node_exec_id, output_name, output_data))

            for execution in _enqueue_next_nodes(
@@ -167,25 +155,20 @@ def execute_node(
                loop=loop,
                node=node,
                output=(output_name, output_data),
                user_id=user_id,
                graph_exec_id=graph_exec_id,
                graph_id=graph_id,
                log_metadata=log_metadata,
            ):
                yield execution

        r = update_execution(ExecutionStatus.COMPLETED)
        s = input_size + output_size
        t = (
            (r.end_time - r.start_time).total_seconds()
            if r.end_time and r.start_time
            else 0
        )
        wait(user_credit.spend_credits(user_id, credit, node_block, input_data, s, t))
        update_execution(ExecutionStatus.COMPLETED)

    except Exception as e:
        error_msg = str(e)
        log_metadata.exception(f"Node execution failed with error {error_msg}")
        error_msg = f"{e.__class__.__name__}: {e}"
        logger.exception(
            f"{prefix} Node execution failed with error",
            extra={"json_fields": {**log_metadata, "error": error_msg}},
        )
        wait(upsert_execution_output(node_exec_id, "error", error_msg))
        update_execution(ExecutionStatus.FAILED)

@@ -211,10 +194,9 @@ def _enqueue_next_nodes(
    loop: asyncio.AbstractEventLoop,
    node: Node,
    output: BlockData,
    user_id: str,
    graph_exec_id: str,
    graph_id: str,
    log_metadata: LogMetadata,
    log_metadata: dict,
) -> list[NodeExecution]:
    def wait(f: Coroutine[Any, Any, T]) -> T:
        return loop.run_until_complete(f)
@@ -227,7 +209,6 @@ def _enqueue_next_nodes(
        )
        api_client.send_execution_update(exec_update.model_dump())
        return NodeExecution(
            user_id=user_id,
            graph_exec_id=graph_exec_id,
            graph_id=graph_id,
            node_exec_id=node_exec_id,
@@ -281,11 +262,17 @@ def _enqueue_next_nodes(

            # Incomplete input data, skip queueing the execution.
            if not next_node_input:
                log_metadata.warning(f"Skipped queueing {suffix}")
                logger.warning(
                    f"Skipped queueing {suffix}",
                    extra={"json_fields": {**log_metadata}},
                )
                return enqueued_executions

            # Input is complete, enqueue the execution.
            log_metadata.info(f"Enqueued {suffix}")
            logger.info(
                f"Enqueued {suffix}",
                extra={"json_fields": {**log_metadata}},
            )
            enqueued_executions.append(
                add_enqueued_execution(next_node_exec_id, next_node_id, next_node_input)
            )
@@ -311,9 +298,11 @@ def _enqueue_next_nodes(
                idata, msg = validate_exec(next_node, idata)
                suffix = f"{next_output_name}>{next_input_name}~{ineid}:{msg}"
                if not idata:
                    log_metadata.info(f"Enqueueing static-link skipped: {suffix}")
                    logger.info(
                        f"{log_metadata} Enqueueing static-link skipped: {suffix}"
                    )
                    continue
                log_metadata.info(f"Enqueueing static-link execution {suffix}")
                logger.info(f"{log_metadata} Enqueueing static-link execution {suffix}")
                enqueued_executions.append(
                    add_enqueued_execution(iexec.node_exec_id, next_node_id, idata)
                )
@@ -454,18 +443,22 @@ class Executor:
    def on_node_execution(
        cls, q: ExecutionQueue[NodeExecution], node_exec: NodeExecution
    ):
        log_metadata = LogMetadata(
            user_id=node_exec.user_id,
        log_metadata = get_log_metadata(
            graph_eid=node_exec.graph_exec_id,
            graph_id=node_exec.graph_id,
            node_eid=node_exec.node_exec_id,
            node_id=node_exec.node_id,
            block_name="-",
        )
        prefix = get_log_prefix(
            graph_eid=node_exec.graph_exec_id,
            node_eid=node_exec.node_exec_id,
            block_name="-",
        )

        execution_stats = {}
        timing_info, _ = cls._on_node_execution(
            q, node_exec, log_metadata, execution_stats
            q, node_exec, log_metadata, prefix, execution_stats
        )
        execution_stats["walltime"] = timing_info.wall_time
        execution_stats["cputime"] = timing_info.cpu_time
@@ -480,19 +473,29 @@ class Executor:
        cls,
        q: ExecutionQueue[NodeExecution],
        node_exec: NodeExecution,
        log_metadata: LogMetadata,
        log_metadata: dict,
        prefix: str,
        stats: dict[str, Any] | None = None,
    ):
        try:
            log_metadata.info(f"Start node execution {node_exec.node_exec_id}")
            logger.info(
                f"{prefix} Start node execution {node_exec.node_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
            for execution in execute_node(
                cls.loop, cls.agent_server_client, node_exec, stats
            ):
                q.add(execution)
            log_metadata.info(f"Finished node execution {node_exec.node_exec_id}")
            logger.info(
                f"{prefix} Finished node execution {node_exec.node_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
        except Exception as e:
            log_metadata.exception(
                f"Failed node execution {node_exec.node_exec_id}: {e}"
            logger.exception(
                f"Failed node execution {node_exec.node_exec_id}: {e}",
                extra={
                    **log_metadata,
                },
            )

    @classmethod
@@ -514,12 +517,10 @@ class Executor:

    @classmethod
    def on_graph_executor_stop(cls):
        prefix = f"[on_graph_executor_stop {cls.pid}]"
        logger.info(f"{prefix} ⏳ Disconnecting DB...")
        cls.loop.run_until_complete(db.disconnect())
        logger.info(f"{prefix} ⏳ Terminating node executor pool...")
        logger.info(
            f"[on_graph_executor_stop {cls.pid}] ⏳ Terminating node executor pool..."
        )
        cls.executor.terminate()
        logger.info(f"{prefix} ✅ Finished cleanup")

    @classmethod
    def _init_node_executor_pool(cls):
@@ -531,16 +532,20 @@ class Executor:
    @classmethod
    @error_logged
    def on_graph_execution(cls, graph_exec: GraphExecution, cancel: threading.Event):
        log_metadata = LogMetadata(
            user_id=graph_exec.user_id,
        log_metadata = get_log_metadata(
            graph_eid=graph_exec.graph_exec_id,
            graph_id=graph_exec.graph_id,
            node_id="*",
            node_eid="*",
            block_name="-",
        )
        prefix = get_log_prefix(
            graph_eid=graph_exec.graph_exec_id,
            node_eid="*",
            block_name="-",
        )
        timing_info, node_count = cls._on_graph_execution(
            graph_exec, cancel, log_metadata
            graph_exec, cancel, log_metadata, prefix
        )

        cls.loop.run_until_complete(
@@ -560,9 +565,13 @@ class Executor:
        cls,
        graph_exec: GraphExecution,
        cancel: threading.Event,
        log_metadata: LogMetadata,
        log_metadata: dict,
        prefix: str,
    ) -> int:
        log_metadata.info(f"Start graph execution {graph_exec.graph_exec_id}")
        logger.info(
            f"{prefix} Start graph execution {graph_exec.graph_exec_id}",
            extra={"json_fields": {**log_metadata}},
        )
        n_node_executions = 0
        finished = False

@@ -572,7 +581,10 @@ class Executor:
            if finished:
                return
            cls.executor.terminate()
            log_metadata.info(f"Terminated graph execution {graph_exec.graph_exec_id}")
            logger.info(
                f"{prefix} Terminated graph execution {graph_exec.graph_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
            cls._init_node_executor_pool()

        cancel_thread = threading.Thread(target=cancel_handler)
@@ -610,9 +622,10 @@ class Executor:
                # Re-enqueueing the data back to the queue will disrupt the order.
                execution.wait()

                log_metadata.debug(
                    f"Dispatching node execution {exec_data.node_exec_id} "
                logger.debug(
                    f"{prefix} Dispatching node execution {exec_data.node_exec_id} "
                    f"for node {exec_data.node_id}",
                    extra={**log_metadata},
                )
                running_executions[exec_data.node_id] = cls.executor.apply_async(
                    cls.on_node_execution,
@@ -622,8 +635,10 @@ class Executor:

                # Avoid terminating graph execution when some nodes are still running.
                while queue.empty() and running_executions:
                    log_metadata.debug(
                        f"Queue empty; running nodes: {list(running_executions.keys())}"
                    logger.debug(
                        "Queue empty; running nodes: "
                        f"{list(running_executions.keys())}",
                        extra={"json_fields": {**log_metadata}},
                    )
                    for node_id, execution in list(running_executions.items()):
                        if cancel.is_set():
@@ -632,13 +647,20 @@ class Executor:
                        if not queue.empty():
                            break  # yield to parent loop to execute new queue items

                        log_metadata.debug(f"Waiting on execution of node {node_id}")
                        logger.debug(
                            f"Waiting on execution of node {node_id}",
                            extra={"json_fields": {**log_metadata}},
                        )
                        execution.wait(3)

            log_metadata.info(f"Finished graph execution {graph_exec.graph_exec_id}")
            logger.info(
                f"{prefix} Finished graph execution {graph_exec.graph_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
        except Exception as e:
            log_metadata.exception(
                f"Failed graph execution {graph_exec.graph_exec_id}: {e}"
            logger.exception(
                f"{prefix} Failed graph execution {graph_exec.graph_exec_id}: {e}",
                extra={"json_fields": {**log_metadata}},
            )
        finally:
            if not cancel.is_set():
@@ -725,7 +747,6 @@ class ExecutionManager(AppService):
        for node_exec in node_execs:
            starting_node_execs.append(
                NodeExecution(
                    user_id=user_id,
                    graph_exec_id=node_exec.graph_exec_id,
                    graph_id=node_exec.graph_id,
                    node_exec_id=node_exec.node_exec_id,
@@ -741,7 +762,6 @@ class ExecutionManager(AppService):
        self.agent_server_client.send_execution_update(exec_update.model_dump())

        graph_exec = GraphExecution(
            user_id=user_id,
            graph_id=graph_id,
            graph_exec_id=graph_exec_id,
            start_node_execs=starting_node_execs,
rnd/autogpt_server/autogpt_server/integrations/github.py  (new file, 99 lines)
@@ -0,0 +1,99 @@
import time
from typing import Optional
from urllib.parse import urlencode

import requests
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from autogpt_server.integrations.oauth import BaseOAuthHandler


class GitHubOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at:
    - [Authorizing OAuth apps - GitHub Docs](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps)
    - [Refreshing user access tokens - GitHub Docs](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/refreshing-user-access-tokens)

    Notes:
    - By default, token expiration is disabled on GitHub Apps. This means the access
      token doesn't expire and no refresh token is returned by the authorization flow.
    - When token expiration gets enabled, any existing tokens will remain non-expiring.
    - When token expiration gets disabled, token refreshes will return a non-expiring
      access token *with no refresh token*.
    """  # noqa

    PROVIDER_NAME = "github"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://github.com/login/oauth/authorize"
        self.token_url = "https://github.com/login/oauth/access_token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "scope": " ".join(scopes),
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if not credentials.refresh_token:
            return credentials

        return self._request_tokens(
            {
                "refresh_token": credentials.refresh_token.get_secret_value(),
                "grant_type": "refresh_token",
            }
        )

    def _request_tokens(
        self,
        params: dict[str, str],
        current_credentials: Optional[OAuth2Credentials] = None,
    ) -> OAuth2Credentials:
        request_body = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            **params,
        }
        headers = {"Accept": "application/json"}
        response = requests.post(self.token_url, data=request_body, headers=headers)
        response.raise_for_status()
        token_data: dict = response.json()

        now = int(time.time())
        new_credentials = OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=current_credentials.title if current_credentials else "GitHub",
            access_token=token_data["access_token"],
            # Token refresh responses have an empty `scope` property (see docs),
            # so we have to get the scope from the existing credentials object.
            scopes=(
                token_data.get("scope", "").split(",")
                or (current_credentials.scopes if current_credentials else [])
            ),
            # Refresh token and expiration intervals are only given if token expiration
            # is enabled in the GitHub App's settings.
            refresh_token=token_data.get("refresh_token"),
            access_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("expires_in", None))
                else None
            ),
            refresh_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("refresh_token_expires_in", None))
                else None
            ),
        )
        if current_credentials:
            new_credentials.id = current_credentials.id
        return new_credentials
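Editor's note (not part of the diff): a minimal sketch of how this handler is meant to be driven end to end; the client ID/secret, redirect URI, scopes, and authorization code below are placeholders:

    handler = GitHubOAuthHandler(
        client_id="<client-id>",
        client_secret="<client-secret>",
        redirect_uri="https://localhost:3000/auth/callback",
    )
    login_url = handler.get_login_url(scopes=["repo"], state="<csrf-state>")
    # ...the user signs in at login_url; GitHub redirects back with ?code=...
    credentials = handler.exchange_code_for_tokens("<authorization-code>")
    token = handler.get_access_token(credentials)  # refreshes first if close to expiry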
rnd/autogpt_server/autogpt_server/integrations/google.py  (new file, 96 lines)
@@ -0,0 +1,96 @@
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from pydantic import SecretStr

from .oauth import BaseOAuthHandler


class GoogleOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.google.com/identity/protocols/oauth2/web-server
    """  # noqa

    PROVIDER_NAME = "google"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.token_uri = "https://oauth2.googleapis.com/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        flow = self._setup_oauth_flow(scopes)
        flow.redirect_uri = self.redirect_uri
        authorization_url, _ = flow.authorization_url(
            access_type="offline",
            include_granted_scopes="true",
            state=state,
            prompt="consent",
        )
        return authorization_url

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        flow = self._setup_oauth_flow(None)
        flow.redirect_uri = self.redirect_uri
        flow.fetch_token(code=code)

        google_creds = flow.credentials
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.token
        assert google_creds.refresh_token
        assert google_creds.expiry
        assert google_creds.scopes
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title="Google",
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Google credentials should ALWAYS have a refresh token
        assert credentials.refresh_token

        google_creds = Credentials(
            token=credentials.access_token.get_secret_value(),
            refresh_token=credentials.refresh_token.get_secret_value(),
            token_uri=self.token_uri,
            client_id=self.client_id,
            client_secret=self.client_secret,
            scopes=credentials.scopes,
        )
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.refresh_token
        assert google_creds.scopes

        google_creds.refresh(Request())
        assert google_creds.expiry

        return OAuth2Credentials(
            id=credentials.id,
            provider=self.PROVIDER_NAME,
            title=credentials.title,
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _setup_oauth_flow(self, scopes: list[str] | None) -> Flow:
        return Flow.from_client_config(
            {
                "web": {
                    "client_id": self.client_id,
                    "client_secret": self.client_secret,
                    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
                    "token_uri": self.token_uri,
                }
            },
            scopes=scopes,
        )
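Editor's note (not part of the diff): Google, unlike GitHub above, always reports an explicit expiry, which this handler stores as a Unix timestamp so the base class's needs_refresh() can compare it directly against the clock. A sketch of that comparison with hypothetical values:

    import time
    from datetime import datetime, timedelta, timezone

    expiry = datetime.now(timezone.utc) + timedelta(hours=1)  # what Google returns
    access_token_expires_at = int(expiry.timestamp())          # what gets stored
    needs_refresh = access_token_expires_at < int(time.time()) + 300  # 5-minute buffer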
rnd/autogpt_server/autogpt_server/integrations/notion.py  (new file, 76 lines)
@@ -0,0 +1,76 @@
from base64 import b64encode
from urllib.parse import urlencode

import requests
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from autogpt_server.integrations.oauth import BaseOAuthHandler


class NotionOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.notion.com/docs/authorization

    Notes:
    - Notion uses non-expiring access tokens and therefore doesn't have a refresh flow
    - Notion doesn't use scopes
    """

    PROVIDER_NAME = "notion"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://api.notion.com/v1/oauth/authorize"
        self.token_url = "https://api.notion.com/v1/oauth/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "response_type": "code",
            "owner": "user",
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        request_body = {
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": self.redirect_uri,
        }
        auth_str = b64encode(f"{self.client_id}:{self.client_secret}".encode()).decode()
        headers = {
            "Authorization": f"Basic {auth_str}",
            "Accept": "application/json",
        }
        response = requests.post(self.token_url, json=request_body, headers=headers)
        response.raise_for_status()
        token_data = response.json()

        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=token_data.get("workspace_name", "Notion"),
            access_token=token_data["access_token"],
            refresh_token=None,
            access_token_expires_at=None,  # Notion tokens don't expire
            refresh_token_expires_at=None,
            scopes=[],
            metadata={
                "owner": token_data["owner"],
                "bot_id": token_data["bot_id"],
                "workspace_id": token_data["workspace_id"],
                "workspace_name": token_data.get("workspace_name"),
                "workspace_icon": token_data.get("workspace_icon"),
            },
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Notion doesn't support token refresh
        return credentials

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        # Notion access tokens don't expire
        return False
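Editor's note (not part of the diff): because needs_refresh() is overridden to always return False, the base class's get_access_token() hands back Notion's stored token without ever attempting a refresh round-trip. A sketch with placeholder values:

    handler = NotionOAuthHandler("<client-id>", "<client-secret>", "<redirect-uri>")
    creds = handler.exchange_code_for_tokens("<authorization-code>")
    assert handler.needs_refresh(creds) is False
    token = handler.get_access_token(creds)  # no refresh request is made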
rnd/autogpt_server/autogpt_server/integrations/oauth.py  (new file, 48 lines)
@@ -0,0 +1,48 @@
import time
from abc import ABC, abstractmethod
from typing import ClassVar

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials


class BaseOAuthHandler(ABC):
    PROVIDER_NAME: ClassVar[str]

    @abstractmethod
    def __init__(self, client_id: str, client_secret: str, redirect_uri: str): ...

    @abstractmethod
    def get_login_url(self, scopes: list[str], state: str) -> str:
        """Constructs a login URL that the user can be redirected to"""
        ...

    @abstractmethod
    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        """Exchanges the acquired authorization code from login for a set of tokens"""
        ...

    @abstractmethod
    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        """Implements the token refresh mechanism"""
        ...

    def refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if credentials.provider != self.PROVIDER_NAME:
            raise ValueError(
                f"{self.__class__.__name__} can not refresh tokens "
                f"for other provider '{credentials.provider}'"
            )
        return self._refresh_tokens(credentials)

    def get_access_token(self, credentials: OAuth2Credentials) -> str:
        """Returns a valid access token, refreshing it first if needed"""
        if self.needs_refresh(credentials):
            credentials = self.refresh_tokens(credentials)
        return credentials.access_token.get_secret_value()

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        """Indicates whether the given tokens need to be refreshed"""
        return (
            credentials.access_token_expires_at is not None
            and credentials.access_token_expires_at < int(time.time()) + 300
        )
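Editor's note (not part of the diff): adding a new provider means subclassing BaseOAuthHandler and supplying the three abstract methods; refresh_tokens(), get_access_token(), and needs_refresh() then come for free. A sketch for a hypothetical provider:

    class ExampleOAuthHandler(BaseOAuthHandler):
        PROVIDER_NAME = "example"

        def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
            self.client_id = client_id
            self.client_secret = client_secret
            self.redirect_uri = redirect_uri

        def get_login_url(self, scopes: list[str], state: str) -> str:
            return f"https://example.com/oauth/authorize?state={state}"

        def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
            raise NotImplementedError("call the provider's token endpoint here")

        def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
            return credentials  # a provider without refresh can return as-is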
@@ -23,7 +23,6 @@ class GitHubOAuthHandler(BaseOAuthHandler):
    """  # noqa

    PROVIDER_NAME = "github"
    EMAIL_ENDPOINT = "https://api.github.com/user/emails"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
@@ -70,13 +69,10 @@ class GitHubOAuthHandler(BaseOAuthHandler):
        response.raise_for_status()
        token_data: dict = response.json()

        username = self._request_username(token_data["access_token"])

        now = int(time.time())
        new_credentials = OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=current_credentials.title if current_credentials else None,
            username=username,
            title=current_credentials.title if current_credentials else "GitHub",
            access_token=token_data["access_token"],
            # Token refresh responses have an empty `scope` property (see docs),
            # so we have to get the scope from the existing credentials object.
@@ -101,19 +97,3 @@ class GitHubOAuthHandler(BaseOAuthHandler):
        if current_credentials:
            new_credentials.id = current_credentials.id
        return new_credentials

    def _request_username(self, access_token: str) -> str | None:
        url = "https://api.github.com/user"
        headers = {
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {access_token}",
            "X-GitHub-Api-Version": "2022-11-28",
        }

        response = requests.get(url, headers=headers)

        if not response.ok:
            return None

        # Get the login (username)
        return response.json().get("login")

@@ -1,8 +1,5 @@
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from google.auth.external_account_authorized_user import (
    Credentials as ExternalAccountCredentials,
)
from google.auth.transport.requests import AuthorizedSession, Request
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from pydantic import SecretStr
@@ -16,7 +13,6 @@ class GoogleOAuthHandler(BaseOAuthHandler):
    """  # noqa

    PROVIDER_NAME = "google"
    EMAIL_ENDPOINT = "https://www.googleapis.com/oauth2/v2/userinfo"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
@@ -41,8 +37,6 @@ class GoogleOAuthHandler(BaseOAuthHandler):
        flow.fetch_token(code=code)

        google_creds = flow.credentials
        username = self._request_email(google_creds)

        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.token
        assert google_creds.refresh_token
@@ -50,8 +44,7 @@ class GoogleOAuthHandler(BaseOAuthHandler):
        assert google_creds.scopes
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=None,
            username=username,
            title="Google",
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
@@ -59,15 +52,6 @@ class GoogleOAuthHandler(BaseOAuthHandler):
            scopes=google_creds.scopes,
        )

    def _request_email(
        self, creds: Credentials | ExternalAccountCredentials
    ) -> str | None:
        session = AuthorizedSession(creds)
        response = session.get(self.EMAIL_ENDPOINT)
        if not response.ok:
            return None
        return response.json()["email"]

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Google credentials should ALWAYS have a refresh token
        assert credentials.refresh_token
@@ -88,10 +72,9 @@ class GoogleOAuthHandler(BaseOAuthHandler):
        assert google_creds.expiry

        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            id=credentials.id,
            provider=self.PROVIDER_NAME,
            title=credentials.title,
            username=credentials.username,
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),

@@ -49,18 +49,10 @@ class NotionOAuthHandler(BaseOAuthHandler):
        response = requests.post(self.token_url, json=request_body, headers=headers)
        response.raise_for_status()
        token_data = response.json()
        # Email is only available for non-bot users
        email = (
            token_data["owner"]["person"]["email"]
            if "person" in token_data["owner"]
            and "email" in token_data["owner"]["person"]
            else None
        )

        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=token_data.get("workspace_name"),
            username=email,
            title=token_data.get("workspace_name", "Notion"),
            access_token=token_data["access_token"],
            refresh_token=None,
            access_token_expires_at=None,  # Notion tokens don't expire
@@ -4,10 +4,6 @@ from typing import Annotated, Literal
from autogpt_libs.supabase_integration_credentials_store import (
    SupabaseIntegrationCredentialsStore,
)
from autogpt_libs.supabase_integration_credentials_store.types import (
    Credentials,
    OAuth2Credentials,
)
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel
from supabase import Client
@@ -15,11 +11,11 @@ from supabase import Client
from autogpt_server.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from autogpt_server.util.settings import Settings

from ..utils import get_supabase, get_user_id
from .utils import get_supabase, get_user_id

logger = logging.getLogger(__name__)
settings = Settings()
router = APIRouter()
integrations_api_router = APIRouter()


def get_store(supabase: Client = Depends(get_supabase)):
@@ -30,7 +26,7 @@ class LoginResponse(BaseModel):
    login_url: str


@router.get("/{provider}/login")
@integrations_api_router.get("/{provider}/login")
async def login(
    provider: Annotated[str, Path(title="The provider to initiate an OAuth flow for")],
    user_id: Annotated[str, Depends(get_user_id)],
@@ -52,14 +48,11 @@ async def login(


class CredentialsMetaResponse(BaseModel):
    id: str
    type: Literal["oauth2", "api_key"]
    title: str | None
    scopes: list[str] | None
    username: str | None
    credentials_id: str
    credentials_type: Literal["oauth2", "api_key"]


@router.post("/{provider}/callback")
@integrations_api_router.post("/{provider}/callback")
async def callback(
    provider: Annotated[str, Path(title="The target provider for this OAuth exchange")],
    code: Annotated[str, Body(title="Authorization code acquired by user login")],
@@ -80,53 +73,13 @@ async def callback(
        logger.warning(f"Code->Token exchange failed for provider {provider}: {e}")
        raise HTTPException(status_code=400, detail=str(e))

    # TODO: Allow specifying `title` to set on `credentials`
    store.add_creds(user_id, credentials)
    return CredentialsMetaResponse(
        id=credentials.id,
        type=credentials.type,
        title=credentials.title,
        scopes=credentials.scopes,
        username=credentials.username,
        credentials_id=credentials.id,
        credentials_type=credentials.type,
    )


@router.get("/{provider}/credentials")
async def list_credentials(
    provider: Annotated[str, Path(title="The provider to list credentials for")],
    user_id: Annotated[str, Depends(get_user_id)],
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
) -> list[CredentialsMetaResponse]:
    credentials = store.get_creds_by_provider(user_id, provider)
    return [
        CredentialsMetaResponse(
            id=cred.id,
            type=cred.type,
            title=cred.title,
            scopes=cred.scopes if isinstance(cred, OAuth2Credentials) else None,
            username=cred.username if isinstance(cred, OAuth2Credentials) else None,
        )
        for cred in credentials
    ]


@router.get("/{provider}/credentials/{cred_id}")
async def get_credential(
    provider: Annotated[str, Path(title="The provider to retrieve credentials for")],
    cred_id: Annotated[str, Path(title="The ID of the credentials to retrieve")],
    user_id: Annotated[str, Depends(get_user_id)],
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
) -> Credentials:
    credential = store.get_creds_by_id(user_id, cred_id)
    if not credential:
        raise HTTPException(status_code=404, detail="Credentials not found")
    if credential.provider != provider:
        raise HTTPException(
            status_code=404, detail="Credentials do not match the specified provider"
        )
    return credential


# -------- UTILITIES --------- #
@@ -15,7 +15,6 @@ from autogpt_server.data import execution as execution_db
from autogpt_server.data import graph as graph_db
from autogpt_server.data import user as user_db
from autogpt_server.data.block import BlockInput, CompletedBlockOutput
from autogpt_server.data.credit import get_block_costs, get_user_credit_model
from autogpt_server.data.queue import AsyncEventQueue, AsyncRedisEventQueue
from autogpt_server.data.user import get_or_create_user
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
@@ -33,7 +32,6 @@ class AgentServer(AppService):
    mutex = KeyedMutex()
    use_redis = True
    _test_dependency_overrides = {}
    _user_credit_model = get_user_credit_model()

    def __init__(self, event_queue: AsyncEventQueue | None = None):
        super().__init__(port=Config().agent_server_port)
@@ -78,180 +76,138 @@ class AgentServer(AppService):
        api_router.dependencies.append(Depends(auth_middleware))

        # Import & Attach sub-routers
        import autogpt_server.server.routers.analytics
        import autogpt_server.server.routers.integrations
        from .integrations import integrations_api_router

        api_router.include_router(
            autogpt_server.server.routers.integrations.router,
            prefix="/integrations",
            tags=["integrations"],
            dependencies=[Depends(auth_middleware)],
        )

        api_router.include_router(
            autogpt_server.server.routers.analytics.router,
            prefix="/analytics",
            tags=["analytics"],
            dependencies=[Depends(auth_middleware)],
        )
        api_router.include_router(integrations_api_router, prefix="/integrations")

        api_router.add_api_route(
            path="/auth/user",
            endpoint=self.get_or_create_user_route,
            methods=["POST"],
            tags=["auth"],
        )

        api_router.add_api_route(
            path="/blocks",
            endpoint=self.get_graph_blocks,
            methods=["GET"],
            tags=["blocks"],
        )
        api_router.add_api_route(
            path="/blocks/{block_id}/execute",
            endpoint=self.execute_graph_block,
            methods=["POST"],
            tags=["blocks"],
        )
        api_router.add_api_route(
            path="/graphs",
            endpoint=self.get_graphs,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/templates",
            endpoint=self.get_templates,
            methods=["GET"],
            tags=["templates", "graphs"],
        )
        api_router.add_api_route(
            path="/graphs",
            endpoint=self.create_new_graph,
            methods=["POST"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/templates",
            endpoint=self.create_new_template,
            methods=["POST"],
            tags=["templates", "graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}",
            endpoint=self.get_graph,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/templates/{graph_id}",
            endpoint=self.get_template,
            methods=["GET"],
            tags=["templates", "graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}",
            endpoint=self.update_graph,
            methods=["PUT"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/templates/{graph_id}",
            endpoint=self.update_graph,
            methods=["PUT"],
            tags=["templates", "graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions",
            endpoint=self.get_graph_all_versions,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/templates/{graph_id}/versions",
            endpoint=self.get_graph_all_versions,
            methods=["GET"],
            tags=["templates", "graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions/{version}",
            endpoint=self.get_graph,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions/active",
            endpoint=self.set_graph_active_version,
            methods=["PUT"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/input_schema",
            endpoint=self.get_graph_input_schema,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/execute",
            endpoint=self.execute_graph,
            methods=["POST"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions",
            endpoint=self.list_graph_runs,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions/{graph_exec_id}",
            endpoint=self.get_graph_run_node_execution_results,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
            endpoint=self.stop_graph_run,
            methods=["POST"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/schedules",
            endpoint=self.create_schedule,
            methods=["POST"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/{graph_id}/schedules",
            endpoint=self.get_execution_schedules,
            methods=["GET"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/graphs/schedules/{schedule_id}",
            endpoint=self.update_schedule,
            methods=["PUT"],
            tags=["graphs"],
        )
        api_router.add_api_route(
            path="/credits",
            endpoint=self.get_user_credits,
            methods=["GET"],
        )

        api_router.add_api_route(
            path="/settings",
            endpoint=self.update_configuration,
            methods=["POST"],
            tags=["settings"],
        )

        app.add_exception_handler(500, self.handle_internal_http_error)

        app.include_router(api_router)

        uvicorn.run(app, host="0.0.0.0", port=Config().agent_api_port, log_config=None)
        uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None)

    def set_test_dependency_overrides(self, overrides: dict):
        self._test_dependency_overrides = overrides
@@ -307,9 +263,7 @@ class AgentServer(AppService):

    @classmethod
    def get_graph_blocks(cls) -> list[dict[Any, Any]]:
        blocks = block.get_blocks()
        costs = get_block_costs()
        return [{**b.to_dict(), "costs": costs.get(b.id, [])} for b in blocks.values()]
        return [v.to_dict() for v in block.get_blocks().values()]

    @classmethod
    def execute_graph_block(
@@ -527,25 +481,6 @@ class AgentServer(AppService):

        return await execution_db.list_executions(graph_id, graph_version)

    @classmethod
    async def get_graph_run_status(
        cls,
        graph_id: str,
        graph_exec_id: str,
        user_id: Annotated[str, Depends(get_user_id)],
    ) -> execution_db.ExecutionStatus:
        graph = await graph_db.get_graph(graph_id, user_id=user_id)
        if not graph:
            raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")

        execution = await execution_db.get_graph_execution(graph_exec_id, user_id)
        if not execution:
            raise HTTPException(
                status_code=404, detail=f"Execution #{graph_exec_id} not found."
            )

        return execution.executionStatus

    @classmethod
    async def get_graph_run_node_execution_results(
        cls,
@@ -587,11 +522,6 @@ class AgentServer(AppService):
        execution_scheduler.update_schedule(schedule_id, is_enabled, user_id=user_id)
        return {"id": schedule_id}

    async def get_user_credits(
        self, user_id: Annotated[str, Depends(get_user_id)]
    ) -> dict[str, int]:
        return {"credits": await self._user_credit_model.get_or_refill_credit(user_id)}

    def get_execution_schedules(
        self, graph_id: str, user_id: Annotated[str, Depends(get_user_id)]
    ) -> dict[str, str]:
@@ -1,49 +0,0 @@
"""Analytics API"""

from typing import Annotated

import fastapi

import autogpt_server.data.analytics
from autogpt_server.server.utils import get_user_id

router = fastapi.APIRouter()


@router.post(path="/log_raw_metric")
async def log_raw_metric(
    user_id: Annotated[str, fastapi.Depends(get_user_id)],
    metric_name: Annotated[str, fastapi.Body(..., embed=True)],
    metric_value: Annotated[float, fastapi.Body(..., embed=True)],
    data_string: Annotated[str, fastapi.Body(..., embed=True)],
):
    result = await autogpt_server.data.analytics.log_raw_metric(
        user_id=user_id,
        metric_name=metric_name,
        metric_value=metric_value,
        data_string=data_string,
    )
    return result.id


@router.post("/log_raw_analytics")
async def log_raw_analytics(
    user_id: Annotated[str, fastapi.Depends(get_user_id)],
    type: Annotated[str, fastapi.Body(..., embed=True)],
    data: Annotated[
        dict,
        fastapi.Body(..., embed=True, description="The data to log"),
    ],
    data_index: Annotated[
        str,
        fastapi.Body(
            ...,
            embed=True,
            description="Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc.",
        ),
    ],
):
    result = await autogpt_server.data.analytics.log_raw_analytics(
        user_id, type, data, data_index
    )
    return result.id
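Editor's note (not part of the diff): with fastapi.Body(..., embed=True), each field arrives as a named property of the JSON body rather than as the whole body. A sketch of a client call, assuming the router is mounted under /analytics; the base URL, port, and auth header are assumptions:

    import requests

    response = requests.post(
        "http://localhost:8006/analytics/log_raw_metric",
        json={
            "metric_name": "page_load_time",
            "metric_value": 1.23,
            "data_string": "/build",
        },
        headers={"Authorization": "Bearer <jwt>"},
    )
    metric_id = response.json()  # the endpoint returns result.id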
@@ -11,7 +11,7 @@ from autogpt_server.data.user import DEFAULT_USER_ID
|
||||
from autogpt_server.server.conn_manager import ConnectionManager
|
||||
from autogpt_server.server.model import ExecutionSubscription, Methods, WsMessage
|
||||
from autogpt_server.util.service import AppProcess
|
||||
from autogpt_server.util.settings import Config, Settings
|
||||
from autogpt_server.util.settings import Settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
settings = Settings()
|
||||
@@ -174,4 +174,4 @@ async def websocket_router(
|
||||
|
||||
class WebsocketServer(AppProcess):
|
||||
def run(self):
|
||||
uvicorn.run(app, host="0.0.0.0", port=Config().websocket_server_port)
|
||||
uvicorn.run(app, host="0.0.0.0", port=8001)
|
||||
|
||||
@@ -252,6 +252,7 @@ Here are a couple of sample of the Block class implementation:
|
||||
|
||||
async def block_autogen_agent():
|
||||
async with SpinTestServer() as server:
|
||||
test_manager = server.exec_manager
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
|
||||
input_data = {"input": "Write me a block that writes a string into a file."}
|
||||
@@ -260,8 +261,10 @@ async def block_autogen_agent():
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(
|
||||
exec_manager=test_manager,
|
||||
graph_id=test_graph.id,
|
||||
graph_exec_id=response["id"],
|
||||
num_execs=10,
|
||||
timeout=1200,
|
||||
user_id=test_user.id,
|
||||
)
|
||||
|
||||
@@ -153,6 +153,7 @@ async def create_test_user() -> User:
|
||||
|
||||
async def reddit_marketing_agent():
|
||||
async with SpinTestServer() as server:
|
||||
exec_man = server.exec_manager
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(create_test_graph(), user_id=test_user.id)
|
||||
input_data = {"subreddit": "AutoGPT"}
|
||||
@@ -160,7 +161,9 @@ async def reddit_marketing_agent():
|
||||
test_graph.id, input_data, test_user.id
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(test_user.id, test_graph.id, response["id"], 120)
|
||||
result = await wait_execution(
|
||||
exec_man, test_user.id, test_graph.id, response["id"], 13, 120
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
|
||||
@@ -75,6 +75,7 @@ def create_test_graph() -> graph.Graph:
|
||||
|
||||
async def sample_agent():
|
||||
async with SpinTestServer() as server:
|
||||
exec_man = server.exec_manager
|
||||
test_user = await create_test_user()
|
||||
test_graph = await create_graph(create_test_graph(), test_user.id)
|
||||
input_data = {"input_1": "Hello", "input_2": "World"}
|
||||
@@ -82,7 +83,9 @@ async def sample_agent():
|
||||
test_graph.id, input_data, test_user.id
|
||||
)
|
||||
print(response)
|
||||
result = await wait_execution(test_user.id, test_graph.id, response["id"], 10)
|
||||
result = await wait_execution(
|
||||
exec_man, test_user.id, test_graph.id, response["id"], 4, 10
|
||||
)
|
||||
print(result)
|
||||
|
||||
|
||||
|
||||
@@ -42,15 +42,15 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
    """Config for the server."""

    num_graph_workers: int = Field(
-       default=10,
+       default=1,
        ge=1,
-       le=1000,
+       le=100,
        description="Maximum number of workers to use for graph execution.",
    )
    num_node_workers: int = Field(
-       default=5,
+       default=1,
        ge=1,
-       le=1000,
+       le=100,
        description="Maximum number of workers to use for node execution within a single graph.",
    )
    pyro_host: str = Field(
@@ -61,14 +61,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        default="false",
        description="If authentication is enabled or not",
    )
-   enable_credit: str = Field(
-       default="false",
-       description="If user credit system is enabled or not",
-   )
-   num_user_credits_refill: int = Field(
-       default=1500,
-       description="Number of credits to refill for each user",
-   )
    # Add more configuration fields as needed

    model_config = SettingsConfigDict(
@@ -80,11 +72,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        extra="allow",
    )

-   websocket_server_port: int = Field(
-       default=8001,
-       description="The port for the websocket server to run on",
-   )

    execution_manager_port: int = Field(
        default=8002,
        description="The port for execution manager daemon to run on",
@@ -100,11 +87,6 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        description="The port for agent server daemon to run on",
    )

-   agent_api_port: int = Field(
-       default=8006,
-       description="The port for agent server API to run on",
-   )

    @classmethod
    def settings_customise_sources(
        cls,
@@ -5,7 +5,6 @@ from autogpt_server.data import db
from autogpt_server.data.block import Block, initialize_blocks
from autogpt_server.data.execution import ExecutionResult, ExecutionStatus
from autogpt_server.data.queue import AsyncEventQueue
-from autogpt_server.data.user import create_default_user
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server import AgentServer
from autogpt_server.server.rest_api import get_user_id
@@ -65,7 +64,6 @@ class SpinTestServer:

        await db.connect()
        await initialize_blocks()
-       await create_default_user("false")

        return self
@@ -84,18 +82,25 @@ class SpinTestServer:


async def wait_execution(
+   exec_manager: ExecutionManager,
    user_id: str,
    graph_id: str,
    graph_exec_id: str,
+   num_execs: int,
    timeout: int = 20,
) -> list:
    async def is_execution_completed():
-       status = await AgentServer().get_graph_run_status(
+       execs = await AgentServer().get_graph_run_node_execution_results(
            graph_id, graph_exec_id, user_id
        )
-       if status == ExecutionStatus.FAILED:
-           raise Exception("Execution failed")
-       return status == ExecutionStatus.COMPLETED
+       return (
+           exec_manager.queue.empty()
+           and len(execs) == num_execs
+           and all(
+               v.status in [ExecutionStatus.COMPLETED, ExecutionStatus.FAILED]
+               for v in execs
+           )
+       )

    # Wait for the executions to complete
    for i in range(timeout):
@@ -1,5 +1,4 @@
 {
     "num_graph_workers": 10,
-    "num_node_workers": 5,
-    "num_user_credits_refill": 1500
+    "num_node_workers": 5
 }
@@ -1,39 +0,0 @@
/*
  Warnings:

  - The `executionStatus` column on the `AgentNodeExecution` table would be dropped and recreated. This will lead to data loss if there is data in the column.

*/
-- CreateEnum
CREATE TYPE "AgentExecutionStatus" AS ENUM ('INCOMPLETE', 'QUEUED', 'RUNNING', 'COMPLETED', 'FAILED');

-- CreateEnum
CREATE TYPE "UserBlockCreditType" AS ENUM ('TOP_UP', 'USAGE');

-- AlterTable
ALTER TABLE "AgentGraphExecution" ADD COLUMN "executionStatus" "AgentExecutionStatus" NOT NULL DEFAULT 'COMPLETED',
ADD COLUMN "startedAt" TIMESTAMP(3);

-- AlterTable
ALTER TABLE "AgentNodeExecution" DROP COLUMN "executionStatus",
ADD COLUMN "executionStatus" "AgentExecutionStatus" NOT NULL DEFAULT 'COMPLETED';

-- CreateTable
CREATE TABLE "UserBlockCredit" (
    "transactionKey" TEXT NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "userId" TEXT NOT NULL,
    "blockId" TEXT,
    "amount" INTEGER NOT NULL,
    "type" "UserBlockCreditType" NOT NULL,
    "isActive" BOOLEAN NOT NULL DEFAULT true,
    "metadata" JSONB,

    CONSTRAINT "UserBlockCredit_pkey" PRIMARY KEY ("transactionKey","userId")
);

-- AddForeignKey
ALTER TABLE "UserBlockCredit" ADD CONSTRAINT "UserBlockCredit_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "UserBlockCredit" ADD CONSTRAINT "UserBlockCredit_blockId_fkey" FOREIGN KEY ("blockId") REFERENCES "AgentBlock"("id") ON DELETE SET NULL ON UPDATE CASCADE;
@@ -1,37 +0,0 @@
-- CreateTable
CREATE TABLE "AnalyticsDetails" (
    "id" TEXT NOT NULL DEFAULT gen_random_uuid(),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "userId" TEXT NOT NULL,
    "type" TEXT NOT NULL,
    "data" JSONB,
    "dataIndex" TEXT,

    CONSTRAINT "AnalyticsDetails_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "AnalyticsMetrics" (
    "id" TEXT NOT NULL DEFAULT gen_random_uuid(),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    "analyticMetric" TEXT NOT NULL,
    "value" DOUBLE PRECISION NOT NULL,
    "dataString" TEXT,
    "userId" TEXT NOT NULL,

    CONSTRAINT "AnalyticsMetrics_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "analyticsDetails" ON "AnalyticsDetails"("userId", "type");

-- CreateIndex
CREATE INDEX "AnalyticsDetails_type_idx" ON "AnalyticsDetails"("type");

-- AddForeignKey
ALTER TABLE "AnalyticsDetails" ADD CONSTRAINT "AnalyticsDetails_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "AnalyticsMetrics" ADD CONSTRAINT "AnalyticsMetrics_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
6 rnd/autogpt_server/poetry.lock generated
@@ -289,7 +289,7 @@ description = "Shared libraries across NextGen AutoGPT"
optional = false
python-versions = ">=3.10,<4.0"
files = []
-develop = true
+develop = false

[package.dependencies]
colorama = "^0.4.6"
@@ -2022,7 +2022,6 @@ description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs
optional = false
python-versions = ">=3.8"
files = [
    {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"},
    {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"},
]
@@ -2033,7 +2032,6 @@ description = "A collection of ASN.1-based protocols modules"
optional = false
python-versions = ">=3.8"
files = [
    {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"},
    {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"},
]
@@ -3623,4 +3621,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "311c527a1d1947af049dac27c7a2b2f49d7fa4cdede52ef436422a528b0ad866"
+content-hash = "fbc928c40dc95041f7750ab34677fa3eebacd06a84944de900dedd639f847a9c"
@@ -13,7 +13,7 @@ python = "^3.10"
aio-pika = "^9.4.3"
anthropic = "^0.25.1"
apscheduler = "^3.10.4"
-autogpt-libs = { path = "../autogpt_libs", develop = true }
+autogpt-libs = { path = "../autogpt_libs" }
click = "^8.1.7"
croniter = "^2.0.5"
discord-py = "^2.4.0"
@@ -22,9 +22,6 @@ model User {
  AgentGraphs                  AgentGraph[]
  AgentGraphExecutions         AgentGraphExecution[]
  AgentGraphExecutionSchedules AgentGraphExecutionSchedule[]
- AnalyticsDetails             AnalyticsDetails[]
- AnalyticsMetrics             AnalyticsMetrics[]
- UserBlockCredit              UserBlockCredit[]

  @@index([id])
  @@index([email])
@@ -32,9 +29,9 @@ model User {

// This model describes the Agent Graph/Flow (Multi Agent System).
model AgentGraph {
  id        String    @default(uuid())
  version   Int       @default(1)
  createdAt DateTime  @default(now())
  updatedAt DateTime? @updatedAt

  name String?
@@ -114,26 +111,13 @@ model AgentBlock {

  // Prisma requires explicit back-references.
  ReferencedByAgentNode AgentNode[]
- UserBlockCredit       UserBlockCredit[]
}

-// This model describes the status of an AgentGraphExecution or AgentNodeExecution.
-enum AgentExecutionStatus {
-  INCOMPLETE
-  QUEUED
-  RUNNING
-  COMPLETED
-  FAILED
-}

// This model describes the execution of an AgentGraph.
model AgentGraphExecution {
  id        String    @id @default(uuid())
  createdAt DateTime  @default(now())
  updatedAt DateTime? @updatedAt
- startedAt DateTime?

- executionStatus AgentExecutionStatus @default(COMPLETED)

  agentGraphId      String
  agentGraphVersion Int    @default(1)
@@ -161,10 +145,12 @@ model AgentNodeExecution {
  Input  AgentNodeExecutionInputOutput[] @relation("AgentNodeExecutionInput")
  Output AgentNodeExecutionInputOutput[] @relation("AgentNodeExecutionOutput")

- executionStatus AgentExecutionStatus @default(COMPLETED)
+ // sqlite does not support enum
+ // enum Status { INCOMPLETE, QUEUED, RUNNING, SUCCESS, FAILED }
+ executionStatus String
  // Final JSON serialized input data for the node execution.
  executionData String?
  addedTime     DateTime  @default(now())
  queuedTime    DateTime?
  startedTime   DateTime?
  endedTime     DateTime?
@@ -192,8 +178,8 @@ model AgentNodeExecutionInputOutput {

// This model describes the recurring execution schedule of an Agent.
model AgentGraphExecutionSchedule {
  id        String    @id
  createdAt DateTime  @default(now())
  updatedAt DateTime? @updatedAt

  agentGraphId String
@@ -213,70 +199,3 @@ model AgentGraphExecutionSchedule {

  @@index([isEnabled])
}

-model AnalyticsDetails {
-  // PK uses gen_random_uuid() to allow the db inserts to happen outside of prisma
-  // typical uuid() inserts are handled by prisma
-  id String @id @default(dbgenerated("gen_random_uuid()"))
-
-  createdAt DateTime @default(now())
-  updatedAt DateTime @default(now()) @updatedAt
-
-  // Link to User model
-  userId String
-  user   User   @relation(fields: [userId], references: [id])
-
-  // Analytics Categorical data used for filtering (indexable w and w/o userId)
-  type String
-
-  // Analytic Specific Data. We should use a union type here, but prisma doesn't support it.
-  data Json?
-
-  // Indexable field for any count based analytical measures like page order clicking, tutorial step completion, etc.
-  dataIndex String?
-
-  @@index([userId, type], name: "analyticsDetails")
-  @@index([type])
-}
-
-model AnalyticsMetrics {
-  id        String   @id @default(dbgenerated("gen_random_uuid()"))
-  createdAt DateTime @default(now())
-  updatedAt DateTime @updatedAt
-
-  // Analytics Categorical data used for filtering (indexable w and w/o userId)
-  analyticMetric String
-  // Any numeric data that should be counted upon, summed, or otherwise aggregated.
-  value Float
-  // Any string data that should be used to identify the metric as distinct.
-  // ex: '/build' vs '/market'
-  dataString String?
-
-  // Link to User model
-  userId String
-  user   User   @relation(fields: [userId], references: [id])
-}
-
-enum UserBlockCreditType {
-  TOP_UP
-  USAGE
-}
-
-model UserBlockCredit {
-  transactionKey String   @default(uuid())
-  createdAt      DateTime @default(now())
-
-  userId String
-  user   User   @relation(fields: [userId], references: [id])
-
-  blockId String?
-  block   AgentBlock? @relation(fields: [blockId], references: [id])
-
-  amount Int
-  type   UserBlockCreditType
-
-  isActive Boolean @default(true)
-  metadata Json?
-
-  @@id(name: "creditTransactionIdentifier", [transactionKey, userId])
-}
@@ -1,90 +0,0 @@
from datetime import datetime

import pytest
from prisma.models import UserBlockCredit

from autogpt_server.blocks.llm import AITextGeneratorBlock
from autogpt_server.data.credit import UserCredit
from autogpt_server.data.user import DEFAULT_USER_ID
from autogpt_server.util.test import SpinTestServer

REFILL_VALUE = 1000
user_credit = UserCredit(REFILL_VALUE)


@pytest.mark.asyncio(scope="session")
async def test_block_credit_usage(server: SpinTestServer):
    current_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)

    spending_amount_1 = await user_credit.spend_credits(
        DEFAULT_USER_ID,
        current_credit,
        AITextGeneratorBlock(),
        {"model": "gpt-4-turbo"},
        0.0,
        0.0,
        validate_balance=False,
    )
    assert spending_amount_1 > 0

    spending_amount_2 = await user_credit.spend_credits(
        DEFAULT_USER_ID,
        current_credit,
        AITextGeneratorBlock(),
        {"model": "gpt-4-turbo", "api_key": "owned_api_key"},
        0.0,
        0.0,
        validate_balance=False,
    )
    assert spending_amount_2 == 0

    new_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
    assert new_credit == current_credit - spending_amount_1 - spending_amount_2


@pytest.mark.asyncio(scope="session")
async def test_block_credit_top_up(server: SpinTestServer):
    current_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)

    await user_credit.top_up_credits(DEFAULT_USER_ID, 100)

    new_credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
    assert new_credit == current_credit + 100


@pytest.mark.asyncio(scope="session")
async def test_block_credit_reset(server: SpinTestServer):
    month1 = datetime(2022, 1, 15)
    month2 = datetime(2022, 2, 15)

    user_credit.time_now = lambda: month2
    month2credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)

    # Month 1 result should only affect month 1
    user_credit.time_now = lambda: month1
    month1credit = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
    await user_credit.top_up_credits(DEFAULT_USER_ID, 100)
    assert await user_credit.get_or_refill_credit(DEFAULT_USER_ID) == month1credit + 100

    # Month 2 balance is unaffected
    user_credit.time_now = lambda: month2
    assert await user_credit.get_or_refill_credit(DEFAULT_USER_ID) == month2credit


@pytest.mark.asyncio(scope="session")
async def test_credit_refill(server: SpinTestServer):
    # Clear all transactions within the month
    await UserBlockCredit.prisma().update_many(
        where={
            "userId": DEFAULT_USER_ID,
            "createdAt": {
                "gte": datetime(2022, 2, 1),
                "lt": datetime(2022, 3, 1),
            },
        },
        data={"isActive": False},
    )
    user_credit.time_now = lambda: datetime(2022, 2, 15)

    balance = await user_credit.get_or_refill_credit(DEFAULT_USER_ID)
    assert balance == REFILL_VALUE
@@ -4,7 +4,7 @@ import pytest

from autogpt_server.blocks.basic import AgentInputBlock, StoreValueBlock
from autogpt_server.data.graph import Graph, Link, Node
-from autogpt_server.data.user import DEFAULT_USER_ID
+from autogpt_server.data.user import DEFAULT_USER_ID, create_default_user
from autogpt_server.server.model import CreateGraph
from autogpt_server.util.test import SpinTestServer
@@ -22,6 +22,8 @@ async def test_graph_creation(server: SpinTestServer):
    Args:
        server (SpinTestServer): The test server instance.
    """
+   await create_default_user("false")
+
    value_block = StoreValueBlock().id
    input_block = AgentInputBlock().id
@@ -4,6 +4,7 @@ from prisma.models import User
from autogpt_server.blocks.basic import FindInDictionaryBlock, StoreValueBlock
from autogpt_server.blocks.maths import CalculatorBlock, Operation
from autogpt_server.data import execution, graph
+from autogpt_server.executor import ExecutionManager
from autogpt_server.server import AgentServer
from autogpt_server.usecases.sample import create_test_graph, create_test_user
from autogpt_server.util.test import SpinTestServer, wait_execution
@@ -11,6 +12,7 @@ from autogpt_server.util.test import SpinTestServer, wait_execution

async def execute_graph(
    agent_server: AgentServer,
+   test_manager: ExecutionManager,
    test_graph: graph.Graph,
    test_user: User,
    input_data: dict,
@@ -21,8 +23,9 @@ async def execute_graph(
    graph_exec_id = response["id"]

    # Execution queue should be empty
-   result = await wait_execution(test_user.id, test_graph.id, graph_exec_id)
-   assert result and len(result) == num_execs
+   assert await wait_execution(
+       test_manager, test_user.id, test_graph.id, graph_exec_id, num_execs
+   )
    return graph_exec_id


@@ -105,6 +108,7 @@ async def test_agent_execution(server: SpinTestServer):
    data = {"input_1": "Hello", "input_2": "World"}
    graph_exec_id = await execute_graph(
        server.agent_server,
+       server.exec_manager,
        test_graph,
        test_user,
        data,
@@ -165,7 +169,7 @@ async def test_input_pin_always_waited(server: SpinTestServer):
    test_user = await create_test_user()
    test_graph = await graph.create_graph(test_graph, user_id=test_user.id)
    graph_exec_id = await execute_graph(
-       server.agent_server, test_graph, test_user, {}, 3
+       server.agent_server, server.exec_manager, test_graph, test_user, {}, 3
    )

    executions = await server.agent_server.get_graph_run_node_execution_results(
@@ -246,7 +250,7 @@ async def test_static_input_link_on_graph(server: SpinTestServer):
    test_user = await create_test_user()
    test_graph = await graph.create_graph(test_graph, user_id=test_user.id)
    graph_exec_id = await execute_graph(
-       server.agent_server, test_graph, test_user, {}, 8
+       server.agent_server, server.exec_manager, test_graph, test_user, {}, 8
    )
    executions = await server.agent_server.get_graph_run_node_execution_results(
        test_graph.id, graph_exec_id, test_user.id
@@ -1,147 +0,0 @@
version: '3.8'

networks:
  app-network:
    name: app-network
  shared-network:
    name: shared-network

volumes:
  db-config:

x-agpt-services:
  &agpt-services
  networks:
    - app-network
    - shared-network

x-supabase-services:
  &supabase-services
  networks:
    - app-network
    - shared-network

services:
  # AGPT services
  postgres:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: postgres

  migrate:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: migrate

  redis:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: redis

  rest_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: rest_server

  executor:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: executor

  websocket_server:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: websocket_server

  market:
    <<: *agpt-services
    extends:
      file: ./docker-compose.yml
      service: market

  # frontend:
  #   <<: *agpt-services
  #   extends:
  #     file: ./docker-compose.yml
  #     service: frontend

  # Supabase services
  studio:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: studio

  kong:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: kong

  auth:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: auth
    environment:
      GOTRUE_MAILER_AUTOCONFIRM: true

  rest:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: rest

  realtime:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: realtime

  storage:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: storage

  imgproxy:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: imgproxy

  meta:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: meta

  functions:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: functions

  analytics:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: analytics

  db:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: db

  vector:
    <<: *supabase-services
    extends:
      file: ./supabase/docker/docker-compose.yml
      service: vector
@@ -11,21 +11,21 @@ services:
      timeout: 5s
      retries: 5
    ports:
-     - "5433:5432"
+     - "5432:5432"
    networks:
      - app-network

  migrate:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
-   command: ["sh", "-c", "poetry run prisma migrate deploy"]
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_server/migrate
          action: rebuild
+   command: ["poetry", "run", "prisma", "migrate", "deploy"]
    depends_on:
      postgres:
        condition: service_healthy
@@ -72,18 +72,15 @@ services:
      migrate:
        condition: service_completed_successfully
    environment:
-     - SUPABASE_URL=http://kong:8000
-     - SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-     - SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
-     - ENABLE_AUTH=true
+     - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
      - EXECUTIONMANAGER_HOST=executor
    ports:
-     - "8006:8006"
+     - "8000:8000"
      - "8003:8003" # execution scheduler
    networks:
      - app-network
@@ -107,14 +104,11 @@ services:
      migrate:
        condition: service_completed_successfully
    environment:
-     - NEXT_PUBLIC_SUPABASE_URL=http://kong:8000
-     - SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-     - SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
-     - ENABLE_AUTH=true
+     - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
      - AGENTSERVER_HOST=rest_server
    ports:
@@ -141,14 +135,11 @@ services:
      migrate:
        condition: service_completed_successfully
    environment:
-     - SUPABASE_URL=http://kong:8000
-     - SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-     - SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
-     - ENABLE_AUTH=true
+     - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
    ports:
      - "8001:8001"
@@ -170,40 +161,40 @@ services:
      migrate:
        condition: service_completed_successfully
    environment:
-     - SUPABASE_URL=http://kong:8000
-     - SUPABASE_JWT_SECRET=your-super-secret-jwt-token-with-at-least-32-characters-long
-     - SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
-     - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
+     - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60&schema=market
    ports:
-     - "8015:8015"
+     - "8015:8000"
    networks:
      - app-network

  frontend:
    build:
      context: ../
      dockerfile: rnd/autogpt_builder/Dockerfile
      target: dev
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_builder/
          action: rebuild
    depends_on:
      postgres:
        condition: service_healthy
      rest_server:
        condition: service_started
      websocket_server:
        condition: service_started
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
      - NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
      - NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
    ports:
      - "3000:3000"
    networks:
      - app-network

  # frontend:
  #   build:
  #     context: ../
  #     dockerfile: rnd/autogpt_builder/Dockerfile
  #     target: dev
  #   depends_on:
  #     postgres:
  #       condition: service_healthy
  #     rest_server:
  #       condition: service_started
  #     websocket_server:
  #       condition: service_started
  #     migrate:
  #       condition: service_completed_successfully
  #   environment:
  #     - NEXT_PUBLIC_SUPABASE_URL=http://kong:8000
  #     - NEXT_PUBLIC_SUPABASE_ANON_KEY=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyAgCiAgICAicm9sZSI6ICJhbm9uIiwKICAgICJpc3MiOiAic3VwYWJhc2UtZGVtbyIsCiAgICAiaWF0IjogMTY0MTc2OTIwMCwKICAgICJleHAiOiAxNzk5NTM1NjAwCn0.dc_X5iR_VP_qT0zsiyj_I_OZ2T9FtRU2BBNWN8Bu4GE
  #     - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
  #     - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8006/api
  #     - NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
  #     - NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
  #   ports:
  #     - "3000:3000"
  #   networks:
  #     - app-network

networks:
  app-network:
@@ -17,10 +17,6 @@ ENV POETRY_VERSION=1.8.3 \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"

-# Upgrade pip and setuptools to fix security vulnerabilities
-RUN pip3 install --upgrade pip setuptools

RUN pip3 install poetry

# Copy and install dependencies
@@ -39,9 +35,6 @@ FROM python:3.11-slim-buster AS server_dependencies

WORKDIR /app

-# Upgrade pip and setuptools to fix security vulnerabilities
-RUN pip3 install --upgrade pip setuptools

# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
@@ -65,4 +58,4 @@ FROM server_dependencies AS server
ENV DATABASE_URL=""
ENV PORT=8015

-CMD ["poetry", "run", "app"]
+CMD ["uvicorn", "market.app:app", "--reload"]
@@ -353,9 +353,7 @@ async def search_db(


async def get_top_agents_by_downloads(
-   page: int = 1,
-   page_size: int = 10,
-   submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
+   page: int = 1, page_size: int = 10
) -> TopAgentsDBResponse:
    """Retrieve the top agents by download count.

@@ -376,7 +374,6 @@ async def get_top_agents_by_downloads(
    analytics = await prisma.models.AnalyticsTracker.prisma().find_many(
        include={"agent": True},
        order={"downloads": "desc"},
-       where={"agent": {"is": {"submissionStatus": submission_status}}},
        skip=skip,
        take=page_size,
    )
@@ -444,10 +441,7 @@ async def set_agent_featured(


async def get_featured_agents(
-   category: str = "featured",
-   page: int = 1,
-   page_size: int = 10,
-   submission_status: prisma.enums.SubmissionStatus = prisma.enums.SubmissionStatus.APPROVED,
+   category: str = "featured", page: int = 1, page_size: int = 10
) -> FeaturedAgentResponse:
    """Retrieve a list of featured agents from the database based on the provided category.

@@ -469,7 +463,6 @@ async def get_featured_agents(
        where={
            "featuredCategories": {"has": category},
            "isActive": True,
-           "agent": {"is": {"submissionStatus": submission_status}},
        },
        include={"agent": {"include": {"AnalyticsTracker": True}}},
        skip=skip,
@@ -5,7 +5,6 @@ import typing
import fastapi
import fastapi.responses
import prisma
-import prisma.enums

import market.db
import market.model
@@ -39,10 +38,6 @@ async def list_agents(
    sort_order: typing.Literal["asc", "desc"] = fastapi.Query(
        "desc", description="Sort order (asc or desc)"
    ),
-   submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
-       default=prisma.enums.SubmissionStatus.APPROVED,
-       description="Filter by submission status",
-   ),
):
    """
    Retrieve a list of agents based on the provided filters.
@@ -57,7 +52,6 @@ async def list_agents(
        description_threshold (int): Fuzzy search threshold (default: 60, min: 0, max: 100).
        sort_by (str): Field to sort by (default: "createdAt").
        sort_order (str): Sort order (asc or desc) (default: "desc").
-       submission_status (str): Filter by submission status (default: "APPROVED").

    Returns:
        market.model.AgentListResponse: A response containing the list of agents and pagination information.
@@ -76,7 +70,6 @@ async def list_agents(
        description_threshold=description_threshold,
        sort_by=sort_by,
        sort_order=sort_order,
-       submission_status=submission_status,
    )

    agents = [
@@ -217,10 +210,6 @@ async def top_agents_by_downloads(
    page_size: int = fastapi.Query(
        10, ge=1, le=100, description="Number of items per page"
    ),
-   submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
-       default=prisma.enums.SubmissionStatus.APPROVED,
-       description="Filter by submission status",
-   ),
):
    """
    Retrieve a list of top agents based on the number of downloads.
@@ -228,7 +217,6 @@ async def top_agents_by_downloads(
    Args:
        page (int): Page number (default: 1).
        page_size (int): Number of items per page (default: 10, min: 1, max: 100).
-       submission_status (str): Filter by submission status (default: "APPROVED").

    Returns:
        market.model.AgentListResponse: A response containing the list of top agents and pagination information.
@@ -240,7 +228,6 @@ async def top_agents_by_downloads(
    result = await market.db.get_top_agents_by_downloads(
        page=page,
        page_size=page_size,
-       submission_status=submission_status,
    )

    ret = market.model.AgentListResponse(
@@ -287,10 +274,6 @@ async def get_featured_agents(
    page_size: int = fastapi.Query(
        10, ge=1, le=100, description="Number of items per page"
    ),
-   submission_status: prisma.enums.SubmissionStatus = fastapi.Query(
-       default=prisma.enums.SubmissionStatus.APPROVED,
-       description="Filter by submission status",
-   ),
):
    """
    Retrieve a list of featured agents based on the provided category.
@@ -299,7 +282,6 @@ async def get_featured_agents(
        category (str): Category of featured agents (default: "featured").
        page (int): Page number (default: 1).
        page_size (int): Number of items per page (default: 10, min: 1, max: 100).
-       submission_status (str): Filter by submission status (default: "APPROVED").

    Returns:
        market.model.AgentListResponse: A response containing the list of featured agents and pagination information.
@@ -312,7 +294,6 @@ async def get_featured_agents(
        category=category,
        page=page,
        page_size=page_size,
-       submission_status=submission_status,
    )

    ret = market.model.AgentListResponse(

@@ -45,7 +45,7 @@ def populate_database():
        keywords=["test"],
    )
    response = requests.post(
-       "http://localhost:8015/api/v1/market/admin/agent", json=req.model_dump()
+       "http://localhost:8001/api/v1/market/admin/agent", json=req.model_dump()
    )
    print(response.text)
48 rnd/rest-api-go/.gitignore vendored Normal file
@@ -0,0 +1,48 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib

# Test binary, built with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

# Dependency directories (remove the comment below to include it)
# vendor/

# Go workspace file
go.work

# IDE-specific files
.idea/
.vscode/

# OS-specific files
.DS_Store
Thumbs.db

# Binary output directory
/bin/

# Log files
*.log

# Environment variables file
.env

# Air temporary files (if using Air for live reloading)
tmp/

# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a

# Debug files
debug

# Project-specific build outputs
/gosrv
34 rnd/rest-api-go/Dockerfile Normal file
@@ -0,0 +1,34 @@
# Build stage
FROM golang:1.23.1-alpine AS builder

WORKDIR /app

# Copy go mod and sum files
COPY go.mod go.sum ./

# Download all dependencies
RUN go mod download

# Copy the source code
COPY . .

# Build the application
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags="-w -s" -o main .

# Run stage
FROM alpine:latest

RUN apk --no-cache add ca-certificates
ENV GIN_MODE=release

WORKDIR /root/

# Copy the pre-built binary file from the previous stage
COPY --from=builder /app/main .
COPY --from=builder /app/config.yaml .

# Expose port 8080 to the outside world
EXPOSE 8080

# Command to run the executable
CMD ["./main"]
122 rnd/rest-api-go/README.md Normal file
@@ -0,0 +1,122 @@
# Market API

This project is a Go-based API for a marketplace application. It provides endpoints for managing agents, handling user authentication, and performing administrative tasks.

## Project Structure

The project is organized into several packages:

- `config`: Handles configuration loading and management
- `docs`: Contains the Swagger documentation
- `database`: Contains database migrations and interaction logic
- `handlers`: Implements HTTP request handlers
- `middleware`: Contains middleware functions for the API
- `models`: Defines data structures used throughout the application
- `utils`: Provides utility functions

## Prerequisites

- Go 1.16 or later
- PostgreSQL
- [golang-migrate](https://github.com/golang-migrate/migrate)

## Setup

1. Clone the repository.
2. Install dependencies:
   ```
   go mod tidy
   ```
3. Set up the database:
   - Create a PostgreSQL database
   - Update the `DatabaseURL` in your configuration file
4. Run database migrations:
   ```
   migrate -source file://database/migrations -database "postgresql://agpt_user:pass123@localhost:5432/agpt_marketplace?sslmode=disable" up
   ```

## Running the Application

To run the application in development mode with hot reloading:

```
air
```

For production, build and run the binary:

```
go build -o market-api
./market-api
```

## Testing

Run tests with coverage:

```
go test -cover ./...
```

## Code Formatting

Format the code using:

```
gofmt -w .
```

## Database Migrations

Create a new migration:

```
migrate create -ext sql -dir database/migrations -seq <migration_name>
```

Apply migrations:

```
migrate -source file://database/migrations -database "postgresql://user:password@localhost:5432/dbname?sslmode=disable" up
```

Revert the last migration:

```
migrate -source file://database/migrations -database "postgresql://user:password@localhost:5432/dbname?sslmode=disable" down 1
```

## API Endpoints

The API provides various endpoints for agent management, user authentication, and administrative tasks. Some key endpoints include:

- `/api/agents`: Get a list of agents
- `/api/agents/:agent_id`: Get agent details
- `/api/agents/submit`: Submit a new agent
- `/api/admin/*`: Various administrative endpoints (requires admin authentication)

Refer to the `main.go` file for a complete list of endpoints and their corresponding handlers.

## Swagger Documentation

This project uses the `gin-swagger` and `swaggo` tools to automatically generate API documentation in the OpenAPI (Swagger) format. The documentation is based on Swagger annotations added as comments in the code.

To view and interact with the generated Swagger documentation:

1. Run your Gin server.
2. Open `http://localhost:8015/docs/index.html` in your web browser.

Alternatively, you can view the raw OpenAPI specification at `http://localhost:8015/docs/doc.json`.

### Regenerating Swagger Documentation

If you change the codebase and want to regenerate the Swagger documentation:

1. Run `swag init` in your project directory to create a new `docs.go` file (or update an existing one) from the Swagger annotations in your code:
   ```bash
   swag init -g main.go
   ```
   Replace `main.go` with the name of your main Go source file.
2. Run your Gin server and open the updated Swagger UI at `http://localhost:8015/docs/index.html`. You should see documentation reflecting the latest changes in your codebase. A minimal wiring sketch follows below.
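For reference, a minimal sketch of how this gin-swagger wiring typically looks in `main.go`. This is an illustration, not code from this branch: the `docs` import path is inferred from the `github.com/swiftyos/market` module name used in `database/db.go`, and the port is assumed to match `ServerAddress` in `config.yaml`.

```go
package main

import (
	"github.com/gin-gonic/gin"
	swaggerFiles "github.com/swaggo/files"
	ginSwagger "github.com/swaggo/gin-swagger"

	// Generated by `swag init`; import path assumed from the module name.
	_ "github.com/swiftyos/market/docs"
)

// @title    Market API
// @version  1.0
// @BasePath /api
func main() {
	r := gin.Default()

	// Serve the Swagger UI (and the raw spec at /docs/doc.json)
	// under /docs, matching the URLs referenced in the README above.
	r.GET("/docs/*any", ginSwagger.WrapHandler(swaggerFiles.Handler))

	_ = r.Run(":8015") // assumed: matches ServerAddress in config.yaml
}
```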
7 rnd/rest-api-go/config.yaml Normal file
@@ -0,0 +1,7 @@
ServerAddress: ":8015"
DatabaseURL: "postgresql://agpt_user:pass123@localhost:5433/agpt_marketplace?connect_timeout=60"
JWTSecret: "Z86RsQ+nhSk+A8ODJX1kQA11JCk9nlw8n+MRdSgmR+P1sMPTTDG1rjBTwj7Ucjb3TRHSVxkCNPgXISmzU/vMkA=="
JWTAlgorithm: "HS256"
CORSAllowOrigins:
  - "http://localhost:3000"
  - "http://127.0.0.1:3000"
63 rnd/rest-api-go/config/config.go Normal file
@@ -0,0 +1,63 @@
package config

import (
	"fmt"

	"github.com/spf13/viper"
	"go.uber.org/zap"
)

type Config struct {
	ServerAddress    string   `mapstructure:"serveraddress"`
	DatabaseURL      string   `mapstructure:"databaseurl"`
	AuthEnabled      bool     `mapstructure:"authenabled"`
	JWTSecret        string   `mapstructure:"jwtsecret"`
	JWTAlgorithm     string   `mapstructure:"jwtalgorithm"`
	CORSAllowOrigins []string `mapstructure:"corsalloworigins"`
}

func Load(configFile ...string) (*Config, error) {
	logger := zap.L().With(zap.String("function", "Load"))

	if len(configFile) > 0 {
		viper.SetConfigFile(configFile[0])
	} else {
		viper.SetConfigName("config")
		viper.SetConfigType("yaml")
		viper.AddConfigPath(".")
	}

	viper.SetEnvPrefix("AGPT")
	viper.AutomaticEnv()

	if err := viper.ReadInConfig(); err != nil {
		logger.Error("Failed to read config file", zap.Error(err))
		return nil, err
	}

	var config Config
	if err := viper.Unmarshal(&config); err != nil {
		logger.Error("Failed to unmarshal config", zap.Error(err))
		return nil, err
	}

	// Validate required fields
	if config.ServerAddress == "" {
		logger.Error("ServerAddress is required")
		return nil, fmt.Errorf("ServerAddress is required")
	}
	if config.DatabaseURL == "" {
		logger.Error("DatabaseURL is required")
		return nil, fmt.Errorf("DatabaseURL is required")
	}
	if config.JWTSecret == "" {
		logger.Error("JWTSecret is required")
		return nil, fmt.Errorf("JWTSecret is required")
	}
	if config.JWTAlgorithm == "" {
		logger.Error("JWTAlgorithm is required")
		return nil, fmt.Errorf("JWTAlgorithm is required")
	}

	return &config, nil
}
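A minimal usage sketch of the loader above (the `github.com/swiftyos/market/config` import path is an assumption inferred from the imports in `database/db.go`). Note that `SetEnvPrefix("AGPT")` plus `AutomaticEnv()` lets an environment variable such as `AGPT_SERVERADDRESS` override the file value, with the usual viper caveat that keys read only from the environment must also appear in the file (or have defaults registered) to survive `Unmarshal`:

```go
package main

import (
	"log"

	"github.com/swiftyos/market/config" // assumed module path
)

func main() {
	// Reads ./config.yaml by default; pass an explicit path to override.
	cfg, err := config.Load()
	if err != nil {
		log.Fatalf("failed to load config: %v", err)
	}
	log.Printf("listening on %s (auth enabled: %v)", cfg.ServerAddress, cfg.AuthEnabled)
}
```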
78 rnd/rest-api-go/config/config_test.go Normal file
@@ -0,0 +1,78 @@
package config

import (
	"os"
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestLoadValidConfig(t *testing.T) {
	// Create a temporary config file for testing
	tempFile, err := os.CreateTemp("", "test-config*.yaml")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tempFile.Name())

	// Write test configuration to the temp file
	testConfig := []byte(`
serveraddress: ":8080"
databaseurl: "postgres://user:pass@localhost:5432/testdb"
authenabled: true
jwtsecret: "test-secret"
jwtalgorithm: "HS256"
`)
	if _, err := tempFile.Write(testConfig); err != nil {
		t.Fatalf("Failed to write to temp file: %v", err)
	}
	tempFile.Close()

	// Test the Load function with a specific config file
	config, err := Load(tempFile.Name())
	assert.NoError(t, err)
	assert.NotNil(t, config)

	// Verify the loaded configuration
	assert.Equal(t, ":8080", config.ServerAddress)
	assert.Equal(t, "postgres://user:pass@localhost:5432/testdb", config.DatabaseURL)
	assert.True(t, config.AuthEnabled)
	assert.Equal(t, "test-secret", config.JWTSecret)
	assert.Equal(t, "HS256", config.JWTAlgorithm)
}

func TestLoadDefaultConfigFile(t *testing.T) {
	// Test with default config file (should fail in test environment)
	config, err := Load()
	assert.Error(t, err)
	assert.Nil(t, config)
}

func TestLoadMissingConfigFile(t *testing.T) {
	// Test with missing config file
	config, err := Load("non_existent_config.yaml")
	assert.Error(t, err)
	assert.Nil(t, config)
}

func TestLoadInvalidConfigFormat(t *testing.T) {
	// Create a temporary config file for testing
	tempFile, err := os.CreateTemp("", "test-config*.yaml")
	if err != nil {
		t.Fatalf("Failed to create temp file: %v", err)
	}
	defer os.Remove(tempFile.Name())

	// Test with invalid config format
	invalidConfig := []byte(`
serveraddress: ":8080"
databaseurl: 123 # Invalid type, should be string
`)
	if err := os.WriteFile(tempFile.Name(), invalidConfig, 0644); err != nil {
		t.Fatalf("Failed to write invalid config: %v", err)
	}

	config, err := Load(tempFile.Name())
	assert.Error(t, err)
	assert.Nil(t, config)
}
805 rnd/rest-api-go/database/db.go Normal file
@@ -0,0 +1,805 @@
package database

import (
	"context"
	"fmt"
	"strings"
	"time"

	"github.com/google/uuid"
	"github.com/jackc/pgx/v5"
	"github.com/jackc/pgx/v5/pgxpool"
	"github.com/swiftyos/market/config"
	"github.com/swiftyos/market/models"
	"github.com/swiftyos/market/utils"
	"go.uber.org/zap"
)

func NewDB(cfg *config.Config) (*pgxpool.Pool, error) {
	return pgxpool.New(context.Background(), cfg.DatabaseURL)
}

func GetAgents(ctx context.Context, db *pgxpool.Pool, logger *zap.Logger, page int, pageSize int, name *string, keywords *string, categories *string) ([]models.Agent, error) {
	logger = logger.With(zap.String("function", "GetAgents")).With(zap.String("file", "db.go"))

	logger.Debug("Query parameters",
		zap.Int("page", page),
		zap.Int("pageSize", pageSize),
		zap.String("name", utils.StringOrNil(name)),
		zap.String("keywords", utils.StringOrNil(keywords)),
		zap.String("categories", utils.StringOrNil(categories)))

	query := `
		SELECT "id", "name", "description", "author", "keywords", "categories", "graph" FROM "Agents"
		WHERE "submissionStatus" = 'APPROVED'
		AND ($3::text IS NULL OR name ILIKE '%' || $3 || '%')
		AND ($4::text IS NULL OR $4 = ANY(keywords))
		AND ($5::text IS NULL OR $5 = ANY(categories))
		ORDER BY "createdAt" DESC
		LIMIT $1 OFFSET $2
	`

	rows, err := db.Query(ctx, query, pageSize, (page-1)*pageSize, name, keywords, categories)
	if err != nil {
		logger.Error("Error querying agents", zap.Error(err))
		return nil, err
	}
	defer rows.Close()

	var agents []models.Agent
	for rows.Next() {
		var agent models.Agent
		err := rows.Scan(
			&agent.ID,
			&agent.Name,
			&agent.Description,
			&agent.Author,
			&agent.Keywords,
			&agent.Categories,
			&agent.Graph,
		)
		if err != nil {
			logger.Error("Error scanning agent", zap.Error(err))
			return nil, err
		}
		agents = append(agents, agent)
	}
	if err := rows.Err(); err != nil {
		logger.Error("Error iterating over agents", zap.Error(err))
		return nil, err
	}
	logger.Info("Found agents", zap.Int("count", len(agents)))

	if agents == nil {
		agents = []models.Agent{}
	}
	return agents, err
}
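As a usage sketch of the helpers above: `GetAgents` pushes the optional name/keyword/category filters into SQL as nullable parameters (`$n::text IS NULL OR ...`), so callers simply pass `nil` for filters they do not want. The wiring below is illustrative only, assuming the `config.Load` and module paths already seen in the imports:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"go.uber.org/zap"

	"github.com/swiftyos/market/config"   // assumed module path
	"github.com/swiftyos/market/database" // assumed module path
)

func main() {
	cfg, err := config.Load()
	if err != nil {
		log.Fatal(err)
	}
	pool, err := database.NewDB(cfg)
	if err != nil {
		log.Fatal(err)
	}
	defer pool.Close()

	// First page of 10 approved agents, with no name/keyword/category filters.
	agents, err := database.GetAgents(context.Background(), pool, zap.L(), 1, 10, nil, nil, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("fetched %d agents\n", len(agents))
}
```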
func SubmitAgent(ctx context.Context, db *pgxpool.Pool, request models.AddAgentRequest, user interface{}) (*models.AgentWithMetadata, error) {
|
||||
logger := zap.L().With(zap.String("function", "SubmitAgent"))
|
||||
logger.Info("Submitting new agent")
|
||||
|
||||
// Generate a new UUID for the agent
|
||||
agentID := uuid.New().String()
|
||||
|
||||
// Create the Agent struct
|
||||
agent := models.Agent{
|
||||
ID: agentID,
|
||||
Name: request.Graph.Name,
|
||||
Description: request.Graph.Description,
|
||||
Author: request.Author,
|
||||
Keywords: request.Keywords,
|
||||
Categories: request.Categories,
|
||||
Graph: request.Graph,
|
||||
}
|
||||
|
||||
// Create the AgentWithMetadata struct
|
||||
agentWithMetadata := models.AgentWithMetadata{
|
||||
Agent: agent,
|
||||
Version: 1,
|
||||
CreatedAt: time.Now(),
|
||||
UpdatedAt: time.Now(),
|
||||
SubmissionDate: time.Now(),
|
||||
SubmissionStatus: models.SubmissionStatusPending,
|
||||
}
|
||||
|
||||
// Start a transaction
|
||||
tx, err := db.Begin(ctx)
|
||||
if err != nil {
|
||||
logger.Error("Failed to begin transaction", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
defer tx.Rollback(ctx)
|
||||
|
||||
// Insert the agent into the database
|
||||
_, err = tx.Exec(ctx, `
|
||||
INSERT INTO "Agents" (id, name, description, author, keywords, categories, graph, version, created_at, updated_at, submission_date, submission_status)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||
`, agentWithMetadata.ID, agentWithMetadata.Name, agentWithMetadata.Description, agentWithMetadata.Author,
|
||||
agentWithMetadata.Keywords, agentWithMetadata.Categories, agentWithMetadata.Graph,
|
||||
agentWithMetadata.Version, agentWithMetadata.CreatedAt, agentWithMetadata.UpdatedAt,
|
||||
agentWithMetadata.SubmissionDate, agentWithMetadata.SubmissionStatus)
|
||||
|
||||
if err != nil {
|
||||
logger.Error("Failed to insert agent", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Commit the transaction
|
||||
err = tx.Commit(ctx)
|
||||
if err != nil {
|
||||
logger.Error("Failed to commit transaction", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logger.Info("Successfully submitted new agent", zap.String("agentID", agentID))
|
||||
return &agentWithMetadata, nil
|
||||
}
|
||||
|
||||
func GetAgentDetails(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.AgentWithMetadata, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetAgentDetails"))
|
||||
|
||||
query := `
|
||||
SELECT id, name, description, author, keywords, categories, graph, version, created_at, updated_at, submission_date, submission_status
|
||||
FROM "Agents"
|
||||
WHERE id = $1
|
||||
`
|
||||
|
||||
var agent models.AgentWithMetadata
|
||||
err := db.QueryRow(ctx, query, agentID).Scan(
|
||||
&agent.ID,
|
||||
&agent.Name,
|
||||
&agent.Description,
|
||||
&agent.Author,
|
||||
&agent.Keywords,
|
||||
&agent.Categories,
|
||||
&agent.Graph,
|
||||
&agent.Version,
|
||||
&agent.CreatedAt,
|
||||
&agent.UpdatedAt,
|
||||
&agent.SubmissionDate,
|
||||
&agent.SubmissionStatus,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
if err == pgx.ErrNoRows {
|
||||
logger.Error("Agent not found", zap.String("agentID", agentID))
|
||||
return nil, fmt.Errorf("agent not found")
|
||||
}
|
||||
logger.Error("Error querying agent details", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logger.Info("Agent details retrieved", zap.String("agentID", agentID))
|
||||
return &agent, nil
|
||||
}
|
||||
|
||||
func IncrementDownloadCount(ctx context.Context, db *pgxpool.Pool, agentID string) error {
|
||||
logger := zap.L().With(zap.String("function", "IncrementDownloadCount"))
|
||||
|
||||
query := `
|
||||
UPDATE "Agents"
|
||||
SET download_count = download_count + 1
|
||||
WHERE id = $1
|
||||
`
|
||||
|
||||
_, err := db.Exec(ctx, query, agentID)
|
||||
if err != nil {
|
||||
logger.Error("Failed to increment download count", zap.Error(err), zap.String("agentID", agentID))
|
||||
return err
|
||||
}
|
||||
|
||||
logger.Info("Download count incremented", zap.String("agentID", agentID))
|
||||
return nil
|
||||
}
|
||||
|
||||
func GetAgentFile(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.AgentFile, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetAgentFile"))
|
||||
|
||||
query := `
|
||||
SELECT id, name, graph
|
||||
FROM "Agents"
|
||||
WHERE id = $1
|
||||
`
|
||||
|
||||
var agentFile models.AgentFile
|
||||
err := db.QueryRow(ctx, query, agentID).Scan(
|
||||
&agentFile.ID,
|
||||
&agentFile.Name,
|
||||
&agentFile.Graph,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
if err == pgx.ErrNoRows {
|
||||
logger.Error("Agent not found", zap.String("agentID", agentID))
|
||||
return nil, fmt.Errorf("agent not found")
|
||||
}
|
||||
logger.Error("Error querying agent file", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logger.Info("Agent file retrieved", zap.String("agentID", agentID))
|
||||
return &agentFile, nil
|
||||
}
|
||||
|
||||
func GetTopAgentsByDownloads(ctx context.Context, db *pgxpool.Pool, page, pageSize int) ([]models.AgentWithDownloads, int, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetTopAgentsByDownloads"))
|
||||
|
||||
offset := (page - 1) * pageSize
|
||||
|
||||
query := `
|
||||
SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph, at.downloads
|
||||
FROM "Agents" a
|
||||
JOIN "AnalyticsTracker" at ON a.id = at.agent_id
|
||||
WHERE a.submission_status = 'APPROVED'
|
||||
ORDER BY at.downloads DESC
|
||||
LIMIT $1 OFFSET $2
|
||||
`
|
||||
|
||||
rows, err := db.Query(ctx, query, pageSize, offset)
|
||||
if err != nil {
|
||||
logger.Error("Failed to query top agents", zap.Error(err))
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var agents []models.AgentWithDownloads
|
||||
for rows.Next() {
|
||||
var agent models.AgentWithDownloads
|
||||
err := rows.Scan(
|
||||
&agent.ID,
|
||||
&agent.Name,
|
||||
&agent.Description,
|
||||
&agent.Author,
|
||||
&agent.Keywords,
|
||||
&agent.Categories,
|
||||
&agent.Graph,
|
||||
&agent.Downloads,
|
||||
)
|
||||
if err != nil {
|
||||
logger.Error("Failed to scan agent row", zap.Error(err))
|
||||
return nil, 0, err
|
||||
}
|
||||
agents = append(agents, agent)
|
||||
}
|
||||
|
||||
var totalCount int
|
||||
err = db.QueryRow(ctx, `SELECT COUNT(*) FROM "Agents" WHERE submission_status = 'APPROVED'`).Scan(&totalCount)
|
||||
if err != nil {
|
||||
logger.Error("Failed to get total count", zap.Error(err))
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
logger.Info("Top agents retrieved", zap.Int("count", len(agents)))
|
||||
return agents, totalCount, nil
|
||||
}
|
||||
|
func GetFeaturedAgents(ctx context.Context, db *pgxpool.Pool, category string, page, pageSize int) ([]models.Agent, int, error) {
    logger := zap.L().With(zap.String("function", "GetFeaturedAgents"))

    offset := (page - 1) * pageSize

    query := `
        SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph
        FROM "Agents" a
        JOIN "FeaturedAgent" fa ON a.id = fa.agent_id
        WHERE $1 = ANY(fa.featured_categories) AND fa.is_active = true AND a.submission_status = 'APPROVED'
        ORDER BY a.created_at DESC
        LIMIT $2 OFFSET $3
    `

    rows, err := db.Query(ctx, query, category, pageSize, offset)
    if err != nil {
        logger.Error("Failed to query featured agents", zap.Error(err))
        return nil, 0, err
    }
    defer rows.Close()

    var agents []models.Agent
    for rows.Next() {
        var agent models.Agent
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
        )
        if err != nil {
            logger.Error("Failed to scan featured agent row", zap.Error(err))
            return nil, 0, err
        }
        agents = append(agents, agent)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Row iteration failed", zap.Error(err))
        return nil, 0, err
    }

    // The count query mirrors the predicates of the page query, so the
    // total stays consistent with the returned rows.
    var totalCount int
    err = db.QueryRow(ctx, `SELECT COUNT(*) FROM "FeaturedAgent" fa JOIN "Agents" a ON fa.agent_id = a.id WHERE $1 = ANY(fa.featured_categories) AND fa.is_active = true AND a.submission_status = 'APPROVED'`, category).Scan(&totalCount)
    if err != nil {
        logger.Error("Failed to get total count of featured agents", zap.Error(err))
        return nil, 0, err
    }

    logger.Info("Featured agents retrieved", zap.Int("count", len(agents)))
    return agents, totalCount, nil
}

func Search(ctx context.Context, db *pgxpool.Pool, query string, categories []string, page, pageSize int, sortBy, sortOrder string) ([]models.AgentWithRank, error) {
    logger := zap.L().With(zap.String("function", "Search"))

    offset := (page - 1) * pageSize

    // The search text, limit, and offset are bound as $1..$3. Category
    // values are bound as additional parameters rather than interpolated
    // into the SQL text, which would otherwise be an injection vector.
    args := []interface{}{query, pageSize, offset}

    categoryFilter := ""
    if len(categories) > 0 {
        categoryConditions := make([]string, len(categories))
        for i, cat := range categories {
            args = append(args, cat)
            categoryConditions[i] = fmt.Sprintf("$%d = ANY(a.categories)", len(args))
        }
        categoryFilter = "AND (" + strings.Join(categoryConditions, " OR ") + ")"
    }

    // sortBy is constrained by the switch below; normalize sortOrder to a
    // fixed whitelist, since both are interpolated into the query text.
    if strings.ToUpper(sortOrder) == "ASC" {
        sortOrder = "ASC"
    } else {
        sortOrder = "DESC"
    }
    orderByClause := ""
    switch sortBy {
    case "createdAt", "updatedAt":
        orderByClause = fmt.Sprintf(`a."%s" %s, rank DESC`, sortBy, sortOrder)
    case "name":
        orderByClause = fmt.Sprintf(`a.name %s, rank DESC`, sortOrder)
    default:
        orderByClause = `rank DESC, a."createdAt" DESC`
    }

    sqlQuery := fmt.Sprintf(`
        WITH query AS (
            SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q
            FROM unnest(to_tsvector($1))
        )
        SELECT
            a.id,
            a.created_at,
            a.updated_at,
            a.version,
            a.name,
            LEFT(a.description, 500) AS description,
            a.author,
            a.keywords,
            a.categories,
            a.graph,
            a.submission_status,
            a.submission_date,
            ts_rank(CAST(a.search AS tsvector), query.q) AS rank
        FROM "Agents" a, query
        WHERE a.submission_status = 'APPROVED' %s
        ORDER BY %s
        LIMIT $2
        OFFSET $3
    `, categoryFilter, orderByClause)

    rows, err := db.Query(ctx, sqlQuery, args...)
    if err != nil {
        logger.Error("Failed to execute search query", zap.Error(err))
        return nil, err
    }
    defer rows.Close()

    var agents []models.AgentWithRank
    for rows.Next() {
        var agent models.AgentWithRank
        err := rows.Scan(
            &agent.ID,
            &agent.CreatedAt,
            &agent.UpdatedAt,
            &agent.Version,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
            &agent.SubmissionStatus,
            &agent.SubmissionDate,
            &agent.Rank,
        )
        if err != nil {
            logger.Error("Failed to scan search result row", zap.Error(err))
            return nil, err
        }
        agents = append(agents, agent)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Row iteration failed", zap.Error(err))
        return nil, err
    }

    logger.Info("Search completed", zap.Int("results", len(agents)))
    return agents, nil
}

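The WITH query CTE is the non-obvious part of Search: to_tsvector($1) tokenizes the raw search text, and string_agg(lexeme || ':*', ' & ') rebuilds it as a prefix-matching tsquery, so a partial term like "recru" still matches "AI Recruiter". As a usage illustration, here is a hedged sketch of how an HTTP handler might call Search; the route shape, parameter names, and 20-per-page default are assumptions, not the marketplace's actual API:

// Hypothetical HTTP wiring for Search; assumes this package's Search
// function plus net/http, strconv, encoding/json, and pgxpool imports.
func searchHandler(pool *pgxpool.Pool) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        params := r.URL.Query()
        page, err := strconv.Atoi(params.Get("page"))
        if err != nil || page < 1 {
            page = 1
        }
        agents, err := Search(r.Context(), pool, params.Get("q"), params["category"],
            page, 20, params.Get("sort_by"), params.Get("sort_order"))
        if err != nil {
            http.Error(w, "search failed", http.StatusInternalServerError)
            return
        }
        w.Header().Set("Content-Type", "application/json")
        _ = json.NewEncoder(w).Encode(agents)
    }
}
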
func CreateAgentInstalledEvent(ctx context.Context, db *pgxpool.Pool, eventData models.InstallTracker) error {
    logger := zap.L().With(zap.String("function", "CreateAgentInstalledEvent"))
    logger.Info("Creating agent installed event")

    query := `
        INSERT INTO install_tracker (marketplace_agent_id, installed_agent_id, installation_location)
        VALUES ($1, $2, $3)
    `

    _, err := db.Exec(ctx, query,
        eventData.MarketplaceAgentID,
        eventData.InstalledAgentID,
        eventData.InstallationLocation,
    )
    if err != nil {
        logger.Error("Failed to create agent installed event", zap.Error(err))
        return fmt.Errorf("failed to create agent installed event: %w", err)
    }

    logger.Info("Agent installed event created successfully")
    return nil
}

// Admin Queries

func CreateAgentEntry(ctx context.Context, db *pgxpool.Pool, agent models.Agent) (models.Agent, error) {
    logger := zap.L().With(zap.String("function", "CreateAgentEntry"))
    logger.Info("Creating agent entry")

    query := `
        INSERT INTO agents (id, name, description, author, keywords, categories, graph)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        RETURNING id, name, description, author, keywords, categories, graph
    `

    var createdAgent models.Agent
    err := db.QueryRow(ctx, query,
        agent.ID,
        agent.Name,
        agent.Description,
        agent.Author,
        agent.Keywords,
        agent.Categories,
        agent.Graph,
    ).Scan(
        &createdAgent.ID,
        &createdAgent.Name,
        &createdAgent.Description,
        &createdAgent.Author,
        &createdAgent.Keywords,
        &createdAgent.Categories,
        &createdAgent.Graph,
    )
    if err != nil {
        logger.Error("Failed to create agent entry", zap.Error(err))
        return models.Agent{}, err
    }

    logger.Info("Agent entry created successfully", zap.String("agentID", agent.ID))
    return createdAgent, nil
}

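The insert leaves version, timestamps, the search vector, and submission status to database defaults. A hypothetical admin-side usage, assuming the models.Agent sketch above plus context, fmt, encoding/json, pgxpool, and github.com/google/uuid imports:

// seedExampleAgent is illustrative only; all field values are made up.
func seedExampleAgent(ctx context.Context, pool *pgxpool.Pool) error {
    agent := models.Agent{
        ID:          uuid.NewString(),
        Name:        "Example Agent",
        Description: "A demo marketplace entry",
        Author:      "admin",
        Keywords:    []string{"demo"},
        Categories:  []string{"testing"},
        Graph:       json.RawMessage(`{"key": "value"}`),
    }
    created, err := CreateAgentEntry(ctx, pool, agent)
    if err != nil {
        return err
    }
    fmt.Println("created agent", created.ID)
    return nil
}
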
func SetAgentFeatured(ctx context.Context, db *pgxpool.Pool, agentID string, isActive bool, featuredCategories []string) (*models.FeaturedAgent, error) {
    logger := zap.L().With(zap.String("function", "SetAgentFeatured"))
    logger.Info("Setting agent featured status", zap.String("agentID", agentID), zap.Bool("isActive", isActive))

    // Check if the agent exists
    var exists bool
    err := db.QueryRow(ctx, `SELECT EXISTS(SELECT 1 FROM "Agents" WHERE id = $1)`, agentID).Scan(&exists)
    if err != nil {
        logger.Error("Failed to check if agent exists", zap.Error(err))
        return nil, fmt.Errorf("failed to check if agent exists: %w", err)
    }
    if !exists {
        return nil, fmt.Errorf("agent with ID %s not found", agentID)
    }

    var query string
    var args []interface{}

    if isActive {
        // Set the agent as featured (upsert keyed on agent_id)
        query = `
            INSERT INTO "FeaturedAgent" (agent_id, featured_categories, is_active)
            VALUES ($1, $2, $3)
            ON CONFLICT (agent_id) DO UPDATE
            SET featured_categories = $2, is_active = $3
            RETURNING agent_id, featured_categories, is_active
        `
        args = []interface{}{agentID, featuredCategories, isActive}
    } else {
        // Unset the agent as featured
        query = `
            DELETE FROM "FeaturedAgent"
            WHERE agent_id = $1
            RETURNING agent_id, featured_categories, is_active
        `
        args = []interface{}{agentID}
    }

    var featuredAgent models.FeaturedAgent
    err = db.QueryRow(ctx, query, args...).Scan(
        &featuredAgent.AgentID,
        &featuredAgent.FeaturedCategories,
        &featuredAgent.IsActive,
    )
    if err != nil {
        if err == pgx.ErrNoRows && !isActive {
            logger.Info("Agent was not featured, no action needed", zap.String("agentID", agentID))
            return nil, nil
        }
        logger.Error("Failed to set agent featured status", zap.Error(err))
        return nil, fmt.Errorf("failed to set agent featured status: %w", err)
    }

    if isActive {
        logger.Info("Agent set as featured successfully", zap.String("agentID", agentID))
    } else {
        logger.Info("Agent unset as featured successfully", zap.String("agentID", agentID))
    }
    return &featuredAgent, nil
}

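Two details make this function work: the ON CONFLICT (agent_id) upsert resolves against the unique index on agent_id ("FeaturedAgent_agentId_key" in the migration further down), which is exactly the kind of unique constraint ON CONFLICT requires; and on the deactivate path, DELETE ... RETURNING yields pgx.ErrNoRows when the agent was never featured, which is why that combination is treated as a no-op rather than an error.
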
func GetAgentFeatured(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.FeaturedAgent, error) {
    logger := zap.L().With(zap.String("function", "GetAgentFeatured"))
    logger.Info("Getting featured agent", zap.String("agentID", agentID))

    query := `
        SELECT agent_id, featured_categories, is_active
        FROM "FeaturedAgent"
        WHERE agent_id = $1
    `

    var featuredAgent models.FeaturedAgent
    err := db.QueryRow(ctx, query, agentID).Scan(
        &featuredAgent.AgentID,
        &featuredAgent.FeaturedCategories,
        &featuredAgent.IsActive,
    )
    if err == pgx.ErrNoRows {
        // Not being featured is not an error; signal it with a nil result.
        return nil, nil
    }
    if err != nil {
        logger.Error("Failed to get featured agent", zap.Error(err))
        return nil, fmt.Errorf("failed to get featured agent: %w", err)
    }

    logger.Info("Featured agent retrieved successfully", zap.String("agentID", agentID))
    return &featuredAgent, nil
}

func RemoveFeaturedCategory(ctx context.Context, db *pgxpool.Pool, agentID string, category string) (*models.FeaturedAgent, error) {
    logger := zap.L().With(zap.String("function", "RemoveFeaturedCategory"))
    logger.Info("Removing featured category", zap.String("agentID", agentID), zap.String("category", category))

    query := `
        UPDATE "FeaturedAgent"
        SET featured_categories = array_remove(featured_categories, $1)
        WHERE agent_id = $2
        RETURNING agent_id, featured_categories, is_active
    `

    var featuredAgent models.FeaturedAgent
    err := db.QueryRow(ctx, query, category, agentID).Scan(
        &featuredAgent.AgentID,
        &featuredAgent.FeaturedCategories,
        &featuredAgent.IsActive,
    )
    if err == pgx.ErrNoRows {
        return nil, nil
    }
    if err != nil {
        logger.Error("Failed to remove featured category", zap.Error(err))
        return nil, fmt.Errorf("failed to remove featured category: %w", err)
    }

    logger.Info("Featured category removed successfully", zap.String("agentID", agentID), zap.String("category", category))
    return &featuredAgent, nil
}

func GetNotFeaturedAgents(ctx context.Context, db *pgxpool.Pool, page, pageSize int) ([]models.Agent, error) {
    logger := zap.L().With(zap.String("function", "GetNotFeaturedAgents"))
    logger.Info("Getting not featured agents", zap.Int("page", page), zap.Int("pageSize", pageSize))

    offset := (page - 1) * pageSize

    // An agent counts as "not featured" if it has no FeaturedAgent row at
    // all, or has one whose featured_categories array is empty.
    query := `
        SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph
        FROM "Agents" a
        LEFT JOIN "FeaturedAgent" fa ON a.id = fa.agent_id
        WHERE (fa.agent_id IS NULL OR fa.featured_categories = '{}')
        AND a.submission_status = 'APPROVED'
        ORDER BY a.created_at DESC
        LIMIT $1 OFFSET $2
    `

    rows, err := db.Query(ctx, query, pageSize, offset)
    if err != nil {
        logger.Error("Failed to query not featured agents", zap.Error(err))
        return nil, err
    }
    defer rows.Close()

    var agents []models.Agent
    for rows.Next() {
        var agent models.Agent
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
        )
        if err != nil {
            logger.Error("Failed to scan not featured agent row", zap.Error(err))
            return nil, err
        }
        agents = append(agents, agent)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Row iteration failed", zap.Error(err))
        return nil, err
    }

    logger.Info("Not featured agents retrieved", zap.Int("count", len(agents)))
    return agents, nil
}

func GetAgentSubmissions(ctx context.Context, db *pgxpool.Pool, page, pageSize int, name, keyword, category *string, sortBy, sortOrder string) ([]models.AgentWithMetadata, int, error) {
    logger := zap.L().With(zap.String("function", "GetAgentSubmissions"))
    logger.Info("Getting agent submissions", zap.Int("page", page), zap.Int("pageSize", pageSize))

    offset := (page - 1) * pageSize

    query := `
        SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph, a.created_at, a.updated_at, a.version, a.submission_status, a.submission_review_date, a.submission_review_comments
        FROM "Agents" a
        WHERE a.submission_status = 'PENDING'
    `

    args := []interface{}{}
    argCount := 1

    if name != nil {
        query += fmt.Sprintf(" AND a.name ILIKE $%d", argCount)
        args = append(args, "%"+*name+"%")
        argCount++
    }

    if keyword != nil {
        query += fmt.Sprintf(" AND $%d = ANY(a.keywords)", argCount)
        args = append(args, *keyword)
        argCount++
    }

    if category != nil {
        query += fmt.Sprintf(" AND $%d = ANY(a.categories)", argCount)
        args = append(args, *category)
        argCount++
    }

    // Add sorting. NOTE: sortBy and sortOrder are interpolated directly into
    // the SQL, so they must come from a trusted whitelist (as in Search
    // above), never from raw user input.
    query += fmt.Sprintf(" ORDER BY a.%s %s", sortBy, sortOrder)

    // Add pagination
    query += fmt.Sprintf(" LIMIT $%d OFFSET $%d", argCount, argCount+1)
    args = append(args, pageSize, offset)

    rows, err := db.Query(ctx, query, args...)
    if err != nil {
        logger.Error("Failed to query agent submissions", zap.Error(err))
        return nil, 0, err
    }
    defer rows.Close()

    var agents []models.AgentWithMetadata
    for rows.Next() {
        var agent models.AgentWithMetadata
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
            &agent.CreatedAt,
            &agent.UpdatedAt,
            &agent.Version,
            &agent.SubmissionStatus,
            &agent.SubmissionReviewDate,
            &agent.SubmissionReviewComments,
        )
        if err != nil {
            logger.Error("Failed to scan agent submission row", zap.Error(err))
            return nil, 0, err
        }
        agents = append(agents, agent)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Row iteration failed", zap.Error(err))
        return nil, 0, err
    }

    // Get total count. NOTE: this ignores the optional name/keyword/category
    // filters applied above, so it reflects all pending submissions rather
    // than the filtered set.
    countQuery := `SELECT COUNT(*) FROM "Agents" WHERE submission_status = 'PENDING'`
    var totalCount int
    err = db.QueryRow(ctx, countQuery).Scan(&totalCount)
    if err != nil {
        logger.Error("Failed to get total count of agent submissions", zap.Error(err))
        return nil, 0, err
    }

    logger.Info("Agent submissions retrieved", zap.Int("count", len(agents)))
    return agents, totalCount, nil
}

func ReviewSubmission(ctx context.Context, db *pgxpool.Pool, agentID string, version int, status models.SubmissionStatus, comments *string) (*models.AgentWithMetadata, error) {
    logger := zap.L().With(zap.String("function", "ReviewSubmission"))
    logger.Info("Reviewing agent submission", zap.String("agentID", agentID), zap.Int("version", version))

    query := `
        UPDATE "Agents"
        SET submission_status = $1,
            submission_review_date = NOW(),
            submission_review_comments = $2
        WHERE id = $3 AND version = $4
        RETURNING id, name, description, author, keywords, categories, graph, created_at, updated_at, version, submission_status, submission_review_date, submission_review_comments
    `

    var agent models.AgentWithMetadata
    err := db.QueryRow(ctx, query, status, comments, agentID, version).Scan(
        &agent.ID,
        &agent.Name,
        &agent.Description,
        &agent.Author,
        &agent.Keywords,
        &agent.Categories,
        &agent.Graph,
        &agent.CreatedAt,
        &agent.UpdatedAt,
        &agent.Version,
        &agent.SubmissionStatus,
        &agent.SubmissionReviewDate,
        &agent.SubmissionReviewComments,
    )
    if err != nil {
        if err == pgx.ErrNoRows {
            logger.Error("Agent submission not found", zap.String("agentID", agentID), zap.Int("version", version))
            return nil, fmt.Errorf("agent submission not found")
        }
        logger.Error("Failed to review agent submission", zap.Error(err))
        return nil, err
    }

    logger.Info("Agent submission reviewed successfully", zap.String("agentID", agentID), zap.Int("version", version))
    return &agent, nil
}

func GetAllCategories(ctx context.Context, db *pgxpool.Pool) ([]string, error) {
    logger := zap.L().With(zap.String("function", "GetAllCategories"))
    logger.Info("Getting all categories")

    // unnest flattens each agent's categories array into rows, and DISTINCT
    // collapses duplicates across agents.
    query := `
        SELECT DISTINCT unnest(categories) AS category
        FROM "Agents"
        ORDER BY category
    `

    rows, err := db.Query(ctx, query)
    if err != nil {
        logger.Error("Failed to query categories", zap.Error(err))
        return nil, err
    }
    defer rows.Close()

    var categories []string
    for rows.Next() {
        var category string
        if err := rows.Scan(&category); err != nil {
            logger.Error("Failed to scan category row", zap.Error(err))
            return nil, err
        }
        categories = append(categories, category)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Row iteration failed", zap.Error(err))
        return nil, err
    }

    logger.Info("Categories retrieved", zap.Int("count", len(categories)))
    return categories, nil
}

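The remaining hunks are the SQL that backs these queries: a down migration that tears the schema down in dependency order (foreign keys first, then indexes, tables, and enum types, so nothing is dropped while still referenced), the up migration that creates it, and a matching down/up pair for sample seed data.
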
@@ -0,0 +1,22 @@
-- Drop foreign key constraints
ALTER TABLE "AnalyticsTracker" DROP CONSTRAINT IF EXISTS "AnalyticsTracker_agentId_fkey";
ALTER TABLE "InstallTracker" DROP CONSTRAINT IF EXISTS "InstallTracker_marketplaceAgentId_fkey";
ALTER TABLE "FeaturedAgent" DROP CONSTRAINT IF EXISTS "FeaturedAgent_agentId_fkey";

-- Drop indexes
DROP INDEX IF EXISTS "FeaturedAgent_agentId_key";
DROP INDEX IF EXISTS "FeaturedAgent_id_key";
DROP INDEX IF EXISTS "InstallTracker_marketplaceAgentId_installedAgentId_key";
DROP INDEX IF EXISTS "AnalyticsTracker_agentId_key";
DROP INDEX IF EXISTS "AnalyticsTracker_id_key";
DROP INDEX IF EXISTS "Agents_id_key";

-- Drop tables
DROP TABLE IF EXISTS "FeaturedAgent";
DROP TABLE IF EXISTS "InstallTracker";
DROP TABLE IF EXISTS "AnalyticsTracker";
DROP TABLE IF EXISTS "Agents";

-- Drop enums
DROP TYPE IF EXISTS "InstallationLocation";
DROP TYPE IF EXISTS "SubmissionStatus";

@@ -0,0 +1,86 @@
-- CreateEnum
CREATE TYPE "SubmissionStatus" AS ENUM ('PENDING', 'APPROVED', 'REJECTED');

-- CreateEnum
CREATE TYPE "InstallationLocation" AS ENUM ('LOCAL', 'CLOUD');

-- CreateTable
CREATE TABLE "Agents" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "version" INTEGER NOT NULL DEFAULT 1,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "submissionDate" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "submissionReviewDate" TIMESTAMP(3),
    "submissionStatus" "SubmissionStatus" NOT NULL DEFAULT 'PENDING',
    "submissionReviewComments" TEXT,
    "name" TEXT,
    "description" TEXT,
    "author" TEXT,
    "keywords" TEXT[],
    "categories" TEXT[],
    "search" tsvector DEFAULT ''::tsvector,
    "graph" JSONB NOT NULL,

    CONSTRAINT "Agents_pkey" PRIMARY KEY ("id","version")
);

-- CreateTable
CREATE TABLE "AnalyticsTracker" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "agentId" UUID NOT NULL,
    "views" INTEGER NOT NULL,
    "downloads" INTEGER NOT NULL,

    CONSTRAINT "AnalyticsTracker_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "InstallTracker" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "marketplaceAgentId" UUID NOT NULL,
    "installedAgentId" UUID NOT NULL,
    "installationLocation" "InstallationLocation" NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "InstallTracker_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "FeaturedAgent" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "agentId" UUID NOT NULL,
    "isActive" BOOLEAN NOT NULL DEFAULT false,
    "featuredCategories" TEXT[],
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "FeaturedAgent_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "Agents_id_key" ON "Agents"("id");

-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_id_key" ON "AnalyticsTracker"("id");

-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_agentId_key" ON "AnalyticsTracker"("agentId");

-- CreateIndex
CREATE UNIQUE INDEX "InstallTracker_marketplaceAgentId_installedAgentId_key" ON "InstallTracker"("marketplaceAgentId", "installedAgentId");

-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_id_key" ON "FeaturedAgent"("id");

-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_agentId_key" ON "FeaturedAgent"("agentId");

-- AddForeignKey
ALTER TABLE "AnalyticsTracker" ADD CONSTRAINT "AnalyticsTracker_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "InstallTracker" ADD CONSTRAINT "InstallTracker_marketplaceAgentId_fkey" FOREIGN KEY ("marketplaceAgentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "FeaturedAgent" ADD CONSTRAINT "FeaturedAgent_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

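Two things stand out in this schema. First, "Agents" has a composite primary key on ("id", "version"), suggesting multiple versions per agent, yet "Agents_id_key" also enforces "id" alone as unique, which limits each id to a single row. Second, the Go queries above mostly reference snake_case identifiers (agent_id, submission_status) while this migration creates camelCase ones, so a later renaming migration, not shown in this truncated diff, is presumably in play. A minimal connectivity check against the schema as created here, assuming pgx v5 and a placeholder DSN:

// Minimal smoke test for the schema above. The DSN is a placeholder and
// pgx v5 is assumed; with v4 the constructor is pgxpool.Connect instead.
package main

import (
    "context"
    "fmt"
    "log"

    "github.com/jackc/pgx/v5/pgxpool"
)

func main() {
    ctx := context.Background()
    pool, err := pgxpool.New(ctx, "postgres://postgres:postgres@localhost:5432/marketplace")
    if err != nil {
        log.Fatal(err)
    }
    defer pool.Close()

    var pending int
    if err := pool.QueryRow(ctx, `SELECT COUNT(*) FROM "Agents" WHERE "submissionStatus" = 'PENDING'`).Scan(&pending); err != nil {
        log.Fatal(err)
    }
    fmt.Println("pending submissions:", pending)
}
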
@@ -0,0 +1,11 @@
-- Remove sample data from FeaturedAgent table
DELETE FROM "FeaturedAgent" WHERE "agentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297');

-- Remove sample data from InstallTracker table
DELETE FROM "InstallTracker" WHERE "marketplaceAgentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');

-- Remove sample data from AnalyticsTracker table
DELETE FROM "AnalyticsTracker" WHERE "agentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');

-- Remove sample data from Agents table
DELETE FROM "Agents" WHERE "id" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');

@@ -0,0 +1,86 @@
-- Sample data for Agents table (6 agents)

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', 'AI Recruiter', 'An AI-powered tool that assists HR teams with talent acquisition, screening, and shortlisting.', 'Author1', ARRAY['recruitment', 'HR'], ARRAY['human resources', 'talent management'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'Customer Service Bot', 'A chatbot that provides 24/7 support and assistance to customers, handling common inquiries and issues.', 'Author2', ARRAY['customer service', 'chatbot'], ARRAY['customer experience', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'Financial Advisor', 'An AI-powered financial advisor that offers personalized investment recommendations and portfolio management.', 'Author3', ARRAY['finance', 'investment'], ARRAY['wealth management', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'AI Content Writer', 'An AI-powered tool that generates high-quality content for websites, blogs, and marketing materials.', 'Author4', ARRAY['content writing', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', 'AI Image Generator', 'An AI-powered tool that creates realistic images based on text prompts.', 'Author5', ARRAY['image generation', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

-- No "submissionStatus" given here, so this agent defaults to 'PENDING'.
INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', 'AI Video Editor', 'An AI-powered tool that edits and enhances videos with advanced AI algorithms.', 'Author6', ARRAY['video editing', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}');

-- Sample data for AnalyticsTracker table (6 agents)
INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', 200, 80);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 150, 60);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', 100, 40);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 120, 50);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', 130, 55);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', 140, 60);

-- Sample data for InstallTracker table (12 install events)
INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac11', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac12', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac13', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac14', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac15', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac16', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac17', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', '244f809e-1eee-4a36-a49b-ac2db008ac18', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', '244f809e-1eee-4a36-a49b-ac2db008ac19', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', '244f809e-1eee-4a36-a49b-ac2db008ac20', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '244f809e-1eee-4a36-a49b-ac2db008ac22', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', '244f809e-1eee-4a36-a49b-ac2db008ac21', 'CLOUD');

-- Sample data for FeaturedAgent table (3 featured agents)
INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', true, ARRAY['human resources', 'talent management']);

INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', true, ARRAY['customer experience', 'artificial intelligence']);

INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', true, ARRAY['wealth management', 'artificial intelligence']);

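As a worked example, with this seed data GetTopAgentsByDownloads(ctx, pool, 1, 10) should return five rows (assuming the join columns line up with the final schema): AI Recruiter (80 downloads), Customer Service Bot (60), AI Image Generator (55), AI Content Writer (50), and Financial Advisor (40). AI Video Editor also has 60 downloads, but it is inserted without a "submissionStatus", so it defaults to 'PENDING' and is filtered out of every approved-only query.
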
Some files were not shown because too many files have changed in this diff.