Mirror of https://github.com/Significant-Gravitas/AutoGPT.git, synced 2026-01-11 16:18:07 -05:00
Compare commits
77 Commits
docker-upd...go
| SHA1 |
|---|
| 5e0dad5a96 |
| f598ba27b6 |
| 286202cc66 |
| 55eb917162 |
| c2843eecfa |
| 3971fbd800 |
| e04838feb5 |
| 3ef1f6e380 |
| 705c63b801 |
| 9395706841 |
| da007e3a22 |
| 9e18c26e65 |
| 30e3d65711 |
| b9c26b1a6b |
| 220a127e51 |
| 004e49edb1 |
| bf21bb1fa5 |
| a98677b79d |
| 530ddf2c34 |
| 33ee2f2ee5 |
| ae1d410b65 |
| 196a5d6b59 |
| 82c1249d33 |
| 056eb46c0f |
| 9389a30298 |
| 9b58faeeb6 |
| 9cb55c5ac0 |
| eb1df12ce8 |
| 12f40596b3 |
| 3af26a9379 |
| 4041ed3e33 |
| ee78653425 |
| 6fde030c37 |
| 76c5a27044 |
| c6aba70dd4 |
| bf1e01d423 |
| 470e7036b9 |
| f6608754aa |
| bd70ab00e0 |
| 2c940b381a |
| 03b30ebf5b |
| 0d8c2a820e |
| 5ce562e11f |
| fcf2247c20 |
| 7326ee1221 |
| 70c7a3b1f3 |
| 6bdab5b777 |
| 5f5e31ac19 |
| 9e71b658d6 |
| b7b23d68b4 |
| 630f401cee |
| 94fbcfb501 |
| 8102f78030 |
| 52c731abd6 |
| c8fbce643e |
| b1eb259bb3 |
| c738eb3bc6 |
| 60fca5c5f0 |
| bba9836735 |
| ad76bd1300 |
| 6c001bd595 |
| f5b89672f8 |
| 76480ffa03 |
| ab60a57379 |
| 1d9b01fc77 |
| e81d9f9f0b |
| 0d5d0270ea |
| bd25f9223c |
| 07305b55ff |
| cdfe3e5fbc |
| e992cdf8c2 |
| fa16c207e0 |
| ebd2ecd84c |
| 0b919522ae |
| ef691359b7 |
| f8815c3053 |
| a60ed21404 |
@@ -37,3 +37,4 @@ rnd/autogpt_builder/.env.local
rnd/autogpt_server/.env
rnd/autogpt_server/.venv/
rnd/market/.env
@@ -69,6 +69,8 @@ Lets the agent execute non-interactive Shell commands and Python code. Python ex
| `shell_denylist` | List of prohibited shell commands | `List[str]` | `[]` |
| `docker_container_name` | Name of the Docker container used for code execution | `str` | `"agent_sandbox"` |

All shell command configuration options are intended for convenience only. This component is not secure and should not be used in production environments; more appropriate sandboxing is recommended.

### CommandProvider

- `execute_shell`: execute a shell command
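As a rough illustration of how a denylist like `shell_denylist` is typically enforced (a sketch only, not this component's actual implementation):

```python
# Sketch: how a shell denylist is typically applied before running a command.
# Illustrative only; the component's real enforcement logic may differ.
import shlex

shell_denylist = ["rm", "sudo", "shutdown"]  # example values; the default is []

def is_denied(command_line: str) -> bool:
    """Return True if the command's executable is on the denylist."""
    tokens = shlex.split(command_line)
    return bool(tokens) and tokens[0] in shell_denylist

assert is_denied("rm -rf /tmp/scratch")
assert not is_denied("echo hello")
```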
@@ -73,6 +73,7 @@ Once you have installed Yarn and Poetry, you can run the following command to in

```bash
cd rnd/autogpt_server
cp .env.example .env
poetry install
```

@@ -90,7 +91,7 @@ Once you have installed the dependencies, you can proceed to the next step.
In order to set up the database, run the following commands in the same terminal where you ran `poetry install`:

```sh
docker compose up postgres -d
docker compose up postgres redis -d
poetry run prisma migrate dev
```

After deploying the migration, you need to generate the Prisma database model so that the database schema is correctly mapped to your codebase and the application can interact with the database properly:
@@ -101,7 +102,15 @@ poetry run prisma generate

Without running this command, the necessary Python modules (`prisma.models`) won't be available, leading to a `ModuleNotFoundError`.
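A quick way to verify that generation succeeded is to attempt the import the server relies on (a minimal check, run inside the project's virtual environment):

```python
# Minimal check that `poetry run prisma generate` produced the client modules.
try:
    import prisma.models  # exists only after the Prisma client is generated
    print("Prisma client is generated.")
except ModuleNotFoundError:
    print("Missing Prisma client - run `poetry run prisma generate` first.")
```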
### Running the server
### Running the server without Docker

To run the server, run the following command in the same terminal where you ran `poetry install`:

```bash
poetry run app
```

### Running the server within Docker

To run the server, run the following commands in the same terminal where you ran `poetry install`:

@@ -110,7 +119,7 @@ docker compose build
docker compose up
```

In the other terminal, you can run the following command to start the frontend:
In another terminal, from the autogpt_builder directory, you can run the following command to start the frontend:

```bash
yarn dev
@@ -119,3 +128,10 @@ yarn dev

### Checking if the server is running

You can check if the server is running by visiting [http://localhost:3000](http://localhost:3000) in your browser.

### Notes:

By default, the daemons for the different services run on the following ports:

- Execution Manager Daemon: 8002
- Execution Scheduler Daemon: 8003
- Rest Server Daemon: 8004
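To confirm the daemons are up without opening a browser, a simple socket probe of the default ports works (a sketch; it only checks that something is listening):

```python
# Probe the default daemon ports listed above to see whether each is listening.
import socket

DAEMON_PORTS = {
    "Execution Manager": 8002,
    "Execution Scheduler": 8003,
    "Rest Server": 8004,
}

for name, port in DAEMON_PORTS.items():
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(1)
        status = "up" if sock.connect_ex(("localhost", port)) == 0 else "down"
    print(f"{name} daemon (port {port}): {status}")
```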
138 rnd/README.md
@@ -1,36 +1,114 @@
This is a guide to setting up and running the AutoGPT Server and Builder. This tutorial will cover downloading the necessary files, setting up the server, and testing the system.
# AutoGPT Platform

https://github.com/user-attachments/assets/fd0d0f35-3155-4263-b575-ba3efb126cb4
Welcome to the AutoGPT Platform - a powerful system for creating and running AI agents to solve business problems. This platform enables you to harness the power of artificial intelligence to automate tasks, analyze data, and generate insights for your organization.

1. Navigate to the AutoGPT GitHub repository.
2. Click the "Code" button, then select "Download ZIP".
3. Once downloaded, extract the ZIP file to a folder of your choice.
## Getting Started

4. Open the extracted folder and navigate to the "rnd" directory.
5. Enter the "AutoGPT server" folder.
6. Open a terminal window in this directory.
7. Locate and open the README file in the AutoGPT server folder: [doc](./autogpt_server/README.md#setup).
8. Copy and paste each command from the setup section in the README into your terminal.
   - Important: Wait for each command to finish before running the next one.
9. If all commands run without errors, enter the final command: `poetry run app`
10. You should now see the server running in your terminal.

### Prerequisites

- Docker
- Docker Compose V2 (comes with Docker Desktop, or can be installed separately)

### Running the System

To run the AutoGPT Platform, follow these steps:

1. Clone this repository to your local machine.
2. Navigate to the project directory.
3. Run the following command:

```
docker compose up -d
```

This command will start all the necessary services defined in the `docker-compose.yml` file in detached mode.

### Docker Compose Commands

Here are some useful Docker Compose commands for managing your AutoGPT Platform:

- `docker compose up -d`: Start the services in detached mode.
- `docker compose stop`: Stop the running services without removing them.
- `docker compose rm`: Remove stopped service containers.
- `docker compose build`: Build or rebuild services.
- `docker compose down`: Stop and remove containers, networks, and volumes.
- `docker compose watch`: Watch for changes in your services and automatically update them.

### Sample Scenarios

Here are some common scenarios where you might use multiple Docker Compose commands:

1. Updating and restarting a specific service:

```
docker compose build api_srv
docker compose up -d --no-deps api_srv
```

This rebuilds the `api_srv` service and restarts it without affecting other services.

2. Viewing logs for troubleshooting:

```
docker compose logs -f api_srv ws_srv
```

This shows and follows the logs for both the `api_srv` and `ws_srv` services.

3. Scaling a service for increased load:

```
docker compose up -d --scale executor=3
```

This scales the `executor` service to 3 instances to handle the increased load.

4. Stopping the entire system for maintenance:

```
docker compose stop
docker compose rm -f
docker compose pull
docker compose up -d
```

This stops all services, removes their containers, pulls the latest images, and restarts the system.

5. Developing with live updates:

```
docker compose watch
```

This watches for changes in your code and automatically updates the relevant services.

6. Checking the status of services:

```
docker compose ps
```

This shows the current status of all services defined in your `docker-compose.yml` file.

These scenarios demonstrate how to use Docker Compose commands in combination to manage your AutoGPT Platform effectively.
### Persisting Data

To persist data for PostgreSQL and Redis, you can modify the `docker-compose.yml` file to add volumes. Here's how:

1. Open the `docker-compose.yml` file in a text editor.
2. Add volume configurations for the PostgreSQL and Redis services:

```yaml
services:
  postgres:
    # ... other configurations ...
    volumes:
      - postgres_data:/var/lib/postgresql/data

  redis:
    # ... other configurations ...
    volumes:
      - redis_data:/data

volumes:
  postgres_data:
  redis_data:
```

3. Save the file and run `docker compose up -d` to apply the changes.

This configuration will create named volumes for PostgreSQL and Redis, ensuring that your data persists across container restarts.
11. Navigate back to the "rnd" folder.
12. Open the "AutoGPT builder" folder.
13. Open the README file in this folder: [doc](./autogpt_builder/README.md#getting-started).
14. In your terminal, run the following commands:

```
npm install
```

```
npm run dev
```

15. Once the front-end is running, click the link to navigate to `localhost:3000`.
16. Click on the "Build" option.
17. Add a few blocks to test the functionality.
18. Connect the blocks together.
19. Click "Run".
20. Check your terminal window - you should see that the server has received the request, is processing it, and has executed it.

And there you have it! You've successfully set up and tested AutoGPT.
@@ -1,6 +1,6 @@
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8001/api/v1/market
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8005/api/v1/market

## Supabase credentials
## YOU ONLY NEED THEM IF YOU WANT TO USE SUPABASE USER AUTHENTICATION
@@ -1,19 +1,19 @@
# Base stage for both dev and prod
FROM node:21-alpine AS base
WORKDIR /app
COPY autogpt_builder/package.json autogpt_builder/yarn.lock ./
COPY rnd/autogpt_builder/package.json rnd/autogpt_builder/yarn.lock ./
RUN yarn install --frozen-lockfile

# Dev stage
FROM base AS dev
ENV NODE_ENV=development
COPY autogpt_builder/ .
COPY rnd/autogpt_builder/ .
EXPOSE 3000
CMD ["npm", "run", "dev"]
CMD ["yarn", "run", "dev"]

# Build stage for prod
FROM base AS build
COPY autogpt_builder/ .
COPY rnd/autogpt_builder/ .
RUN npm run build

# Prod stage
@@ -12,8 +12,10 @@ import InputModalComponent from "./InputModalComponent";
import OutputModalComponent from "./OutputModalComponent";
import {
  BlockIORootSchema,
  BlockIOStringSubSchema,
  Category,
  NodeExecutionResult,
  BlockUIType,
} from "@/lib/autogpt-server-api/types";
import { beautifyString, cn, setNestedProperty } from "@/lib/utils";
import { Button } from "@/components/ui/button";
@@ -21,7 +23,10 @@ import { Switch } from "@/components/ui/switch";
import { Copy, Trash2 } from "lucide-react";
import { history } from "./history";
import NodeHandle from "./NodeHandle";
import { NodeGenericInputField } from "./node-input-components";
import {
  NodeGenericInputField,
  NodeTextBoxInput,
} from "./node-input-components";
import SchemaTooltip from "./SchemaTooltip";
import { getPrimaryCategoryColor } from "@/lib/utils";
import { FlowContext } from "./Flow";
@@ -59,6 +64,7 @@ export type CustomNodeData = {
  backend_id?: string;
  errors?: { [key: string]: string };
  isOutputStatic?: boolean;
  uiType: BlockUIType;
};

export type CustomNode = Node<CustomNodeData, "custom">;
@@ -118,8 +124,16 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
    setIsAdvancedOpen(checked);
  };

  const generateOutputHandles = (schema: BlockIORootSchema) => {
    if (!schema?.properties) return null;
  const generateOutputHandles = (
    schema: BlockIORootSchema,
    nodeType: BlockUIType,
  ) => {
    if (
      !schema?.properties ||
      nodeType === BlockUIType.OUTPUT ||
      nodeType === BlockUIType.NOTE
    )
      return null;
    const keys = Object.keys(schema.properties);
    return keys.map((key) => (
      <div key={key}>
@@ -133,6 +147,137 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
    ));
  };

  const generateInputHandles = (
    schema: BlockIORootSchema,
    nodeType: BlockUIType,
  ) => {
    if (!schema?.properties) return null;
    let keys = Object.entries(schema.properties);
    switch (nodeType) {
      case BlockUIType.INPUT:
        // For INPUT blocks, don't include connection handles
        return keys.map(([propKey, propSchema]) => {
          const isRequired = data.inputSchema.required?.includes(propKey);
          const isConnected = isHandleConnected(propKey);
          const isAdvanced = propSchema.advanced;
          return (
            (isRequired || isAdvancedOpen || !isAdvanced) && (
              <div key={propKey}>
                <span className="text-m green -mb-1 text-gray-900">
                  {propSchema.title || beautifyString(propKey)}
                </span>
                <div key={propKey} onMouseOver={() => {}}>
                  {!isConnected && (
                    <NodeGenericInputField
                      className="mb-2 mt-1"
                      propKey={propKey}
                      propSchema={propSchema}
                      currentValue={getValue(propKey)}
                      connections={data.connections}
                      handleInputChange={handleInputChange}
                      handleInputClick={handleInputClick}
                      errors={data.errors ?? {}}
                      displayName={propSchema.title || beautifyString(propKey)}
                    />
                  )}
                </div>
              </div>
            )
          );
        });

      case BlockUIType.NOTE:
        // For NOTE blocks, don't render any input handles
        const [noteKey, noteSchema] = keys[0];
        return (
          <div key={noteKey}>
            <NodeTextBoxInput
              className=""
              selfKey={noteKey}
              schema={noteSchema as BlockIOStringSubSchema}
              value={getValue(noteKey)}
              handleInputChange={handleInputChange}
              handleInputClick={handleInputClick}
              error={data.errors?.[noteKey] ?? ""}
              displayName={noteSchema.title || beautifyString(noteKey)}
            />
          </div>
        );

      case BlockUIType.OUTPUT:
        // For OUTPUT blocks, only show the 'value' property
        return keys.map(([propKey, propSchema]) => {
          const isRequired = data.inputSchema.required?.includes(propKey);
          const isConnected = isHandleConnected(propKey);
          const isAdvanced = propSchema.advanced;
          return (
            (isRequired || isAdvancedOpen || !isAdvanced) && (
              <div key={propKey} onMouseOver={() => {}}>
                {propKey !== "value" ? (
                  <span className="text-m green -mb-1 text-gray-900">
                    {propSchema.title || beautifyString(propKey)}
                  </span>
                ) : (
                  <NodeHandle
                    keyName={propKey}
                    isConnected={isConnected}
                    isRequired={isRequired}
                    schema={propSchema}
                    side="left"
                  />
                )}
                {!isConnected && (
                  <NodeGenericInputField
                    className="mb-2 mt-1"
                    propKey={propKey}
                    propSchema={propSchema}
                    currentValue={getValue(propKey)}
                    connections={data.connections}
                    handleInputChange={handleInputChange}
                    handleInputClick={handleInputClick}
                    errors={data.errors ?? {}}
                    displayName={propSchema.title || beautifyString(propKey)}
                  />
                )}
              </div>
            )
          );
        });

      default:
        return keys.map(([propKey, propSchema]) => {
          const isRequired = data.inputSchema.required?.includes(propKey);
          const isConnected = isHandleConnected(propKey);
          const isAdvanced = propSchema.advanced;
          return (
            (isRequired || isAdvancedOpen || isConnected || !isAdvanced) && (
              <div key={propKey} onMouseOver={() => {}}>
                <NodeHandle
                  keyName={propKey}
                  isConnected={isConnected}
                  isRequired={isRequired}
                  schema={propSchema}
                  side="left"
                />
                {!isConnected && (
                  <NodeGenericInputField
                    className="mb-2 mt-1"
                    propKey={propKey}
                    propSchema={propSchema}
                    currentValue={getValue(propKey)}
                    connections={data.connections}
                    handleInputChange={handleInputChange}
                    handleInputClick={handleInputClick}
                    errors={data.errors ?? {}}
                    displayName={propSchema.title || beautifyString(propKey)}
                  />
                )}
              </div>
            )
          );
        });
    }
  };
  const handleInputChange = (path: string, value: any) => {
    const keys = parseKeys(path);
    const newValues = JSON.parse(JSON.stringify(data.hardcodedValues));
@@ -378,13 +523,13 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {

  return (
    <div
      className={`${blockClasses} ${errorClass} ${statusClass}`}
      className={`${data.uiType === BlockUIType.NOTE ? "w-[300px]" : "w-[500px]"} ${blockClasses} ${errorClass} ${statusClass} ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : "bg-white"}`}
      onMouseEnter={handleHovered}
      onMouseLeave={handleMouseLeave}
      data-id={`custom-node-${id}`}
    >
      <div
        className={`mb-2 p-3 ${getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
        className={`mb-2 p-3 ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
      >
        <div className="flex items-center justify-between">
          <div className="font-roboto p-3 text-lg font-semibold">
@@ -417,53 +562,24 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
          )}
        </div>
      </div>
      <div className="flex items-start justify-between gap-2 p-3">
      {data.uiType !== BlockUIType.NOTE ? (
        <div className="flex items-start justify-between p-3">
          <div>
            {data.inputSchema &&
              generateInputHandles(data.inputSchema, data.uiType)}
          </div>
          <div className="flex-none">
            {data.outputSchema &&
              generateOutputHandles(data.outputSchema, data.uiType)}
          </div>
        </div>
      ) : (
        <div>
          {data.inputSchema &&
            Object.entries(data.inputSchema.properties).map(
              ([propKey, propSchema]) => {
                const isRequired = data.inputSchema.required?.includes(propKey);
                const isConnected = isHandleConnected(propKey);
                const isAdvanced = propSchema.advanced;
                return (
                  (isRequired ||
                    isAdvancedOpen ||
                    isConnected ||
                    !isAdvanced) && (
                    <div key={propKey} onMouseOver={() => {}}>
                      <NodeHandle
                        keyName={propKey}
                        isConnected={isConnected}
                        isRequired={isRequired}
                        schema={propSchema}
                        side="left"
                      />
                      {!isConnected && (
                        <NodeGenericInputField
                          className="mb-2 mt-1"
                          propKey={propKey}
                          propSchema={propSchema}
                          currentValue={getValue(propKey)}
                          connections={data.connections}
                          handleInputChange={handleInputChange}
                          handleInputClick={handleInputClick}
                          errors={data.errors ?? {}}
                          displayName={
                            propSchema.title || beautifyString(propKey)
                          }
                        />
                      )}
                    </div>
                  )
                );
              },
            )}
            generateInputHandles(data.inputSchema, data.uiType)}
        </div>
        <div className="flex-none">
          {data.outputSchema && generateOutputHandles(data.outputSchema)}
        </div>
      </div>
      {isOutputOpen && (
      )}
      {isOutputOpen && data.uiType !== BlockUIType.NOTE && (
        <div
          data-id="latest-output"
          className="nodrag m-3 break-words rounded-md border-[1.5px] p-2"
@@ -486,25 +602,27 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
          )}
        </div>
      )}
      <div className="mt-2.5 flex items-center pb-4 pl-4">
        <Switch checked={isOutputOpen} onCheckedChange={toggleOutput} />
        <span className="m-1 mr-4">Output</span>
        {hasAdvancedFields && (
          <>
            <Switch onCheckedChange={toggleAdvancedSettings} />
            <span className="m-1">Advanced</span>
          </>
        )}
        {data.status && (
          <Badge
            variant="outline"
            data-id={`badge-${id}-${data.status}`}
            className={cn(data.status.toLowerCase(), "ml-auto mr-5")}
          >
            {data.status}
          </Badge>
        )}
      </div>
      {data.uiType !== BlockUIType.NOTE && (
        <div className="mt-2.5 flex items-center pb-4 pl-4">
          <Switch checked={isOutputOpen} onCheckedChange={toggleOutput} />
          <span className="m-1 mr-4">Output</span>
          {hasAdvancedFields && (
            <>
              <Switch onCheckedChange={toggleAdvancedSettings} />
              <span className="m-1">Advanced</span>
            </>
          )}
          {data.status && (
            <Badge
              variant="outline"
              data-id={`badge-${id}-${data.status}`}
              className={cn(data.status.toLowerCase(), "ml-auto mr-5")}
            >
              {data.status}
            </Badge>
          )}
        </div>
      )}
      <InputModalComponent
        title={activeKey ? `Enter ${beautifyString(activeKey)}` : undefined}
        isOpen={isModalOpen}
@@ -417,6 +417,7 @@ const FlowEditor: React.FC<{
        isOutputOpen: false,
        block_id: blockId,
        isOutputStatic: nodeSchema.staticOutput,
        uiType: nodeSchema.uiType,
      },
    };

@@ -1,6 +1,5 @@
.custom-node {
  color: #000000;
  width: 500px;
  box-sizing: border-box;
  transition: border-color 0.3s ease-in-out;
}
@@ -10,7 +10,7 @@ import {
  BlockIONumberSubSchema,
  BlockIOBooleanSubSchema,
} from "@/lib/autogpt-server-api/types";
import { FC, useCallback, useEffect, useState } from "react";
import React, { FC, useCallback, useEffect, useState } from "react";
import { Button } from "./ui/button";
import { Switch } from "./ui/switch";
import {
@@ -587,6 +587,52 @@ const NodeStringInput: FC<{
  );
};

export const NodeTextBoxInput: FC<{
  selfKey: string;
  schema: BlockIOStringSubSchema;
  value?: string;
  error?: string;
  handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
  handleInputClick: NodeObjectInputTreeProps["handleInputClick"];
  className?: string;
  displayName: string;
}> = ({
  selfKey,
  schema,
  value = "",
  error,
  handleInputChange,
  handleInputClick,
  className,
  displayName,
}) => {
  return (
    <div className={className}>
      <div
        className="nodrag relative m-0 h-[200px] w-full bg-yellow-100 p-4"
        onClick={schema.secret ? () => handleInputClick(selfKey) : undefined}
      >
        <textarea
          id={selfKey}
          value={schema.secret && value ? "********" : value}
          readOnly={schema.secret}
          placeholder={
            schema?.placeholder || `Enter ${beautifyString(displayName)}`
          }
          onChange={(e) => handleInputChange(selfKey, e.target.value)}
          onBlur={(e) => handleInputChange(selfKey, e.target.value)}
          className="h-full w-full resize-none overflow-hidden border-none bg-transparent text-lg text-black outline-none"
          style={{
            fontSize: "min(1em, 16px)",
            lineHeight: "1.2",
          }}
        />
      </div>
      {error && <span className="error-message">{error}</span>}
    </div>
  );
};

const NodeNumberInput: FC<{
  selfKey: string;
  schema: BlockIONumberSubSchema;
@@ -13,6 +13,7 @@ export type Block = {
  inputSchema: BlockIORootSchema;
  outputSchema: BlockIORootSchema;
  staticOutput: boolean;
  uiType: BlockUIType;
};

export type BlockIORootSchema = {
@@ -182,3 +183,10 @@ export type User = {
  id: string;
  email: string;
};

export enum BlockUIType {
  STANDARD = "Standard",
  INPUT = "Input",
  OUTPUT = "Output",
  NOTE = "Note",
}
@@ -46,7 +46,7 @@ export default class MarketplaceAPI {
    pageSize: number = 10,
  ): Promise<AgentListResponse> {
    return this._get(
      `/top-downloads/agents?page=${page}&page_size=${pageSize}`,
      `agents/top-downloads?page=${page}&page_size=${pageSize}`,
    );
  }

@@ -54,7 +54,7 @@ export default class MarketplaceAPI {
    page: number = 1,
    pageSize: number = 10,
  ): Promise<AgentListResponse> {
    return this._get(`/featured/agents?page=${page}&page_size=${pageSize}`);
    return this._get(`/agents/featured?page=${page}&page_size=${pageSize}`);
  }

  async searchAgents(
File diff suppressed because it is too large
@@ -1,6 +1,6 @@
import pytest

from .depends import verify_user, requires_admin_user, requires_user
from .depends import requires_admin_user, requires_user, verify_user


def test_verify_user_no_payload():
9 rnd/autogpt_libs/autogpt_libs/logging/__init__.py Normal file
@@ -0,0 +1,9 @@
from .config import configure_logging
from .filters import BelowLevelFilter
from .formatters import FancyConsoleFormatter

__all__ = [
    "configure_logging",
    "BelowLevelFilter",
    "FancyConsoleFormatter",
]
166 rnd/autogpt_libs/autogpt_libs/logging/config.py Normal file
@@ -0,0 +1,166 @@
"""Logging module for Auto-GPT."""

import logging
import sys
from pathlib import Path

from pydantic import Field, field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

from .filters import BelowLevelFilter
from .formatters import AGPTFormatter, StructuredLoggingFormatter

LOG_DIR = Path(__file__).parent.parent.parent.parent / "logs"
LOG_FILE = "activity.log"
DEBUG_LOG_FILE = "debug.log"
ERROR_LOG_FILE = "error.log"

SIMPLE_LOG_FORMAT = "%(asctime)s %(levelname)s %(title)s%(message)s"

DEBUG_LOG_FORMAT = (
    "%(asctime)s %(levelname)s %(filename)s:%(lineno)d %(title)s%(message)s"
)


class LoggingConfig(BaseSettings):

    level: str = Field(
        default="INFO",
        description="Logging level",
        validation_alias="LOG_LEVEL",
    )

    enable_cloud_logging: bool = Field(
        default=False,
        description="Enable logging to Google Cloud Logging",
    )

    enable_file_logging: bool = Field(
        default=False,
        description="Enable logging to file",
    )
    # File output
    log_dir: Path = Field(
        default=LOG_DIR,
        description="Log directory",
    )

    model_config = SettingsConfigDict(
        env_prefix="",
        env_file=".env",
        env_file_encoding="utf-8",
        extra="ignore",
    )

    @field_validator("level", mode="before")
    @classmethod
    def parse_log_level(cls, v):
        if isinstance(v, str):
            v = v.upper()
            if v not in ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]:
                raise ValueError(f"Invalid log level: {v}")
            return v
        return v


def configure_logging(force_cloud_logging: bool = False) -> None:
    """Configure the native logging module based on the LoggingConfig settings.

    This function sets up logging handlers and formatters according to the
    configuration specified in the LoggingConfig object. It supports various
    logging outputs including console, file, cloud, and JSON logging.

    The function uses the LoggingConfig object to determine which logging
    features to enable and how to configure them. This includes setting
    log levels, log formats, and output destinations.

    No arguments are required as the function creates its own LoggingConfig
    instance internally.

    Note: This function is typically called at the start of the application
    to set up the logging infrastructure.
    """

    config = LoggingConfig()

    log_handlers: list[logging.Handler] = []

    # Cloud logging setup
    if config.enable_cloud_logging or force_cloud_logging:
        import google.cloud.logging
        from google.cloud.logging.handlers import CloudLoggingHandler
        from google.cloud.logging_v2.handlers.transports.sync import SyncTransport

        client = google.cloud.logging.Client()
        cloud_handler = CloudLoggingHandler(
            client,
            name="autogpt_logs",
            transport=SyncTransport,
        )
        cloud_handler.setLevel(config.level)
        cloud_handler.setFormatter(StructuredLoggingFormatter())
        log_handlers.append(cloud_handler)
        print("Cloud logging enabled")
    else:
        # Console output handlers
        stdout = logging.StreamHandler(stream=sys.stdout)
        stdout.setLevel(config.level)
        stdout.addFilter(BelowLevelFilter(logging.WARNING))
        if config.level == "DEBUG":  # config.level is a string level name
            stdout.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stdout.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        stderr = logging.StreamHandler()
        stderr.setLevel(logging.WARNING)
        if config.level == "DEBUG":
            stderr.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT))
        else:
            stderr.setFormatter(AGPTFormatter(SIMPLE_LOG_FORMAT))

        log_handlers += [stdout, stderr]
        print("Console logging enabled")

    # File logging setup
    if config.enable_file_logging:
        # create log directory if it doesn't exist
        if not config.log_dir.exists():
            config.log_dir.mkdir(parents=True, exist_ok=True)

        print(f"Log directory: {config.log_dir}")

        # Activity log handler (INFO and above)
        activity_log_handler = logging.FileHandler(
            config.log_dir / LOG_FILE, "a", "utf-8"
        )
        activity_log_handler.setLevel(config.level)
        activity_log_handler.setFormatter(
            AGPTFormatter(SIMPLE_LOG_FORMAT, no_color=True)
        )
        log_handlers.append(activity_log_handler)

        if config.level == "DEBUG":
            # Debug log handler (all levels)
            debug_log_handler = logging.FileHandler(
                config.log_dir / DEBUG_LOG_FILE, "a", "utf-8"
            )
            debug_log_handler.setLevel(logging.DEBUG)
            debug_log_handler.setFormatter(
                AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True)
            )
            log_handlers.append(debug_log_handler)

        # Error log handler (ERROR and above)
        error_log_handler = logging.FileHandler(
            config.log_dir / ERROR_LOG_FILE, "a", "utf-8"
        )
        error_log_handler.setLevel(logging.ERROR)
        error_log_handler.setFormatter(AGPTFormatter(DEBUG_LOG_FORMAT, no_color=True))
        log_handlers.append(error_log_handler)
        print("File logging enabled")

    # Configure the root logger
    logging.basicConfig(
        format=DEBUG_LOG_FORMAT if config.level == "DEBUG" else SIMPLE_LOG_FORMAT,
        level=config.level,
        handlers=log_handlers,
    )
12 rnd/autogpt_libs/autogpt_libs/logging/filters.py Normal file
@@ -0,0 +1,12 @@
import logging


class BelowLevelFilter(logging.Filter):
    """Filter for logging levels below a certain threshold."""

    def __init__(self, below_level: int):
        super().__init__()
        self.below_level = below_level

    def filter(self, record: logging.LogRecord):
        return record.levelno < self.below_level
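This filter is what lets `configure_logging` above send only sub-WARNING records to stdout while stderr carries WARNING and up. A standalone usage sketch (import path per this diff's package layout):

```python
# Sketch: route only records below WARNING to stdout using BelowLevelFilter.
import logging
import sys

from autogpt_libs.logging.filters import BelowLevelFilter

handler = logging.StreamHandler(stream=sys.stdout)
handler.addFilter(BelowLevelFilter(logging.WARNING))

logger = logging.getLogger("filter_demo")
logger.setLevel(logging.INFO)
logger.addHandler(handler)

logger.info("shown on stdout")        # INFO (20) < WARNING (30): passes
logger.warning("dropped by handler")  # WARNING is not below WARNING: filtered
```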
95 rnd/autogpt_libs/autogpt_libs/logging/formatters.py Normal file
@@ -0,0 +1,95 @@
import logging

from colorama import Fore, Style
from google.cloud.logging_v2.handlers import CloudLoggingFilter, StructuredLogHandler

from .utils import remove_color_codes


class FancyConsoleFormatter(logging.Formatter):
    """
    A custom logging formatter designed for console output.

    This formatter enhances the standard logging output with color coding. The color
    coding is based on the level of the log message, making it easier to distinguish
    between different types of messages in the console output.

    The color for each level is defined in the LEVEL_COLOR_MAP class attribute.
    """

    # level -> (level & text color, title color)
    LEVEL_COLOR_MAP = {
        logging.DEBUG: Fore.LIGHTBLACK_EX,
        logging.INFO: Fore.BLUE,
        logging.WARNING: Fore.YELLOW,
        logging.ERROR: Fore.RED,
        logging.CRITICAL: Fore.RED + Style.BRIGHT,
    }

    def format(self, record: logging.LogRecord) -> str:
        # Make sure `msg` is a string
        if not hasattr(record, "msg"):
            record.msg = ""
        elif type(record.msg) is not str:
            record.msg = str(record.msg)

        # Determine default color based on error level
        level_color = ""
        if record.levelno in self.LEVEL_COLOR_MAP:
            level_color = self.LEVEL_COLOR_MAP[record.levelno]
            record.levelname = f"{level_color}{record.levelname}{Style.RESET_ALL}"

        # Determine color for message
        color = getattr(record, "color", level_color)
        color_is_specified = hasattr(record, "color")

        # Don't color INFO messages unless the color is explicitly specified.
        if color and (record.levelno != logging.INFO or color_is_specified):
            record.msg = f"{color}{record.msg}{Style.RESET_ALL}"

        return super().format(record)


class AGPTFormatter(FancyConsoleFormatter):
    def __init__(self, *args, no_color: bool = False, **kwargs):
        super().__init__(*args, **kwargs)
        self.no_color = no_color

    def format(self, record: logging.LogRecord) -> str:
        # Make sure `msg` is a string
        if not hasattr(record, "msg"):
            record.msg = ""
        elif type(record.msg) is not str:
            record.msg = str(record.msg)

        # Strip color from the message to prevent color spoofing
        if record.msg and not getattr(record, "preserve_color", False):
            record.msg = remove_color_codes(record.msg)

        # Determine color for title
        title = getattr(record, "title", "")
        title_color = getattr(record, "title_color", "") or self.LEVEL_COLOR_MAP.get(
            record.levelno, ""
        )
        if title and title_color:
            title = f"{title_color + Style.BRIGHT}{title}{Style.RESET_ALL}"
        # Make sure record.title is set, and padded with a space if not empty
        record.title = f"{title} " if title else ""

        if self.no_color:
            return remove_color_codes(super().format(record))
        else:
            return super().format(record)


class StructuredLoggingFormatter(StructuredLogHandler, logging.Formatter):
    def __init__(self):
        # Set up CloudLoggingFilter to add diagnostic info to the log records
        self.cloud_logging_filter = CloudLoggingFilter()

        # Init StructuredLogHandler
        super().__init__()

    def format(self, record: logging.LogRecord) -> str:
        self.cloud_logging_filter.filter(record)
        return super().format(record)
14 rnd/autogpt_libs/autogpt_libs/logging/handlers.py Normal file
@@ -0,0 +1,14 @@
from __future__ import annotations

import json
import logging


class JsonFileHandler(logging.FileHandler):
    def format(self, record: logging.LogRecord) -> str:
        record.json_data = json.loads(record.getMessage())
        return json.dumps(getattr(record, "json_data"), ensure_ascii=False, indent=4)

    def emit(self, record: logging.LogRecord) -> None:
        with open(self.baseFilename, "w", encoding="utf-8") as f:
            f.write(self.format(record))
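Note that this handler expects each log message to itself be valid JSON (`format()` calls `json.loads` on it), and `emit()` opens the file in `"w"` mode, so each record overwrites the previous one. A minimal usage sketch:

```python
# Sketch: JsonFileHandler pretty-prints a JSON log message to a file.
import json
import logging

from autogpt_libs.logging.handlers import JsonFileHandler

logger = logging.getLogger("json_demo")
logger.setLevel(logging.INFO)
logger.addHandler(JsonFileHandler("result.json"))

# The message must parse as JSON; each emit overwrites result.json.
logger.info(json.dumps({"status": "ok", "items": 3}))
```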
36 rnd/autogpt_libs/autogpt_libs/logging/test_utils.py Normal file
@@ -0,0 +1,36 @@
import pytest

from .utils import remove_color_codes


@pytest.mark.parametrize(
    "raw_text, clean_text",
    [
        (
            "COMMAND = \x1b[36mbrowse_website\x1b[0m "
            "ARGUMENTS = \x1b[36m{'url': 'https://www.google.com',"
            " 'question': 'What is the capital of France?'}\x1b[0m",
            "COMMAND = browse_website "
            "ARGUMENTS = {'url': 'https://www.google.com',"
            " 'question': 'What is the capital of France?'}",
        ),
        (
            "{'Schaue dir meine Projekte auf github () an, als auch meine Webseiten': "
            "'https://github.com/Significant-Gravitas/AutoGPT,"
            " https://discord.gg/autogpt und https://twitter.com/Auto_GPT'}",
            "{'Schaue dir meine Projekte auf github () an, als auch meine Webseiten': "
            "'https://github.com/Significant-Gravitas/AutoGPT,"
            " https://discord.gg/autogpt und https://twitter.com/Auto_GPT'}",
        ),
        ("", ""),
        ("hello", "hello"),
        ("hello\x1B[31m world", "hello world"),
        ("\x1B[36mHello,\x1B[32m World!", "Hello, World!"),
        (
            "\x1B[1m\x1B[31mError:\x1B[0m\x1B[31m file not found",
            "Error: file not found",
        ),
    ],
)
def test_remove_color_codes(raw_text, clean_text):
    assert remove_color_codes(raw_text) == clean_text
27 rnd/autogpt_libs/autogpt_libs/logging/utils.py Normal file
@@ -0,0 +1,27 @@
import logging
import re
from typing import Any

from colorama import Fore


def remove_color_codes(s: str) -> str:
    return re.sub(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])", "", s)


def fmt_kwargs(kwargs: dict) -> str:
    return ", ".join(f"{n}={repr(v)}" for n, v in kwargs.items())


def print_attribute(
    title: str, value: Any, title_color: str = Fore.GREEN, value_color: str = ""
) -> None:
    logger = logging.getLogger()
    logger.info(
        str(value),
        extra={
            "title": f"{title.rstrip(':')}:",
            "title_color": title_color,
            "color": value_color,
        },
    )
@@ -1,13 +1,21 @@
import secrets
from datetime import datetime, timedelta, timezone
from typing import cast

from supabase import Client, create_client
from supabase import Client

from .types import Credentials, OAuth2Credentials, UserMetadata, UserMetadataRaw
from .types import (
    Credentials,
    OAuth2Credentials,
    OAuthState,
    UserMetadata,
    UserMetadataRaw,
)


class SupabaseIntegrationCredentialsStore:
    def __init__(self, url: str, key: str):
        self.supabase: Client = create_client(url, key)
    def __init__(self, supabase: Client):
        self.supabase = supabase

    def add_creds(self, user_id: str, credentials: Credentials) -> None:
        if self.get_creds_by_id(user_id, credentials.id):
@@ -73,6 +81,52 @@ class SupabaseIntegrationCredentialsStore:
        ]
        self._set_user_integration_creds(user_id, filtered_credentials)

    async def store_state_token(self, user_id: str, provider: str) -> str:
        token = secrets.token_urlsafe(32)
        expires_at = datetime.now(timezone.utc) + timedelta(minutes=10)

        state = OAuthState(
            token=token, provider=provider, expires_at=int(expires_at.timestamp())
        )

        user_metadata = self._get_user_metadata(user_id)
        oauth_states = user_metadata.get("integration_oauth_states", [])
        oauth_states.append(state.model_dump())
        user_metadata["integration_oauth_states"] = oauth_states

        self.supabase.auth.admin.update_user_by_id(
            user_id, {"user_metadata": user_metadata}
        )

        return token

    async def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
        user_metadata = self._get_user_metadata(user_id)
        oauth_states = user_metadata.get("integration_oauth_states", [])

        now = datetime.now(timezone.utc)
        valid_state = next(
            (
                state
                for state in oauth_states
                if state["token"] == token
                and state["provider"] == provider
                and state["expires_at"] > now.timestamp()
            ),
            None,
        )

        if valid_state:
            # Remove the used state
            oauth_states.remove(valid_state)
            user_metadata["integration_oauth_states"] = oauth_states
            self.supabase.auth.admin.update_user_by_id(
                user_id, {"user_metadata": user_metadata}
            )
            return True

        return False

    def _set_user_integration_creds(
        self, user_id: str, credentials: list[Credentials]
    ) -> None:
@@ -19,9 +19,11 @@ class _BaseCredentials(BaseModel):
class OAuth2Credentials(_BaseCredentials):
    type: Literal["oauth2"] = "oauth2"
    access_token: SecretStr
    access_token_expires_at: Optional[int]  # seconds
    access_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the access token expires (if at all)"""
    refresh_token: Optional[SecretStr]
    refresh_token_expires_at: Optional[int]  # seconds
    refresh_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the refresh token expires (if at all)"""
    scopes: list[str]
    metadata: dict[str, Any] = Field(default_factory=dict)

@@ -29,7 +31,8 @@ class OAuth2Credentials(_BaseCredentials):
class APIKeyCredentials(_BaseCredentials):
    type: Literal["api_key"] = "api_key"
    api_key: SecretStr
    expires_at: Optional[int]  # seconds
    expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the API key expires (if at all)"""


Credentials = Annotated[
@@ -38,9 +41,18 @@ Credentials = Annotated[
]


class OAuthState(BaseModel):
    token: str
    provider: str
    expires_at: int
    """Unix timestamp (seconds) indicating when this OAuth state expires"""


class UserMetadata(BaseModel):
    integration_credentials: list[Credentials] = Field(default_factory=list)
    integration_oauth_states: list[OAuthState] = Field(default_factory=list)


class UserMetadataRaw(TypedDict, total=False):
    integration_credentials: list[dict]
    integration_oauth_states: list[dict]
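Since each of these `expires_at` fields is a Unix timestamp in seconds (with `None` meaning the credential never expires), an expiry check reduces to a single comparison. A sketch based only on the semantics documented above:

```python
# Sketch: checking expiry for the Unix-timestamp fields documented above.
from datetime import datetime, timezone

def is_expired(expires_at: int | None) -> bool:
    """True if the credential's expiry timestamp has passed; None never expires."""
    if expires_at is None:
        return False
    return datetime.now(timezone.utc).timestamp() >= expires_at

assert not is_expired(None)  # no expiry set
assert is_expired(0)         # the epoch is long past
```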
1439 rnd/autogpt_libs/poetry.lock generated
File diff suppressed because it is too large
@@ -1,19 +1,21 @@
[tool.poetry]
name = "autogpt-libs"
version = "0.1.0"
version = "0.2.0"
description = "Shared libraries across NextGen AutoGPT"
authors = ["Aarushi <aarushik93@gmail.com>"]
readme = "README.md"
packages = [{ include = "autogpt_libs" }]

[tool.poetry.dependencies]
python = ">=3.10,<4.0"
colorama = "^0.4.6"
google-cloud-logging = "^3.8.0"
pydantic = "^2.8.2"
pydantic-settings = "^2.5.2"
pyjwt = "^2.8.0"
python = ">=3.10,<4.0"
python-dotenv = "^1.0.1"
supabase = "^2.7.2"


[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
@@ -11,6 +11,7 @@ REDIS_PASSWORD=password

AUTH_ENABLED=false
APP_ENV="local"
PYRO_HOST=localhost
SENTRY_DSN=

## ===== OPTIONAL API KEYS ===== ##
@@ -50,3 +51,11 @@ SMTP_PASSWORD=
# Medium
MEDIUM_API_KEY=
MEDIUM_AUTHOR_ID=


# Logging Configuration
LOG_LEVEL=INFO
ENABLE_CLOUD_LOGGING=false
ENABLE_FILE_LOGGING=false
# Use to manually set the log directory
# LOG_DIR=./logs
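These logging variables are the ones `LoggingConfig` (added in `config.py` above) reads via pydantic-settings. A quick sketch of how they surface in code:

```python
# Sketch: LoggingConfig picks these values up from the environment or .env file.
import os

os.environ["LOG_LEVEL"] = "debug"  # parse_log_level upper-cases and validates this

from autogpt_libs.logging.config import LoggingConfig

config = LoggingConfig()
print(config.level)                # "DEBUG"
print(config.enable_file_logging)  # False unless ENABLE_FILE_LOGGING is set
```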
@@ -1,22 +1,16 @@
FROM python:3.11-slim-buster as server_base
FROM python:3.11-slim-buster AS builder

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

WORKDIR /app

# Install build dependencies
RUN apt-get update \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client git \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
    && tar -zxf git.tar.gz \
    && cd git-* \
    && make prefix=/usr all \
    && make prefix=/usr install
    && rm -rf /var/lib/apt/lists/*

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
@@ -25,22 +19,53 @@ ENV POETRY_VERSION=1.8.3 \
    PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry

COPY autogpt /app/autogpt
COPY forge /app/forge
# Copy and install dependencies
COPY rnd/autogpt_libs /app/rnd/autogpt_libs
COPY rnd/autogpt_server/poetry.lock rnd/autogpt_server/pyproject.toml /app/rnd/autogpt_server/
WORKDIR /app/rnd/autogpt_server
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi

# Generate Prisma client
COPY rnd/autogpt_server/schema.prisma ./
RUN poetry config virtualenvs.create false \
    && poetry run prisma generate

FROM python:3.11-slim-buster AS server_dependencies

WORKDIR /app

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"

# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
COPY --from=builder /usr/local/bin /usr/local/bin
# Copy Prisma binaries
COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

ENV PATH="/app/.venv/bin:$PATH"

RUN mkdir -p /app/rnd/autogpt_libs
RUN mkdir -p /app/rnd/autogpt_server

COPY rnd/autogpt_libs /app/rnd/autogpt_libs

COPY rnd/autogpt_server/poetry.lock rnd/autogpt_server/pyproject.toml /app/rnd/autogpt_server/

WORKDIR /app/rnd/autogpt_server

COPY rnd/autogpt_server/pyproject.toml rnd/autogpt_server/poetry.lock ./
RUN poetry install --no-interaction --no-ansi

COPY rnd/autogpt_server/schema.prisma ./
RUN poetry run prisma generate
FROM server_dependencies AS server

COPY rnd/autogpt_server /app/rnd/autogpt_server
FROM server_base as server

ENV PORT=8000
ENV DATABASE_URL=""
ENV PORT=8000

CMD ["poetry", "run", "rest"]
@@ -1,48 +0,0 @@
FROM python:3.11-slim-buster as server_base

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

WORKDIR /app

RUN apt-get update \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
    && tar -zxf git.tar.gz \
    && cd git-* \
    && make prefix=/usr all \
    && make prefix=/usr install

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
    POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_CREATE=false \
    PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry

COPY autogpt /app/autogpt
COPY forge /app/forge
COPY rnd/autogpt_libs /app/rnd/autogpt_libs

WORKDIR /app/rnd/autogpt_server

COPY rnd/autogpt_server/pyproject.toml rnd/autogpt_server/poetry.lock ./
RUN poetry install --no-interaction --no-ansi

COPY rnd/autogpt_server/schema.prisma ./
RUN poetry run prisma generate

COPY rnd/autogpt_server /app/rnd/autogpt_server

FROM server_base as server

ENV PORT=8001
ENV DATABASE_URL=""

CMD ["poetry", "run", "ws"]
@@ -101,7 +101,7 @@ docker compose down
If you run into issues with dangling orphans, try:

```sh
docker-compose down --volumes --remove-orphans && docker-compose up --force-recreate --renew-anon-volumes --remove-orphans
docker compose down --volumes --remove-orphans && docker-compose up --force-recreate --renew-anon-volumes --remove-orphans
```

## Testing
@@ -183,6 +183,13 @@ A communication layer (`service.py`) is created to decouple the communication li

Currently, the IPC is done using Pyro5 and abstracted in a way that allows a function decorated with `@expose` to be called from a different process.

By default the daemons run on the following ports:

- Execution Manager Daemon: 8002
- Execution Scheduler Daemon: 8003
- Rest Server Daemon: 8004
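A minimal sketch of the Pyro5 pattern described above (the service and method names here are illustrative, not the project's actual classes):

```python
# Sketch: a method marked with Pyro5's expose can be called from another process.
import Pyro5.api

@Pyro5.api.expose
class ExecutionService:
    def add_execution(self, graph_id: str) -> str:
        return f"queued {graph_id}"

daemon = Pyro5.api.Daemon(host="localhost")
uri = daemon.register(ExecutionService)
print(uri)  # a client process would call: Pyro5.api.Proxy(uri).add_execution("g1")
# daemon.requestLoop()  # uncomment to serve requests forever
```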
## Adding a New Agent Block

To add a new agent block, you need to create a new class that inherits from `Block` and provides the following information:
@@ -26,10 +26,8 @@ def main(**kwargs):

    from autogpt_server.executor import ExecutionManager, ExecutionScheduler
    from autogpt_server.server import AgentServer, WebsocketServer
    from autogpt_server.util.service import PyroNameServer

    run_processes(
        PyroNameServer(),
        ExecutionManager(),
        ExecutionScheduler(),
        WebsocketServer(),
@@ -55,15 +55,15 @@ for cls in all_subclasses(Block):
        raise ValueError(f"Block ID {block.name} error: {block.id} is already in use")

    # Prevent duplicate field name in input_schema and output_schema
    duplicate_field_names = set(block.input_schema.__fields__.keys()) & set(
        block.output_schema.__fields__.keys()
    duplicate_field_names = set(block.input_schema.model_fields.keys()) & set(
        block.output_schema.model_fields.keys()
    )
    if duplicate_field_names:
        raise ValueError(
            f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
        )

    for field in block.input_schema.__fields__.values():
    for field in block.input_schema.model_fields.values():
        if field.annotation is bool and field.default not in (True, False):
            raise ValueError(f"{block.name} has a boolean field with no default value")
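This hunk swaps the Pydantic v1-style `__fields__` access for the v2 `model_fields` API, which exposes the same field metadata. A standalone sketch of the duplicate-name check it performs:

```python
# Sketch of the duplicate-field check above, using Pydantic v2's model_fields.
from pydantic import BaseModel

class InputSchema(BaseModel):
    query: str
    limit: int = 10

class OutputSchema(BaseModel):
    result: str
    limit: int = 0  # deliberately collides with InputSchema.limit

duplicates = set(InputSchema.model_fields) & set(OutputSchema.model_fields)
assert duplicates == {"limit"}  # this is what triggers the ValueError above
```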
@@ -1,190 +0,0 @@
from __future__ import annotations

import asyncio
import logging
from pathlib import Path
from typing import TYPE_CHECKING, Iterator

from autogpt.agents.agent import Agent, AgentSettings
from autogpt.app.config import ConfigBuilder
from forge.agent.components import AgentComponent
from forge.agent.protocols import CommandProvider
from forge.command import command
from forge.command.command import Command
from forge.file_storage import FileStorageBackendName, get_storage
from forge.file_storage.base import FileStorage
from forge.llm.providers import MultiProvider
from forge.llm.providers.openai import OpenAICredentials, OpenAIProvider
from forge.llm.providers.schema import ModelProviderName
from forge.models.json_schema import JSONSchema
from pydantic import Field, SecretStr

from autogpt_server.data.block import Block, BlockCategory, BlockOutput, BlockSchema
from autogpt_server.data.model import BlockSecret, SchemaField, SecretField

if TYPE_CHECKING:
    from autogpt.app.config import AppConfig

logger = logging.getLogger(__name__)


class BlockAgentSettings(AgentSettings):
    enabled_components: list[str] = Field(default_factory=list)


class OutputComponent(CommandProvider):
    def get_commands(self) -> Iterator[Command]:
        yield self.output

    @command(
        parameters={
            "output": JSONSchema(
                type=JSONSchema.Type.STRING,
                description="Output data to be returned.",
                required=True,
            ),
        },
    )
    def output(self, output: str) -> str:
        """Use this to output the result."""
        return output


class BlockAgent(Agent):
    def __init__(
        self,
        settings: BlockAgentSettings,
        llm_provider: MultiProvider,
        file_storage: FileStorage,
        app_config: AppConfig,
    ):
        super().__init__(settings, llm_provider, file_storage, app_config)

        self.output = OutputComponent()

        # Disable components
        for attr_name in list(self.__dict__.keys()):
            attr_value = getattr(self, attr_name)
            if not isinstance(attr_value, AgentComponent):
                continue
            component_name = type(attr_value).__name__
            if (
                component_name != "SystemComponent"
                and component_name not in settings.enabled_components
            ):
                delattr(self, attr_name)


class AutoGPTAgentBlock(Block):
    class Input(BlockSchema):
        task: str = SchemaField(
            description="Task description for the agent.",
            placeholder="Calculate and use Output command",
        )
        input: str = SchemaField(
            description="Input data for the task",
            placeholder="8 + 5",
        )
        openai_api_key: BlockSecret = SecretField(
            key="openai_api_key", description="OpenAI API key"
        )
        enabled_components: list[str] = Field(
            default_factory=lambda: [OutputComponent.__name__],
            description="List of [AgentComponents](https://docs.agpt.co/forge/components/built-in-components/) enabled for the agent.",
        )
        disabled_commands: list[str] = Field(
            default_factory=list,
            description="List of commands from enabled components to disable.",
        )
        fast_mode: bool = Field(
            False,
            description="If true uses fast llm, otherwise uses smart and slow llm.",
        )

    class Output(BlockSchema):
        result: str

    def __init__(self):
        super().__init__(
            id="d2e2ecd2-9ae6-422d-8dfe-ceca500ce6a6",
            description="AutoGPT agent, it utilizes a Large Language Model and enabled components/tools to perform a task.",
            categories={BlockCategory.AI},
            input_schema=AutoGPTAgentBlock.Input,
            output_schema=AutoGPTAgentBlock.Output,
            test_input={
                "task": "Make calculations and use output command to output the result",
                "input": "5 + 3",
                "openai_api_key": "openai_api_key",
                "enabled_components": [OutputComponent.__name__],
                "disabled_commands": ["finish"],
                "fast_mode": True,
            },
            test_output=[
                ("result", "8"),
            ],
            test_mock={
                "get_provider": lambda _: MultiProvider(),
                "get_result": lambda _: "8",
            },
        )

    @staticmethod
    def get_provider(openai_api_key: str) -> MultiProvider:
        # LLM provider
        settings = OpenAIProvider.default_settings.model_copy()
        settings.credentials = OpenAICredentials(api_key=SecretStr(openai_api_key))
        openai_provider = OpenAIProvider(settings=settings)

        multi_provider = MultiProvider()
        # HACK: Add OpenAI provider to the multi provider with api key
        multi_provider._provider_instances[ModelProviderName.OPENAI] = openai_provider

        return multi_provider

    @staticmethod
    def get_result(agent: BlockAgent) -> str:
        error: Exception | None = None

        for tries in range(3):
            try:
                proposal = asyncio.run(agent.propose_action())
                result = asyncio.run(agent.execute(proposal))
                return str(result)
            except Exception as e:
                error = e

        raise error or Exception("Failed to get result")

    def run(self, input_data: Input) -> BlockOutput:
        # Set up configuration
        config = ConfigBuilder.build_config_from_env()
        # Disable commands
        config.disabled_commands.extend(input_data.disabled_commands)

        # Storage
        local = config.file_storage_backend == FileStorageBackendName.LOCAL
        restrict_to_root = not local or config.restrict_to_workspace
        file_storage = get_storage(
            config.file_storage_backend,
            root_path=Path("data"),
            restrict_to_root=restrict_to_root,
        )
        file_storage.initialize()

        # State
        state = BlockAgentSettings(
            agent_id="TemporaryAgentID",
            name="WrappedAgent",
            description="Wrapped agent for the Agent Server.",
            task=f"Your task: {input_data.task}\n" f"Input data: {input_data.input}",
            enabled_components=input_data.enabled_components,
        )
        # Switch big brain mode
        state.config.big_brain = not input_data.fast_mode
        provider = self.get_provider(input_data.openai_api_key.get_secret_value())

        agent = BlockAgent(state, provider, file_storage, config)

        result = self.get_result(agent)

        yield "result", result
@@ -1,5 +1,7 @@
import re
from typing import Any, List

from jinja2 import BaseLoader, Environment
from pydantic import Field

from autogpt_server.data.block import (
@@ -12,6 +14,8 @@ from autogpt_server.data.block import (
from autogpt_server.data.model import SchemaField
from autogpt_server.util.mock import MockObject

jinja = Environment(loader=BaseLoader())


class StoreValueBlock(Block):
    """
@@ -136,7 +140,7 @@ class FindInDictionaryBlock(Block):
        yield "missing", input_data.input


class InputBlock(Block):
class AgentInputBlock(Block):
    """
    This block is used to provide input to the graph.

@@ -148,13 +152,20 @@ class InputBlock(Block):
    class Input(BlockSchema):
        value: Any = SchemaField(description="The value to be passed as input.")
        name: str = SchemaField(description="The name of the input.")
        description: str = SchemaField(description="The description of the input.")
        description: str = SchemaField(
            description="The description of the input.",
            default="",
            advanced=True,
        )
        placeholder_values: List[Any] = SchemaField(
            description="The placeholder values to be passed as input."
            description="The placeholder values to be passed as input.",
            default=[],
            advanced=True,
        )
        limit_to_placeholder_values: bool = SchemaField(
            description="Whether to limit the selection to placeholder values.",
            default=False,
            advanced=True,
        )

    class Output(BlockSchema):
@@ -164,8 +175,8 @@ class InputBlock(Block):
        super().__init__(
            id="c0a8e994-ebf1-4a9c-a4d8-89d09c86741b",
            description="This block is used to provide input to the graph.",
            input_schema=InputBlock.Input,
            output_schema=InputBlock.Output,
            input_schema=AgentInputBlock.Input,
            output_schema=AgentInputBlock.Output,
            test_input=[
                {
                    "value": "Hello, World!",
@@ -194,7 +205,7 @@ class InputBlock(Block):
        yield "result", input_data.value


class OutputBlock(Block):
class AgentOutputBlock(Block):
    """
    Records the output of the graph for users to see.

@@ -215,13 +226,17 @@ class OutputBlock(Block):
    """

    class Input(BlockSchema):
        recorded_value: Any = SchemaField(
            description="The value to be recorded as output."
        )
        value: Any = SchemaField(description="The value to be recorded as output.")
        name: str = SchemaField(description="The name of the output.")
        description: str = SchemaField(description="The description of the output.")
        fmt_string: str = SchemaField(
            description="The format string to be used to format the recorded_value."
        description: str = SchemaField(
            description="The description of the output.",
            default="",
            advanced=True,
        )
        format: str = SchemaField(
            description="The format string to be used to format the recorded_value.",
            default="",
            advanced=True,
        )

    class Output(BlockSchema):
@@ -238,31 +253,31 @@ class OutputBlock(Block):
                "This block is key for capturing and presenting final results or "
                "important intermediate outputs of the graph execution."
            ),
            input_schema=OutputBlock.Input,
            output_schema=OutputBlock.Output,
            input_schema=AgentOutputBlock.Input,
            output_schema=AgentOutputBlock.Output,
            test_input=[
                {
                    "recorded_value": "Hello, World!",
                    "value": "Hello, World!",
                    "name": "output_1",
                    "description": "This is a test output.",
                    "fmt_string": "{value}",
                    "format": "{{ output_1 }}!!",
                },
                {
                    "recorded_value": 42,
                    "value": "42",
                    "name": "output_2",
                    "description": "This is another test output.",
                    "fmt_string": "{value}",
                    "format": "{{ output_2 }}",
                },
                {
                    "recorded_value": MockObject(value="!!", key="key"),
                    "value": MockObject(value="!!", key="key"),
                    "name": "output_3",
                    "description": "This is a test output with a mock object.",
                    "fmt_string": "{value}",
                    "format": "{{ output_3 }}",
                },
            ],
            test_output=[
                ("output", "Hello, World!"),
                ("output", 42),
                ("output", "Hello, World!!!"),
                ("output", "42"),
                ("output", MockObject(value="!!", key="key")),
            ],
            categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
@@ -274,13 +289,15 @@ class OutputBlock(Block):
        Attempts to format the recorded_value using the fmt_string if provided.
        If formatting fails or no fmt_string is given, returns the original recorded_value.
        """
        if input_data.fmt_string:
        if input_data.format:
            try:
                yield "output", input_data.fmt_string.format(input_data.recorded_value)
            except Exception:
                yield "output", input_data.recorded_value
                fmt = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", input_data.format)
                template = jinja.from_string(fmt)
                yield "output", template.render({input_data.name: input_data.value})
            except Exception as e:
                yield "output", f"Error: {e}, {input_data.value}"
        else:
            yield "output", input_data.recorded_value
            yield "output", input_data.value


class AddToDictionaryBlock(Block):
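To make the new formatting path above concrete: the re.sub call upgrades legacy single-brace placeholders such as {value} into Jinja's {{ ... }} syntax (braces that are already doubled are left alone), then renders the template with the output's name bound to its value. A minimal standalone sketch, not part of the diff:

    import re
    from jinja2 import BaseLoader, Environment

    jinja = Environment(loader=BaseLoader())

    def render(fmt: str, name: str, value) -> str:
        # {name} -> {{name}}; existing {{ ... }} expressions pass through untouched
        fmt = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", fmt)
        return jinja.from_string(fmt).render({name: value})

    print(render("{{ output_1 }}!!", "output_1", "Hello, World!"))  # Hello, World!!!
    print(render("{output_1}", "output_1", "Hello, World!"))        # Hello, World!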
@@ -422,7 +439,8 @@ class NoteBlock(Block):
    class Input(BlockSchema):
        text: str = SchemaField(description="The text to display in the sticky note.")

    class Output(BlockSchema): ...
    class Output(BlockSchema):
        output: str = SchemaField(description="The text to display in the sticky note.")

    def __init__(self):
        super().__init__(
@@ -432,8 +450,11 @@ class NoteBlock(Block):
            input_schema=NoteBlock.Input,
            output_schema=NoteBlock.Output,
            test_input={"text": "Hello, World!"},
            test_output=None,
            test_output=[
                ("output", "Hello, World!"),
            ],
            ui_type=BlockUIType.NOTE,
        )

    def run(self, input_data: Input) -> BlockOutput: ...
    def run(self, input_data: Input) -> BlockOutput:
        yield "output", input_data.text
@@ -438,7 +438,7 @@ class Message(BlockSchema):
class AIConversationBlock(Block):
    class Input(BlockSchema):
        messages: List[Message] = SchemaField(
            description="List of messages in the conversation.", min_items=1
            description="List of messages in the conversation.", min_length=1
        )
        model: LlmModel = SchemaField(
            default=LlmModel.GPT4_TURBO,
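This tracks Pydantic v2, which renamed the list-length constraint on Field from min_items to min_length. A minimal standalone example:

    from pydantic import BaseModel, Field

    class Conversation(BaseModel):
        messages: list[str] = Field(min_length=1)  # v2 spelling; v1 used min_items

    Conversation(messages=["hello"])  # OK
    Conversation(messages=[])         # raises ValidationError: too few items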
@@ -31,7 +31,7 @@ async def connect(call_count=0):
    except Exception as e:
        if call_count <= 5:
            logger.info(f"[Prisma-{conn_id}] Connection failed: {e}. Retrying now..")
            await asyncio.sleep(call_count)
            await asyncio.sleep(2**call_count)
            await connect(call_count + 1)
        else:
            raise e
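The retry delay moves from a linear wait (0, 1, 2, 3, 4, 5 seconds, roughly 15 s total) to exponential backoff (1, 2, 4, 8, 16, 32 seconds, roughly a minute total), giving a slow-starting database considerably more headroom. The same pattern as an iterative standalone sketch (connect_once is a hypothetical single attempt):

    import asyncio

    async def connect_with_backoff(max_retries: int = 5):
        for attempt in range(max_retries + 1):
            try:
                return await connect_once()  # hypothetical: one connection attempt
            except Exception:
                if attempt == max_retries:
                    raise
                await asyncio.sleep(2**attempt)  # sleeps 1, 2, 4, 8, 16 seconds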
@@ -9,7 +9,7 @@ from prisma.models import AgentGraph, AgentNode, AgentNodeLink
from pydantic import BaseModel, PrivateAttr
from pydantic_core import PydanticUndefinedType

from autogpt_server.blocks.basic import InputBlock, OutputBlock
from autogpt_server.blocks.basic import AgentInputBlock, AgentOutputBlock
from autogpt_server.data.block import BlockInput, get_block, get_blocks
from autogpt_server.data.db import BaseDbModel, transaction
from autogpt_server.data.user import DEFAULT_USER_ID
@@ -106,7 +106,9 @@ class Graph(GraphMeta):
    def starting_nodes(self) -> list[Node]:
        outbound_nodes = {link.sink_id for link in self.links}
        input_nodes = {
            v.id for v in self.nodes if isinstance(get_block(v.block_id), InputBlock)
            v.id
            for v in self.nodes
            if isinstance(get_block(v.block_id), AgentInputBlock)
        }
        return [
            node
@@ -116,7 +118,9 @@ class Graph(GraphMeta):

    @property
    def ending_nodes(self) -> list[Node]:
        return [v for v in self.nodes if isinstance(get_block(v.block_id), OutputBlock)]
        return [
            v for v in self.nodes if isinstance(get_block(v.block_id), AgentOutputBlock)
        ]

    @property
    def subgraph_map(self) -> dict[str, str]:
@@ -179,7 +183,9 @@ class Graph(GraphMeta):
            + [sanitize(link.sink_name) for link in node.input_links]
        )
        for name in block.input_schema.get_required_fields():
            if name not in provided_inputs and not isinstance(block, InputBlock):
            if name not in provided_inputs and not isinstance(
                block, AgentInputBlock
            ):
                raise ValueError(
                    f"Node {block.name} #{node.id} required input missing: `{name}`"
                )
@@ -193,7 +199,7 @@ class Graph(GraphMeta):
        def is_input_output_block(nid: str) -> bool:
            bid = node_map[nid].block_id
            b = get_block(bid)
            return isinstance(b, InputBlock) or isinstance(b, OutputBlock)
            return isinstance(b, AgentInputBlock) or isinstance(b, AgentOutputBlock)

        # subgraphs: all nodes in subgraph must be present in the graph.
        for subgraph_id, node_ids in self.subgraphs.items():
rnd/autogpt_server/autogpt_server/exec.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from autogpt_server.app import run_processes
from autogpt_server.executor import ExecutionManager


def main():
    """
    Run all the processes required for the AutoGPT-server REST API.
    """
    run_processes(
        ExecutionManager(),
    )


if __name__ == "__main__":
    main()
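This new module gives the ExecutionManager its own standalone entry point. The pyproject.toml hunk further down in this diff registers it as the executor script, so it can presumably be started with `poetry run executor` from rnd/autogpt_server.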
@@ -14,7 +14,7 @@ from typing import TYPE_CHECKING, Any, Coroutine, Generator, TypeVar
if TYPE_CHECKING:
    from autogpt_server.server.rest_api import AgentServer

from autogpt_server.blocks.basic import InputBlock
from autogpt_server.blocks.basic import AgentInputBlock
from autogpt_server.data import db
from autogpt_server.data.block import Block, BlockData, BlockInput, get_block
from autogpt_server.data.execution import (
@@ -62,6 +62,10 @@ def get_log_metadata(
    }


def get_log_prefix(graph_eid: str, node_eid: str, block_name: str = "-"):
    return f"[ExecutionManager][graph-eid-{graph_eid}|node-eid-{node_eid}|{block_name}]"


T = TypeVar("T")
ExecutionStream = Generator[NodeExecution, None, None]
@@ -114,10 +118,15 @@ def execute_node(
        node_id=node_id,
        block_name=node_block.name,
    )
    prefix = get_log_prefix(
        graph_eid=graph_exec_id,
        node_eid=node_exec_id,
        block_name=node_block.name,
    )
    input_data, error = validate_exec(node, data.data, resolve_input=False)
    if input_data is None:
        logger.error(
            "Skip execution, input validation error",
            f"{prefix} Skip execution, input validation error",
            extra={"json_fields": {**log_metadata, "error": error}},
        )
        return
@@ -126,7 +135,7 @@ def execute_node(
    input_data_str = json.dumps(input_data)
    input_size = len(input_data_str)
    logger.info(
        "Executed node with input",
        f"{prefix} Executed node with input",
        extra={"json_fields": {**log_metadata, "input": input_data_str}},
    )
    update_execution(ExecutionStatus.RUNNING)
@@ -136,7 +145,7 @@ def execute_node(
        for output_name, output_data in node_block.execute(input_data):
            output_size += len(json.dumps(output_data))
            logger.info(
                "Node produced output",
                f"{prefix} Node produced output",
                extra={"json_fields": {**log_metadata, output_name: output_data}},
            )
            wait(upsert_execution_output(node_exec_id, output_name, output_data))
@@ -157,7 +166,7 @@ def execute_node(
    except Exception as e:
        error_msg = f"{e.__class__.__name__}: {e}"
        logger.exception(
            "Node execution failed with error",
            f"{prefix} Node execution failed with error",
            extra={"json_fields": {**log_metadata, "error": error_msg}},
        )
        wait(upsert_execution_output(node_exec_id, "error", error_msg))
@@ -364,7 +373,7 @@ def validate_exec(
def get_agent_server_client() -> "AgentServer":
    from autogpt_server.server.rest_api import AgentServer

    return get_service_client(AgentServer)
    return get_service_client(AgentServer, Config().agent_server_port)


class Executor:
@@ -441,10 +450,15 @@ class Executor:
            node_id=node_exec.node_id,
            block_name="-",
        )
        prefix = get_log_prefix(
            graph_eid=node_exec.graph_exec_id,
            node_eid=node_exec.node_exec_id,
            block_name="-",
        )

        execution_stats = {}
        timing_info, _ = cls._on_node_execution(
            q, node_exec, log_metadata, execution_stats
            q, node_exec, log_metadata, prefix, execution_stats
        )
        execution_stats["walltime"] = timing_info.wall_time
        execution_stats["cputime"] = timing_info.cpu_time
@@ -460,11 +474,12 @@ class Executor:
        q: ExecutionQueue[NodeExecution],
        node_exec: NodeExecution,
        log_metadata: dict,
        prefix: str,
        stats: dict[str, Any] | None = None,
    ):
        try:
            logger.info(
                f"Start node execution {node_exec.node_exec_id}",
                f"{prefix} Start node execution {node_exec.node_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
            for execution in execute_node(
@@ -472,7 +487,7 @@ class Executor:
            ):
                q.add(execution)
            logger.info(
                f"Finished node execution {node_exec.node_exec_id}",
                f"{prefix} Finished node execution {node_exec.node_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
        except Exception as e:
@@ -524,8 +539,13 @@ class Executor:
            node_eid="*",
            block_name="-",
        )
        prefix = get_log_prefix(
            graph_eid=graph_exec.graph_exec_id,
            node_eid="*",
            block_name="-",
        )
        timing_info, node_count = cls._on_graph_execution(
            graph_exec, cancel, log_metadata
            graph_exec, cancel, log_metadata, prefix
        )

        cls.loop.run_until_complete(
@@ -542,10 +562,14 @@ class Executor:
    @classmethod
    @time_measured
    def _on_graph_execution(
        cls, graph_exec: GraphExecution, cancel: threading.Event, log_metadata: dict
        cls,
        graph_exec: GraphExecution,
        cancel: threading.Event,
        log_metadata: dict,
        prefix: str,
    ) -> int:
        logger.info(
            f"Start graph execution {graph_exec.graph_exec_id}",
            f"{prefix} Start graph execution {graph_exec.graph_exec_id}",
            extra={"json_fields": {**log_metadata}},
        )
        n_node_executions = 0
@@ -558,7 +582,7 @@ class Executor:
                    return
                cls.executor.terminate()
                logger.info(
                    f"Terminated graph execution {graph_exec.graph_exec_id}",
                    f"{prefix} Terminated graph execution {graph_exec.graph_exec_id}",
                    extra={"json_fields": {**log_metadata}},
                )
                cls._init_node_executor_pool()
@@ -599,7 +623,7 @@ class Executor:
                    execution.wait()

                logger.debug(
                    f"Dispatching node execution {exec_data.node_exec_id} "
                    f"{prefix} Dispatching node execution {exec_data.node_exec_id} "
                    f"for node {exec_data.node_id}",
                    extra={**log_metadata},
                )
@@ -630,12 +654,12 @@ class Executor:
                execution.wait(3)

            logger.info(
                f"Finished graph execution {graph_exec.graph_exec_id}",
                f"{prefix} Finished graph execution {graph_exec.graph_exec_id}",
                extra={"json_fields": {**log_metadata}},
            )
        except Exception as e:
            logger.exception(
                f"Failed graph execution {graph_exec.graph_exec_id}: {e}",
                f"{prefix} Failed graph execution {graph_exec.graph_exec_id}: {e}",
                extra={"json_fields": {**log_metadata}},
            )
        finally:
@@ -648,6 +672,7 @@ class Executor:

class ExecutionManager(AppService):
    def __init__(self):
        super().__init__(port=Config().execution_manager_port)
        self.use_db = True
        self.pool_size = Config().num_graph_workers
        self.queue = ExecutionQueue[GraphExecution]()
@@ -698,7 +723,7 @@ class ExecutionManager(AppService):
        nodes_input = []
        for node in graph.starting_nodes:
            input_data = {}
            if isinstance(get_block(node.block_id), InputBlock):
            if isinstance(get_block(node.block_id), AgentInputBlock):
                name = node.input_default.get("name")
                if name and name in data:
                    input_data = {"value": data[name]}
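For reference, the tag produced by get_log_prefix now leads every executor log message, so a single graph or node execution can be traced with a plain grep, independent of structured-log tooling. With made-up IDs:

    prefix = get_log_prefix(graph_eid="g123", node_eid="n456", block_name="AgentInputBlock")
    # -> "[ExecutionManager][graph-eid-g123|node-eid-n456|AgentInputBlock]"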
@@ -9,6 +9,7 @@ from autogpt_server.data import schedule as model
from autogpt_server.data.block import BlockInput
from autogpt_server.executor.manager import ExecutionManager
from autogpt_server.util.service import AppService, expose, get_service_client
from autogpt_server.util.settings import Config

logger = logging.getLogger(__name__)

@@ -19,13 +20,15 @@ def log(msg, **kwargs):

class ExecutionScheduler(AppService):
    def __init__(self, refresh_interval=10):
        super().__init__(port=Config().execution_scheduler_port)
        self.use_db = True
        self.last_check = datetime.min
        self.refresh_interval = refresh_interval
        self.use_redis = False

    @property
    def execution_manager_client(self) -> ExecutionManager:
        return get_service_client(ExecutionManager)
        return get_service_client(ExecutionManager, Config().execution_manager_port)

    def run_service(self):
        scheduler = BackgroundScheduler()
@@ -0,0 +1,15 @@
from .base import BaseOAuthHandler
from .github import GitHubOAuthHandler
from .google import GoogleOAuthHandler
from .notion import NotionOAuthHandler

HANDLERS_BY_NAME: dict[str, type[BaseOAuthHandler]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GitHubOAuthHandler,
        GoogleOAuthHandler,
        NotionOAuthHandler,
    ]
}

__all__ = ["HANDLERS_BY_NAME"]
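HANDLERS_BY_NAME maps each handler's PROVIDER_NAME to its class, so the server can resolve a provider from the string in the URL path. A small usage sketch (credentials and redirect URI are placeholders):

    from autogpt_server.integrations.oauth import HANDLERS_BY_NAME

    handler_class = HANDLERS_BY_NAME["github"]  # -> GitHubOAuthHandler
    handler = handler_class(
        client_id="...",      # placeholder
        client_secret="...",  # placeholder
        redirect_uri="https://example.com/api/integrations/github/callback",
    )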
rnd/autogpt_server/autogpt_server/integrations/oauth/base.py (new file, 48 lines)
@@ -0,0 +1,48 @@
import time
from abc import ABC, abstractmethod
from typing import ClassVar

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials


class BaseOAuthHandler(ABC):
    PROVIDER_NAME: ClassVar[str]

    @abstractmethod
    def __init__(self, client_id: str, client_secret: str, redirect_uri: str): ...

    @abstractmethod
    def get_login_url(self, scopes: list[str], state: str) -> str:
        """Constructs a login URL that the user can be redirected to"""
        ...

    @abstractmethod
    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        """Exchanges the acquired authorization code from login for a set of tokens"""
        ...

    @abstractmethod
    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        """Implements the token refresh mechanism"""
        ...

    def refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if credentials.provider != self.PROVIDER_NAME:
            raise ValueError(
                f"{self.__class__.__name__} can not refresh tokens "
                f"for other provider '{credentials.provider}'"
            )
        return self._refresh_tokens(credentials)

    def get_access_token(self, credentials: OAuth2Credentials) -> str:
        """Returns a valid access token, refreshing it first if needed"""
        if self.needs_refresh(credentials):
            credentials = self.refresh_tokens(credentials)
        return credentials.access_token.get_secret_value()

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        """Indicates whether the given tokens need to be refreshed"""
        return (
            credentials.access_token_expires_at is not None
            and credentials.access_token_expires_at < int(time.time()) + 300
        )
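Note the 300-second margin in needs_refresh: a token counts as stale five minutes before its recorded expiry, so a request issued just ahead of the deadline still carries a live token. The arithmetic, with made-up timestamps:

    import time

    now = int(time.time())
    access_token_expires_at = now + 200  # expires in just over 3 minutes
    needs_refresh = access_token_expires_at < now + 300
    # True: inside the 5-minute window, so get_access_token refreshes first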
@@ -0,0 +1,99 @@
import time
from typing import Optional
from urllib.parse import urlencode

import requests
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from .base import BaseOAuthHandler


class GitHubOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at:
    - [Authorizing OAuth apps - GitHub Docs](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps)
    - [Refreshing user access tokens - GitHub Docs](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/refreshing-user-access-tokens)

    Notes:
    - By default, token expiration is disabled on GitHub Apps. This means the access
      token doesn't expire and no refresh token is returned by the authorization flow.
    - When token expiration gets enabled, any existing tokens will remain non-expiring.
    - When token expiration gets disabled, token refreshes will return a non-expiring
      access token *with no refresh token*.
    """  # noqa

    PROVIDER_NAME = "github"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://github.com/login/oauth/authorize"
        self.token_url = "https://github.com/login/oauth/access_token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "scope": " ".join(scopes),
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if not credentials.refresh_token:
            return credentials

        return self._request_tokens(
            {
                "refresh_token": credentials.refresh_token.get_secret_value(),
                "grant_type": "refresh_token",
            }
        )

    def _request_tokens(
        self,
        params: dict[str, str],
        current_credentials: Optional[OAuth2Credentials] = None,
    ) -> OAuth2Credentials:
        request_body = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            **params,
        }
        headers = {"Accept": "application/json"}
        response = requests.post(self.token_url, data=request_body, headers=headers)
        response.raise_for_status()
        token_data: dict = response.json()

        now = int(time.time())
        new_credentials = OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=current_credentials.title if current_credentials else "GitHub",
            access_token=token_data["access_token"],
            # Token refresh responses have an empty `scope` property (see docs),
            # so we have to get the scope from the existing credentials object.
            scopes=(
                token_data.get("scope", "").split(",")
                or (current_credentials.scopes if current_credentials else [])
            ),
            # Refresh token and expiration intervals are only given if token expiration
            # is enabled in the GitHub App's settings.
            refresh_token=token_data.get("refresh_token"),
            access_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("expires_in", None))
                else None
            ),
            refresh_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("refresh_token_expires_in", None))
                else None
            ),
        )
        if current_credentials:
            new_credentials.id = current_credentials.id
        return new_credentials
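End to end, a handler is used in three steps: build a login URL, send the user there, then trade the returned code for credentials. A hedged sketch (client credentials, state handling, and the code value are placeholders; in the server these come from Settings and the Supabase state-token store):

    handler = GitHubOAuthHandler(
        client_id="...",      # placeholder
        client_secret="...",  # placeholder
        redirect_uri="https://example.com/api/integrations/github/callback",
    )
    url = handler.get_login_url(scopes=["repo"], state="random-state-token")
    # ...the user authorizes in the browser; GitHub redirects back with ?code=...
    creds = handler.exchange_code_for_tokens("code-from-callback")
    token = handler.get_access_token(creds)  # refreshes first if near expiry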
@@ -0,0 +1,96 @@
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from pydantic import SecretStr

from .base import BaseOAuthHandler


class GoogleOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.google.com/identity/protocols/oauth2/web-server
    """  # noqa

    PROVIDER_NAME = "google"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.token_uri = "https://oauth2.googleapis.com/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        flow = self._setup_oauth_flow(scopes)
        flow.redirect_uri = self.redirect_uri
        authorization_url, _ = flow.authorization_url(
            access_type="offline",
            include_granted_scopes="true",
            state=state,
            prompt="consent",
        )
        return authorization_url

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        flow = self._setup_oauth_flow(None)
        flow.redirect_uri = self.redirect_uri
        flow.fetch_token(code=code)

        google_creds = flow.credentials
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.token
        assert google_creds.refresh_token
        assert google_creds.expiry
        assert google_creds.scopes
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title="Google",
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Google credentials should ALWAYS have a refresh token
        assert credentials.refresh_token

        google_creds = Credentials(
            token=credentials.access_token.get_secret_value(),
            refresh_token=credentials.refresh_token.get_secret_value(),
            token_uri=self.token_uri,
            client_id=self.client_id,
            client_secret=self.client_secret,
            scopes=credentials.scopes,
        )
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.refresh_token
        assert google_creds.scopes

        google_creds.refresh(Request())
        assert google_creds.expiry

        return OAuth2Credentials(
            id=credentials.id,
            provider=self.PROVIDER_NAME,
            title=credentials.title,
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _setup_oauth_flow(self, scopes: list[str] | None) -> Flow:
        return Flow.from_client_config(
            {
                "web": {
                    "client_id": self.client_id,
                    "client_secret": self.client_secret,
                    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
                    "token_uri": self.token_uri,
                }
            },
            scopes=scopes,
        )
@@ -0,0 +1,76 @@
from base64 import b64encode
from urllib.parse import urlencode

import requests
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from .base import BaseOAuthHandler


class NotionOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.notion.com/docs/authorization

    Notes:
    - Notion uses non-expiring access tokens and therefore doesn't have a refresh flow
    - Notion doesn't use scopes
    """

    PROVIDER_NAME = "notion"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://api.notion.com/v1/oauth/authorize"
        self.token_url = "https://api.notion.com/v1/oauth/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "response_type": "code",
            "owner": "user",
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        request_body = {
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": self.redirect_uri,
        }
        auth_str = b64encode(f"{self.client_id}:{self.client_secret}".encode()).decode()
        headers = {
            "Authorization": f"Basic {auth_str}",
            "Accept": "application/json",
        }
        response = requests.post(self.token_url, json=request_body, headers=headers)
        response.raise_for_status()
        token_data = response.json()

        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=token_data.get("workspace_name", "Notion"),
            access_token=token_data["access_token"],
            refresh_token=None,
            access_token_expires_at=None,  # Notion tokens don't expire
            refresh_token_expires_at=None,
            scopes=[],
            metadata={
                "owner": token_data["owner"],
                "bot_id": token_data["bot_id"],
                "workspace_id": token_data["workspace_id"],
                "workspace_name": token_data.get("workspace_name"),
                "workspace_icon": token_data.get("workspace_icon"),
            },
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Notion doesn't support token refresh
        return credentials

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        # Notion access tokens don't expire
        return False
@@ -1,7 +1,6 @@
from autogpt_server.app import run_processes
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.executor import ExecutionScheduler
from autogpt_server.server import AgentServer
from autogpt_server.util.service import PyroNameServer


def main():
@@ -9,8 +8,6 @@ def main():
    Run all the processes required for the AutoGPT-server REST API.
    """
    run_processes(
        PyroNameServer(),
        ExecutionManager(),
        ExecutionScheduler(),
        AgentServer(),
    )
rnd/autogpt_server/autogpt_server/server/integrations.py (new file, 105 lines)
@@ -0,0 +1,105 @@
import logging
from typing import Annotated, Literal

from autogpt_libs.supabase_integration_credentials_store import (
    SupabaseIntegrationCredentialsStore,
)
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel
from supabase import Client

from autogpt_server.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from autogpt_server.util.settings import Settings

from .utils import get_supabase, get_user_id

logger = logging.getLogger(__name__)
settings = Settings()
integrations_api_router = APIRouter()


def get_store(supabase: Client = Depends(get_supabase)):
    return SupabaseIntegrationCredentialsStore(supabase)


class LoginResponse(BaseModel):
    login_url: str


@integrations_api_router.get("/{provider}/login")
async def login(
    provider: Annotated[str, Path(title="The provider to initiate an OAuth flow for")],
    user_id: Annotated[str, Depends(get_user_id)],
    request: Request,
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
    scopes: Annotated[
        str, Query(title="Comma-separated list of authorization scopes")
    ] = "",
) -> LoginResponse:
    handler = _get_provider_oauth_handler(request, provider)

    # Generate and store a secure random state token
    state = await store.store_state_token(user_id, provider)

    requested_scopes = scopes.split(",") if scopes else []
    login_url = handler.get_login_url(requested_scopes, state)

    return LoginResponse(login_url=login_url)


class CredentialsMetaResponse(BaseModel):
    credentials_id: str
    credentials_type: Literal["oauth2", "api_key"]


@integrations_api_router.post("/{provider}/callback")
async def callback(
    provider: Annotated[str, Path(title="The target provider for this OAuth exchange")],
    code: Annotated[str, Body(title="Authorization code acquired by user login")],
    state_token: Annotated[str, Body(title="Anti-CSRF nonce")],
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
    user_id: Annotated[str, Depends(get_user_id)],
    request: Request,
) -> CredentialsMetaResponse:
    handler = _get_provider_oauth_handler(request, provider)

    # Verify the state token
    if not await store.verify_state_token(user_id, state_token, provider):
        raise HTTPException(status_code=400, detail="Invalid or expired state token")

    try:
        credentials = handler.exchange_code_for_tokens(code)
    except Exception as e:
        logger.warning(f"Code->Token exchange failed for provider {provider}: {e}")
        raise HTTPException(status_code=400, detail=str(e))

    store.add_creds(user_id, credentials)
    return CredentialsMetaResponse(
        credentials_id=credentials.id,
        credentials_type=credentials.type,
    )


# -------- UTILITIES --------- #


def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
    if provider_name not in HANDLERS_BY_NAME:
        raise HTTPException(
            status_code=404, detail=f"Unknown provider '{provider_name}'"
        )

    client_id = getattr(settings.secrets, f"{provider_name}_client_id")
    client_secret = getattr(settings.secrets, f"{provider_name}_client_secret")
    if not (client_id and client_secret):
        raise HTTPException(
            status_code=501,
            detail=f"Integration with provider '{provider_name}' is not configured",
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uri=str(req.url_for("callback", provider=provider_name)),
    )
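With the router mounted under /api/integrations (see the rest_api.py hunk below), the client-side flow would presumably look like this; the base URL, port, and auth header are illustrative assumptions:

    import httpx

    base = "http://localhost:8000/api/integrations"
    headers = {"Authorization": "Bearer <jwt>"}  # placeholder auth token

    # 1. Ask the server for the provider's login URL and redirect the user there
    login = httpx.get(f"{base}/github/login", params={"scopes": "repo"}, headers=headers)
    print(login.json()["login_url"])

    # 2. After authorization, post the code and state token back
    done = httpx.post(
        f"{base}/github/callback",
        json={"code": "<code-from-redirect>", "state_token": "<state>"},
        headers=headers,
    )
    print(done.json())  # {"credentials_id": "...", "credentials_type": "oauth2"}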
@@ -19,10 +19,11 @@ from autogpt_server.data.queue import AsyncEventQueue, AsyncRedisEventQueue
from autogpt_server.data.user import get_or_create_user
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server.model import CreateGraph, SetGraphActiveVersion
from autogpt_server.util.auth import get_user_id
from autogpt_server.util.lock import KeyedMutex
from autogpt_server.util.service import AppService, expose, get_service_client
from autogpt_server.util.settings import Settings
from autogpt_server.util.settings import Config, Settings

from .utils import get_user_id

settings = Settings()

@@ -33,6 +34,7 @@ class AgentServer(AppService):
    _test_dependency_overrides = {}

    def __init__(self, event_queue: AsyncEventQueue | None = None):
        super().__init__(port=Config().agent_server_port)
        self.event_queue = event_queue or AsyncRedisEventQueue()

    @asynccontextmanager
@@ -70,127 +72,132 @@ class AgentServer(AppService):
        )

        # Define the API routes
        router = APIRouter(prefix="/api")
        router.dependencies.append(Depends(auth_middleware))
        api_router = APIRouter(prefix="/api")
        api_router.dependencies.append(Depends(auth_middleware))

        router.add_api_route(
        # Import & Attach sub-routers
        from .integrations import integrations_api_router

        api_router.include_router(integrations_api_router, prefix="/integrations")

        api_router.add_api_route(
            path="/auth/user",
            endpoint=self.get_or_create_user_route,
            methods=["POST"],
        )

        router.add_api_route(
        api_router.add_api_route(
            path="/blocks",
            endpoint=self.get_graph_blocks,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/blocks/{block_id}/execute",
            endpoint=self.execute_graph_block,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs",
            endpoint=self.get_graphs,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/templates",
            endpoint=self.get_templates,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs",
            endpoint=self.create_new_graph,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/templates",
            endpoint=self.create_new_template,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}",
            endpoint=self.get_graph,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/templates/{graph_id}",
            endpoint=self.get_template,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}",
            endpoint=self.update_graph,
            methods=["PUT"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/templates/{graph_id}",
            endpoint=self.update_graph,
            methods=["PUT"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions",
            endpoint=self.get_graph_all_versions,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/templates/{graph_id}/versions",
            endpoint=self.get_graph_all_versions,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions/{version}",
            endpoint=self.get_graph,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/versions/active",
            endpoint=self.set_graph_active_version,
            methods=["PUT"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/input_schema",
            endpoint=self.get_graph_input_schema,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/execute",
            endpoint=self.execute_graph,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions",
            endpoint=self.list_graph_runs,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions/{graph_exec_id}",
            endpoint=self.get_graph_run_node_execution_results,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
            endpoint=self.stop_graph_run,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/schedules",
            endpoint=self.create_schedule,
            methods=["POST"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/{graph_id}/schedules",
            endpoint=self.get_execution_schedules,
            methods=["GET"],
        )
        router.add_api_route(
        api_router.add_api_route(
            path="/graphs/schedules/{schedule_id}",
            endpoint=self.update_schedule,
            methods=["PUT"],
        )

        router.add_api_route(
        api_router.add_api_route(
            path="/settings",
            endpoint=self.update_configuration,
            methods=["POST"],
@@ -198,7 +205,7 @@ class AgentServer(AppService):

        app.add_exception_handler(500, self.handle_internal_http_error)

        app.include_router(router)
        app.include_router(api_router)

        uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None)

@@ -233,11 +240,11 @@ class AgentServer(AppService):

    @property
    def execution_manager_client(self) -> ExecutionManager:
        return get_service_client(ExecutionManager)
        return get_service_client(ExecutionManager, Config().execution_manager_port)

    @property
    def execution_scheduler_client(self) -> ExecutionScheduler:
        return get_service_client(ExecutionScheduler)
        return get_service_client(ExecutionScheduler, Config().execution_scheduler_port)

    @classmethod
    def handle_internal_http_error(cls, request: Request, exc: Exception):
@@ -1,7 +1,11 @@
from autogpt_libs.auth import auth_middleware
from autogpt_libs.auth.middleware import auth_middleware
from fastapi import Depends, HTTPException
from supabase import Client, create_client

from autogpt_server.data.user import DEFAULT_USER_ID
from autogpt_server.util.settings import Settings

settings = Settings()


def get_user_id(payload: dict = Depends(auth_middleware)) -> str:
@@ -13,3 +17,7 @@ def get_user_id(payload: dict = Depends(auth_middleware)) -> str:
    if not user_id:
        raise HTTPException(status_code=401, detail="User ID not found in token")
    return user_id


def get_supabase() -> Client:
    return create_client(settings.secrets.supabase_url, settings.secrets.supabase_key)
@@ -1,6 +1,6 @@
from prisma.models import User

from autogpt_server.blocks.basic import InputBlock, PrintToConsoleBlock
from autogpt_server.blocks.basic import AgentInputBlock, PrintToConsoleBlock
from autogpt_server.blocks.text import FillTextTemplateBlock
from autogpt_server.data import graph
from autogpt_server.data.graph import create_graph
@@ -28,22 +28,12 @@ def create_test_graph() -> graph.Graph:
    """
    nodes = [
        graph.Node(
            block_id=InputBlock().id,
            input_default={
                "name": "input_1",
                "description": "First input value",
                "placeholder_values": [],
                "limit_to_placeholder_values": False,
            },
            block_id=AgentInputBlock().id,
            input_default={"name": "input_1"},
        ),
        graph.Node(
            block_id=InputBlock().id,
            input_default={
                "name": "input_2",
                "description": "Second input value",
                "placeholder_values": [],
                "limit_to_placeholder_values": False,
            },
            block_id=AgentInputBlock().id,
            input_default={"name": "input_2"},
        ),
        graph.Node(
            block_id=FillTextTemplateBlock().id,
@@ -2,7 +2,7 @@ import functools
import logging
import os
import time
from typing import Callable, Tuple, TypeVar
from typing import Callable, ParamSpec, Tuple, TypeVar

from pydantic import BaseModel

@@ -24,18 +24,19 @@ def _end_measurement(
    return end_wall_time - start_wall_time, end_cpu_time - start_cpu_time


P = ParamSpec("P")
T = TypeVar("T")

logger = logging.getLogger(__name__)


def time_measured(func: Callable[..., T]) -> Callable[..., Tuple[TimingInfo, T]]:
def time_measured(func: Callable[P, T]) -> Callable[P, Tuple[TimingInfo, T]]:
    """
    Decorator to measure the time taken by a function to execute.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> Tuple[TimingInfo, T]:
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> Tuple[TimingInfo, T]:
        start_wall_time, start_cpu_time = _start_measurement()
        try:
            result = func(*args, **kwargs)
@@ -49,13 +50,13 @@ def time_measured(func: Callable[..., T]) -> Callable[..., Tuple[TimingInfo, T]]
    return wrapper


def error_logged(func: Callable[..., T]) -> Callable[..., T | None]:
def error_logged(func: Callable[P, T]) -> Callable[P, T | None]:
    """
    Decorator to suppress and log any exceptions raised by a function.
    """

    @functools.wraps(func)
    def wrapper(*args, **kwargs) -> T | None:
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
        try:
            return func(*args, **kwargs)
        except Exception as e:
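Replacing Callable[..., T] with a ParamSpec preserves the wrapped function's exact parameter types for type checkers, so bad call sites are flagged again instead of slipping through an untyped ... signature. A minimal standalone sketch of the pattern (requires Python 3.10+, which pyproject.toml already pins):

    import functools
    from typing import Callable, ParamSpec, TypeVar

    P = ParamSpec("P")
    T = TypeVar("T")

    def logged(func: Callable[P, T]) -> Callable[P, T]:
        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
            print(f"calling {func.__name__}")
            return func(*args, **kwargs)

        return wrapper

    @logged
    def add(a: int, b: int) -> int:
        return a + b

    add(1, 2)    # OK
    add(1, "2")  # a type checker now rejects this call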
@@ -1,17 +1,15 @@
import os

from forge.logging.config import LogFormatName


def configure_logging():
    import logging

    from forge.logging import configure_logging
    import autogpt_libs.logging.config

    if os.getenv("APP_ENV") != "cloud":
        configure_logging()
        autogpt_libs.logging.config.configure_logging(force_cloud_logging=False)
    else:
        configure_logging(log_format=LogFormatName.STRUCTURED)
        autogpt_libs.logging.config.configure_logging(force_cloud_logging=True)

    # Silence httpx logger
    logging.getLogger("httpx").setLevel(logging.WARNING)
@@ -1,12 +1,13 @@
import asyncio
import logging
import os
import threading
import time
from abc import abstractmethod
from typing import Any, Callable, Coroutine, Type, TypeVar, cast

import Pyro5.api
from Pyro5 import api as pyro
from Pyro5 import nameserver

from autogpt_server.data import db
from autogpt_server.data.queue import AsyncEventQueue, AsyncRedisEventQueue
@@ -42,25 +43,16 @@ def expose(func: C) -> C:
    return pyro.expose(wrapper)  # type: ignore


class PyroNameServer(AppProcess):
    def run(self):
        nameserver.start_ns_loop(host=pyro_host, port=9090)

    @conn_retry
    def _wait_for_ns(self):
        pyro.locate_ns(host="localhost", port=9090)

    def health_check(self):
        self._wait_for_ns()
        logger.info(f"{__class__.__name__} is ready")


class AppService(AppProcess):
    shared_event_loop: asyncio.AbstractEventLoop
    event_queue: AsyncEventQueue = AsyncRedisEventQueue()
    use_db: bool = False
    use_redis: bool = False

    def __init__(self, port):
        self.port = port
        self.uri = None

    @classmethod
    @property
    def service_name(cls) -> str:
@@ -108,11 +100,10 @@ class AppService(AppProcess):

    @conn_retry
    def __start_pyro(self):
        daemon = pyro.Daemon(host=pyro_host)
        ns = pyro.locate_ns(host=pyro_host, port=9090)
        uri = daemon.register(self)
        ns.register(self.service_name, uri)
        logger.info(f"[{self.service_name}] Connected to Pyro; URI = {uri}")
        host = Config().pyro_host
        daemon = Pyro5.api.Daemon(host=host, port=self.port)
        self.uri = daemon.register(self, objectId=self.service_name)
        logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
        daemon.requestLoop()

    def __start_async_loop(self):
@@ -122,16 +113,19 @@ class AppService(AppProcess):
AS = TypeVar("AS", bound=AppService)


def get_service_client(service_type: Type[AS]) -> AS:
def get_service_client(service_type: Type[AS], port: int) -> AS:
    service_name = service_type.service_name

    class DynamicClient:
        @conn_retry
        def __init__(self):
            ns = pyro.locate_ns()
            uri = ns.lookup(service_name)
            self.proxy = pyro.Proxy(uri)
            host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
            uri = f"PYRO:{service_type.service_name}@{host}:{port}"
            logger.debug(f"Connecting to service [{service_name}]. URI = {uri}")
            self.proxy = Pyro5.api.Proxy(uri)
            # Attempt to bind to ensure the connection is established
            self.proxy._pyroBind()
            logger.debug(f"Successfully connected to service [{service_name}]")

        def __getattr__(self, name: str) -> Callable[..., Any]:
            return getattr(self.proxy, name)
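Service discovery thus moves from a Pyro name-server lookup to a deterministic URI built from configuration: every service listens on a fixed, configurable port and clients dial it directly. A hedged usage sketch (the ports come from the Config fields added below; the host can be overridden per service through an environment variable derived from the service name, e.g. EXECUTIONMANAGER_HOST):

    from autogpt_server.executor import ExecutionManager
    from autogpt_server.util.service import get_service_client
    from autogpt_server.util.settings import Config

    client = get_service_client(ExecutionManager, Config().execution_manager_port)
    # connects to PYRO:<service_name>@localhost:8002 and proxies method calls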
@@ -72,6 +72,21 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
        extra="allow",
    )

    execution_manager_port: int = Field(
        default=8002,
        description="The port for execution manager daemon to run on",
    )

    execution_scheduler_port: int = Field(
        default=8003,
        description="The port for execution scheduler daemon to run on",
    )

    agent_server_port: int = Field(
        default=8004,
        description="The port for agent server daemon to run on",
    )

    @classmethod
    def settings_customise_sources(
        cls,
@@ -93,6 +108,23 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
    """Secrets for the server."""

    supabase_url: str = Field(default="", description="Supabase URL")
    supabase_key: str = Field(default="", description="Supabase key")

    # OAuth server credentials for integrations
    github_client_id: str = Field(default="", description="GitHub OAuth client ID")
    github_client_secret: str = Field(
        default="", description="GitHub OAuth client secret"
    )
    google_client_id: str = Field(default="", description="Google OAuth client ID")
    google_client_secret: str = Field(
        default="", description="Google OAuth client secret"
    )
    notion_client_id: str = Field(default="", description="Notion OAuth client ID")
    notion_client_secret: str = Field(
        default="", description="Notion OAuth client secret"
    )

    openai_api_key: str = Field(default="", description="OpenAI API key")
    anthropic_api_key: str = Field(default="", description="Anthropic API key")
    groq_api_key: str = Field(default="", description="Groq API key")
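Since Secrets is a pydantic BaseSettings model, the new credentials can presumably be supplied through the environment or the server's .env file rather than in code; with default pydantic-settings loading the variable names match the field names, though the exact mapping depends on the settings sources configured above. An illustrative sketch (values are made up):

    import os

    os.environ["GITHUB_CLIENT_ID"] = "iv1.abc123"  # illustrative value
    os.environ["GITHUB_CLIENT_SECRET"] = "shhh"    # illustrative value

    from autogpt_server.util.settings import Secrets

    assert Secrets().github_client_id == "iv1.abc123"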
@@ -8,7 +8,6 @@ from autogpt_server.data.queue import AsyncEventQueue
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server import AgentServer
from autogpt_server.server.rest_api import get_user_id
from autogpt_server.util.service import PyroNameServer

log = print

@@ -48,7 +47,6 @@ class InMemoryAsyncEventQueue(AsyncEventQueue):

class SpinTestServer:
    def __init__(self):
        self.name_server = PyroNameServer()
        self.exec_manager = ExecutionManager()
        self.in_memory_queue = InMemoryAsyncEventQueue()
        self.agent_server = AgentServer(event_queue=self.in_memory_queue)
@@ -59,7 +57,6 @@ class SpinTestServer:
        return "3e53486c-cf57-477e-ba2a-cb02dc828e1a"

    async def __aenter__(self):
        self.name_server.__enter__()
        self.setup_dependency_overrides()
        self.agent_server.__enter__()
        self.exec_manager.__enter__()
@@ -76,7 +73,6 @@ class SpinTestServer:
        self.scheduler.__exit__(exc_type, exc_val, exc_tb)
        self.exec_manager.__exit__(exc_type, exc_val, exc_tb)
        self.agent_server.__exit__(exc_type, exc_val, exc_tb)
        self.name_server.__exit__(exc_type, exc_val, exc_tb)

    def setup_dependency_overrides(self):
        # Override get_user_id for testing
rnd/autogpt_server/poetry.lock (generated, 4859 lines)
File diff suppressed because it is too large.
@@ -10,10 +10,9 @@ readme = "README.md"

[tool.poetry.dependencies]
python = "^3.10"
agpt = { path = "../../autogpt", develop = true }
aio-pika = "^9.4.3"
anthropic = "^0.25.1"
apscheduler = "^3.10.4"
autogpt-forge = { path = "../../forge", develop = true }
autogpt-libs = { path = "../autogpt_libs" }
click = "^8.1.7"
croniter = "^2.0.5"
@@ -39,15 +38,14 @@ pyro5 = "^5.15"
pytest = "^8.2.1"
pytest-asyncio = "^0.23.7"
python-dotenv = "^1.0.1"
redis = "^5.0.8"
sentry-sdk = "1.45.0"
supabase = "^2.7.2"
tenacity = "^8.3.0"
uvicorn = { extras = ["standard"], version = "^0.30.1" }
websockets = "^12.0"
youtube-transcript-api = "^0.6.2"

aio-pika = "^9.4.3"
redis = "^5.0.8"
sentry-sdk = "1.45.0"

[tool.poetry.group.dev.dependencies]
poethepoet = "^0.26.1"
httpx = "^0.27.0"
@@ -66,11 +64,13 @@ build-backend = "poetry.core.masonry.api"
app = "autogpt_server.app:main"
rest = "autogpt_server.rest:main"
ws = "autogpt_server.ws:main"
executor = "autogpt_server.exec:main"
cli = "autogpt_server.cli:main"
format = "linter:format"
lint = "linter:lint"
test = "run_tests:test"
# https://poethepoet.natn.io/index.html

[tool.poe]
poetry_command = ""

@@ -8,7 +8,8 @@ def wait_for_postgres(max_retries=5, delay=5):
    try:
        result = subprocess.run(
            [
                "docker-compose",
                "docker",
                "compose",
                "-f",
                "docker-compose.test.yaml",
                "exec",
@@ -45,7 +46,8 @@ def test():
    # Start PostgreSQL with Docker Compose
    run_command(
        [
            "docker-compose",
            "docker",
            "compose",
            "-f",
            "docker-compose.test.yaml",
            "up",
@@ -55,7 +57,7 @@ def test():
    )

    if not wait_for_postgres():
        run_command(["docker-compose", "-f", "docker-compose.test.yaml", "down"])
        run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])
        sys.exit(1)

    # Run Prisma migrations
@@ -64,6 +66,6 @@ def test():
    # Run the tests
    result = subprocess.run(["pytest"] + sys.argv[1:], check=False)

    run_command(["docker-compose", "-f", "docker-compose.test.yaml", "down"])
    run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])

    sys.exit(result.returncode)

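The change above is mechanical: every `"docker-compose"` argv entry becomes the two tokens `"docker", "compose"`, since Compose v2 ships as a Docker CLI plugin rather than a standalone binary. A condensed sketch of the resulting flow; the helper names follow the diff, while the `pg_isready` probe, the `-d` flag, and the retry details are assumptions:

```python
import subprocess
import sys
import time


def run_command(cmd: list[str]) -> None:
    subprocess.run(cmd, check=True)


def wait_for_postgres(max_retries=5, delay=5) -> bool:
    # Poll the container until pg_isready succeeds or retries run out.
    for _ in range(max_retries):
        result = subprocess.run(
            ["docker", "compose", "-f", "docker-compose.test.yaml",
             "exec", "postgres", "pg_isready"],
            capture_output=True,
        )
        if result.returncode == 0:
            return True
        time.sleep(delay)
    return False


def test():
    run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "up", "-d"])
    if not wait_for_postgres():
        run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])
        sys.exit(1)
    result = subprocess.run(["pytest"] + sys.argv[1:], check=False)
    run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])
    sys.exit(result.returncode)
```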
@@ -2,7 +2,7 @@ from uuid import UUID

import pytest

from autogpt_server.blocks.basic import InputBlock, StoreValueBlock
from autogpt_server.blocks.basic import AgentInputBlock, StoreValueBlock
from autogpt_server.data.graph import Graph, Link, Node
from autogpt_server.data.user import DEFAULT_USER_ID, create_default_user
from autogpt_server.server.model import CreateGraph
@@ -25,7 +25,7 @@ async def test_graph_creation(server: SpinTestServer):
    await create_default_user("false")

    value_block = StoreValueBlock().id
    input_block = InputBlock().id
    input_block = AgentInputBlock().id

    graph = Graph(
        id="test_graph",

@@ -36,23 +36,19 @@ async def assert_sample_graph_executions(
    graph_exec_id: str,
):
    executions = await agent_server.get_graph_run_node_execution_results(
        test_graph.id, graph_exec_id, test_user.id
        test_graph.id,
        graph_exec_id,
        test_user.id,
    )

    output_list = [{"result": ["Hello"]}, {"result": ["World"]}]
    input_list = [
        {
            "name": "input_1",
            "description": "First input value",
            "placeholder_values": [],
            "limit_to_placeholder_values": False,
            "value": "Hello",
        },
        {
            "name": "input_2",
            "description": "Second input value",
            "placeholder_values": [],
            "limit_to_placeholder_values": False,
            "value": "World",
        },
    ]
@@ -61,16 +57,24 @@ async def assert_sample_graph_executions(
    exec = executions[0]
    assert exec.status == execution.ExecutionStatus.COMPLETED
    assert exec.graph_exec_id == graph_exec_id
    assert exec.output_data in output_list
    assert exec.input_data in input_list
    assert (
        exec.output_data in output_list
    ), f"Output data: {exec.output_data} and {output_list}"
    assert (
        exec.input_data in input_list
    ), f"Input data: {exec.input_data} and {input_list}"
    assert exec.node_id in [test_graph.nodes[0].id, test_graph.nodes[1].id]

    # Executing StoreValueBlock
    exec = executions[1]
    assert exec.status == execution.ExecutionStatus.COMPLETED
    assert exec.graph_exec_id == graph_exec_id
    assert exec.output_data in output_list
    assert exec.input_data in input_list
    assert (
        exec.output_data in output_list
    ), f"Output data: {exec.output_data} and {output_list}"
    assert (
        exec.input_data in input_list
    ), f"Input data: {exec.input_data} and {input_list}"
    assert exec.node_id in [test_graph.nodes[0].id, test_graph.nodes[1].id]

    # Executing FillTextTemplateBlock

@@ -4,6 +4,7 @@ from autogpt_server.data import db, graph
from autogpt_server.executor import ExecutionScheduler
from autogpt_server.usecases.sample import create_test_graph, create_test_user
from autogpt_server.util.service import get_service_client
from autogpt_server.util.settings import Config
from autogpt_server.util.test import SpinTestServer


@@ -13,7 +14,9 @@ async def test_agent_schedule(server: SpinTestServer):
    test_user = await create_test_user()
    test_graph = await graph.create_graph(create_test_graph(), user_id=test_user.id)

    scheduler = get_service_client(ExecutionScheduler)
    scheduler = get_service_client(
        ExecutionScheduler, Config().execution_scheduler_port
    )

    schedules = scheduler.get_execution_schedules(test_graph.id, test_user.id)
    assert len(schedules) == 0

@@ -5,6 +5,7 @@ from autogpt_server.util.service import AppService, expose, get_service_client

class TestService(AppService):
    def __init__(self):
        super().__init__(port=8005)
        self.use_redis = False

    def run_service(self):
@@ -29,7 +30,7 @@ class TestService(AppService):
@pytest.mark.asyncio(scope="session")
async def test_service_creation(server):
    with TestService():
        client = get_service_client(TestService)
        client = get_service_client(TestService, 8005)
        assert client.add(5, 3) == 8
        assert client.subtract(10, 4) == 6
        assert client.fun_with_async(5, 3) == 8

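For context, the pattern under test: an `AppService` subclass binds to a fixed port and exposes methods over Pyro, and `get_service_client` now builds a proxy against that same explicit port instead of resolving it through the removed name server. A minimal sketch reusing only names visible in this diff; the `@expose` decorator usage and `run_service` semantics are assumptions, so treat this as an illustration rather than the library's documented API:

```python
from autogpt_server.util.service import AppService, expose, get_service_client


class EchoService(AppService):  # hypothetical service, mirrors TestService above
    def __init__(self):
        super().__init__(port=8005)
        self.use_redis = False

    @expose
    def add(self, a: int, b: int) -> int:
        return a + b


# Client and service must now agree on the port; there is no name-server lookup.
with EchoService():
    client = get_service_client(EchoService, 8005)
    assert client.add(5, 3) == 8
```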
@@ -1,4 +1,3 @@
version: "3"
services:
  postgres:
    image: ankane/pgvector:latest
@@ -15,6 +14,31 @@ services:
      - "5432:5432"
    networks:
      - app-network

  migrate:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_server/migrate
          action: rebuild
    command: ["poetry", "run", "prisma", "migrate", "deploy"]
    depends_on:
      postgres:
        condition: service_healthy
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
    networks:
      - app-network
    restart: on-failure
    healthcheck:
      test: ["CMD", "poetry", "run", "prisma", "migrate", "status"]
      interval: 10s
      timeout: 5s
      retries: 5

  redis:
    image: redis:latest
@@ -23,11 +47,18 @@ services:
      - "6379:6379"
    networks:
      - app-network
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5

  rest_server:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
    command: ["python", "-m", "autogpt_server.rest"]
    develop:
      watch:
        - path: ./
@@ -35,43 +66,136 @@ services:
          action: rebuild
    depends_on:
      redis:
        condition: service_started
        condition: service_healthy
      postgres:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
      - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
      - EXECUTIONMANAGER_HOST=executor
    ports:
      - "8000:8000"
      - "8003:8003" # execution scheduler
    networks:
      - app-network

  ws_server:
  executor:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile.ws
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
    command: ["python", "-m", "autogpt_server.exec"]
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_server/
          action: rebuild
    depends_on:
      - postgres
      - redis
      redis:
        condition: service_healthy
      postgres:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
      - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
      - AGENTSERVER_HOST=rest_server
    ports:
      - "8002:8000"
    networks:
      - app-network

  websocket_server:
    build:
      context: ../
      dockerfile: rnd/autogpt_server/Dockerfile
      target: server
    command: ["python", "-m", "autogpt_server.ws"]
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_server/
          action: rebuild
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_PASSWORD=password
      - AUTH_ENABLED=false
      - PYRO_HOST=0.0.0.0
    ports:
      - "8001:8001"
    networks:
      - app-network

  market:
    build:
      context: ../
      dockerfile: rnd/market/Dockerfile
    develop:
      watch:
        - path: ./
          target: rnd/market/
          action: rebuild
    depends_on:
      postgres:
        condition: service_healthy
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60&schema=market
    ports:
      - "8015:8000"
    networks:
      - app-network

  frontend:
    build:
      context: ../
      dockerfile: rnd/autogpt_builder/Dockerfile
      target: dev
    develop:
      watch:
        - path: ./
          target: rnd/autogpt_builder/
          action: rebuild
    depends_on:
      postgres:
        condition: service_healthy
      rest_server:
        condition: service_started
      websocket_server:
        condition: service_started
      migrate:
        condition: service_completed_successfully
    environment:
      - DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
      - NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
      - NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
      - NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
    ports:
      - "3000:3000"
    networks:
      - app-network

networks:
  app-network:
    driver: bridge
networks:
  app-network:
    driver: bridge

@@ -13,7 +13,7 @@ serviceAccount:
service:
  type: ClusterIP
  port: 8000
  targetPort: 8000
  targetPort: 8005
  annotations:
    cloud.google.com/neg: '{"ingress": true}'
    beta.cloud.google.com/backend-config: '{"default": "autogpt-market"}'

@@ -39,6 +39,7 @@ spec:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          command: ["poetry", "run", "rest"]
          ports:
            - name: http
              containerPort: {{ .Values.service.port }}

@@ -39,6 +39,7 @@ spec:
            {{- toYaml .Values.securityContext | nindent 12 }}
          image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
          imagePullPolicy: {{ .Values.image.pullPolicy }}
          command: ["poetry", "run", "ws"]
          ports:
            - name: ws
              containerPort: {{ .Values.service.port }}

@@ -1,7 +1,7 @@
replicaCount: 1 # not scaling websocket server for now

image:
  repository: us-east1-docker.pkg.dev/agpt-dev/agpt-ws-server-dev/agpt-ws-server-dev
  repository: us-east1-docker.pkg.dev/agpt-dev/agpt-server-dev/agpt-server-dev
  tag: latest
  pullPolicy: Always

@@ -1,22 +1,16 @@
FROM python:3.11-slim-buster as server_base
FROM python:3.11-slim-buster AS builder

# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1

WORKDIR /app

# Install build dependencies
RUN apt-get update \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client git \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/* \
    && wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
    && tar -zxf git.tar.gz \
    && cd git-* \
    && make prefix=/usr all \
    && make prefix=/usr install

    && rm -rf /var/lib/apt/lists/*

ENV POETRY_VERSION=1.8.3 \
    POETRY_HOME="/opt/poetry" \
@@ -25,19 +19,43 @@ ENV POETRY_VERSION=1.8.3 \
    PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry

COPY rnd/market /app/rnd/market
# Copy and install dependencies
COPY rnd/autogpt_libs /app/rnd/autogpt_libs
COPY rnd/market/poetry.lock rnd/market/pyproject.toml /app/rnd/market/
WORKDIR /app/rnd/market
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi

# Generate Prisma client
COPY rnd/market /app/rnd/market
RUN poetry config virtualenvs.create false \
    && poetry run prisma generate

FROM python:3.11-slim-buster AS server_dependencies

WORKDIR /app

# Copy only necessary files from builder
COPY --from=builder /app /app
COPY --from=builder /usr/local/lib/python3.11 /usr/local/lib/python3.11
COPY --from=builder /usr/local/bin /usr/local/bin
# Copy Prisma binaries
COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-python/binaries

ENV PATH="/app/.venv/bin:$PATH"

RUN mkdir -p /app/rnd/autogpt_libs
RUN mkdir -p /app/rnd/market

COPY rnd/autogpt_libs /app/rnd/autogpt_libs

COPY rnd/market /app/rnd/market

WORKDIR /app/rnd/market

# Install dependencies
RUN poetry install --no-interaction --no-ansi
FROM server_dependencies AS server

RUN poetry run prisma generate

FROM server_base as server

ENV PORT=8000
ENV DATABASE_URL=""
ENV PORT=8015

CMD ["poetry", "run", "app"]
CMD ["uvicorn", "market.app:app", "--reload"]

@@ -87,5 +87,11 @@ def health():
        content="<h1>Marketplace API</h1>", status_code=200
    )

@app.get("/")
def default():
    return fastapi.responses.HTMLResponse(
        content="<h1>Marketplace API</h1>", status_code=200
    )


prometheus_fastapi_instrumentator.Instrumentator().instrument(app).expose(app)

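The last line above is the standard `prometheus-fastapi-instrumentator` pattern: `instrument(app)` installs request-metrics middleware and `expose(app)` mounts a `/metrics` endpoint. A self-contained sketch of the same wiring on a toy app:

```python
import fastapi
import prometheus_fastapi_instrumentator

app = fastapi.FastAPI()


@app.get("/")
def default():
    return fastapi.responses.HTMLResponse(
        content="<h1>Marketplace API</h1>", status_code=200
    )


# instrument() adds latency/count middleware; expose() serves /metrics.
prometheus_fastapi_instrumentator.Instrumentator().instrument(app).expose(app)
```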
@@ -58,7 +58,8 @@ def format():


def app():
    run("uvicorn", "market.app:app", "--reload", "--port", "8001")
    port = os.getenv("PORT", "8015")
    run("uvicorn", "market.app:app", "--reload", "--port", port, "--host", "0.0.0.0")


def setup():

rnd/rest-api-go/.gitignore (vendored, new file, 48 lines)
@@ -0,0 +1,48 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib

# Test binary, built with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

# Dependency directories (remove the comment below to include it)
# vendor/

# Go workspace file
go.work

# IDE-specific files
.idea/
.vscode/

# OS-specific files
.DS_Store
Thumbs.db

# Binary output directory
/bin/

# Log files
*.log

# Environment variables file
.env

# Air temporary files (if using Air for live reloading)
tmp/

# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a

# Debug files
debug

# Project-specific build outputs
/gosrv
rnd/rest-api-go/Dockerfile (new file, 34 lines)
@@ -0,0 +1,34 @@
# Build stage
FROM golang:1.23.1-alpine AS builder

WORKDIR /app

# Copy go mod and sum files
COPY go.mod go.sum ./

# Download all dependencies
RUN go mod download

# Copy the source code
COPY . .

# Build the application
RUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -ldflags="-w -s" -o main .

# Run stage
FROM alpine:latest

RUN apk --no-cache add ca-certificates
ENV GIN_MODE=release

WORKDIR /root/

# Copy the pre-built binary file from the previous stage
COPY --from=builder /app/main .
COPY --from=builder /app/config.yaml .

# Expose port 8080 to the outside world
EXPOSE 8080

# Command to run the executable
CMD ["./main"]
rnd/rest-api-go/README.md (new file, 122 lines)
@@ -0,0 +1,122 @@
# Market API

This project is a Go-based API for a marketplace application. It provides endpoints for managing agents, handling user authentication, and performing administrative tasks.

## Project Structure

The project is organized into several packages:

- `config`: Handles configuration loading and management
- `docs`: Contains the Swagger documentation
- `database`: Contains database migrations and interaction logic
- `handlers`: Implements HTTP request handlers
- `middleware`: Contains middleware functions for the API
- `models`: Defines data structures used throughout the application
- `utils`: Provides utility functions

## Prerequisites

- Go 1.16 or later
- PostgreSQL
- [golang-migrate](https://github.com/golang-migrate/migrate)

## Setup

1. Clone the repository
2. Install dependencies:
   ```
   go mod tidy
   ```
3. Set up the database:
   - Create a PostgreSQL database
   - Update the `DatabaseURL` in your configuration file

4. Run database migrations:
   ```
   migrate -source file://database/migrations -database "postgresql://agpt_user:pass123@localhost:5432/agpt_marketplace?sslmode=disable" up
   ```

## Running the Application

To run the application in development mode with hot reloading:

```
air
```

For production, build and run the binary:

```
go build -o market-api
./market-api
```

## Testing

Run tests with coverage:
```
go test -cover ./...
```

## Code Formatting

Format the code using:

```
gofmt -w .
```

## Database Migrations

Create a new migration:

```
migrate create -ext sql -dir database/migrations -seq <migration_name>
```

Apply migrations:

```
migrate -source file://database/migrations -database "postgresql://user:password@localhost:5432/dbname?sslmode=disable" up
```

Revert the last migration:

```
migrate -source file://database/migrations -database "postgresql://user:password@localhost:5432/dbname?sslmode=disable" down 1
```

## API Endpoints

The API provides various endpoints for agent management, user authentication, and administrative tasks. Some key endpoints include:

- `/api/agents`: Get list of agents
- `/api/agents/:agent_id`: Get agent details
- `/api/agents/submit`: Submit a new agent
- `/api/admin/*`: Various administrative endpoints (requires admin authentication)

Refer to the `main.go` file for a complete list of endpoints and their corresponding handlers.

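Putting the endpoint list together with the `:8015` server address from `config.yaml` below, a client call might look like the following sketch. The query-parameter names and the response shape are assumptions, not confirmed by the handlers:

```python
import requests

BASE_URL = "http://localhost:8015"  # ServerAddress from config.yaml

# Hypothetical listing call; pagination parameter names are illustrative.
resp = requests.get(f"{BASE_URL}/api/agents", params={"page": 1})
resp.raise_for_status()
print(resp.json())
```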
# Swagger Documentation

This project uses the `gin-swagger` and `Swaggo` tools to automatically generate API documentation in OpenAPI (Swagger) format. The documentation is based on comments added to the code using Swagger annotations.

To view and interact with the generated Swagger documentation, follow these steps:

1. Run your Gin server.
2. Access the Swagger UI by navigating to `http://localhost:8015/docs/index.html` in your web browser.

Alternatively, you can view the raw OpenAPI specification at `http://localhost:8015/docs/doc.json`.

## Regenerating Swagger Documentation

If you make changes to your codebase and want to regenerate the Swagger documentation, follow these steps:

1. Run the `swag init` command in your project directory to create a new `docs.go` file (or update an existing one) with Swagger documentation comments based on your code:
   ```bash
   swag init -g main.go
   ```
   Replace `main.go` with the name of your main Go source file.

2. Run your Gin server, and access the updated Swagger UI at `http://localhost:8015/docs/index.html`. You should see your documentation reflecting the latest changes in your codebase.
rnd/rest-api-go/config.yaml (new file, 7 lines)
@@ -0,0 +1,7 @@
ServerAddress: ":8015"
DatabaseURL: "postgresql://agpt_user:pass123@localhost:5433/agpt_marketplace?connect_timeout=60"
JWTSecret: "Z86RsQ+nhSk+A8ODJX1kQA11JCk9nlw8n+MRdSgmR+P1sMPTTDG1rjBTwj7Ucjb3TRHSVxkCNPgXISmzU/vMkA=="
JWTAlgorithm: "HS256"
CORSAllowOrigins:
  - "http://localhost:3000"
  - "http://127.0.0.1:3000"
rnd/rest-api-go/config/config.go (new file, 63 lines)
@@ -0,0 +1,63 @@
package config

import (
    "fmt"

    "github.com/spf13/viper"
    "go.uber.org/zap"
)

type Config struct {
    ServerAddress    string   `mapstructure:"serveraddress"`
    DatabaseURL      string   `mapstructure:"databaseurl"`
    AuthEnabled      bool     `mapstructure:"authenabled"`
    JWTSecret        string   `mapstructure:"jwtsecret"`
    JWTAlgorithm     string   `mapstructure:"jwtalgorithm"`
    CORSAllowOrigins []string `mapstructure:"corsalloworigins"`
}

func Load(configFile ...string) (*Config, error) {
    logger := zap.L().With(zap.String("function", "Load"))

    if len(configFile) > 0 {
        viper.SetConfigFile(configFile[0])
    } else {
        viper.SetConfigName("config")
        viper.SetConfigType("yaml")
        viper.AddConfigPath(".")
    }

    viper.SetEnvPrefix("AGPT")
    viper.AutomaticEnv()

    if err := viper.ReadInConfig(); err != nil {
        logger.Error("Failed to read config file", zap.Error(err))
        return nil, err
    }

    var config Config
    if err := viper.Unmarshal(&config); err != nil {
        logger.Error("Failed to unmarshal config", zap.Error(err))
        return nil, err
    }

    // Validate required fields
    if config.ServerAddress == "" {
        logger.Error("ServerAddress is required")
        return nil, fmt.Errorf("ServerAddress is required")
    }
    if config.DatabaseURL == "" {
        logger.Error("DatabaseURL is required")
        return nil, fmt.Errorf("DatabaseURL is required")
    }
    if config.JWTSecret == "" {
        logger.Error("JWTSecret is required")
        return nil, fmt.Errorf("JWTSecret is required")
    }
    if config.JWTAlgorithm == "" {
        logger.Error("JWTAlgorithm is required")
        return nil, fmt.Errorf("JWTAlgorithm is required")
    }

    return &config, nil
}
rnd/rest-api-go/config/config_test.go (new file, 78 lines)
@@ -0,0 +1,78 @@
package config

import (
    "os"
    "testing"

    "github.com/stretchr/testify/assert"
)

func TestLoadValidConfig(t *testing.T) {
    // Create a temporary config file for testing
    tempFile, err := os.CreateTemp("", "test-config*.yaml")
    if err != nil {
        t.Fatalf("Failed to create temp file: %v", err)
    }
    defer os.Remove(tempFile.Name())

    // Write test configuration to the temp file
    testConfig := []byte(`
serveraddress: ":8080"
databaseurl: "postgres://user:pass@localhost:5432/testdb"
authenabled: true
jwtsecret: "test-secret"
jwtalgorithm: "HS256"
`)
    if _, err := tempFile.Write(testConfig); err != nil {
        t.Fatalf("Failed to write to temp file: %v", err)
    }
    tempFile.Close()

    // Test the Load function with a specific config file
    config, err := Load(tempFile.Name())
    assert.NoError(t, err)
    assert.NotNil(t, config)

    // Verify the loaded configuration
    assert.Equal(t, ":8080", config.ServerAddress)
    assert.Equal(t, "postgres://user:pass@localhost:5432/testdb", config.DatabaseURL)
    assert.True(t, config.AuthEnabled)
    assert.Equal(t, "test-secret", config.JWTSecret)
    assert.Equal(t, "HS256", config.JWTAlgorithm)
}

func TestLoadDefaultConfigFile(t *testing.T) {
    // Test with default config file (should fail in test environment)
    config, err := Load()
    assert.Error(t, err)
    assert.Nil(t, config)
}

func TestLoadMissingConfigFile(t *testing.T) {
    // Test with missing config file
    config, err := Load("non_existent_config.yaml")
    assert.Error(t, err)
    assert.Nil(t, config)
}

func TestLoadInvalidConfigFormat(t *testing.T) {
    // Create a temporary config file for testing
    tempFile, err := os.CreateTemp("", "test-config*.yaml")
    if err != nil {
        t.Fatalf("Failed to create temp file: %v", err)
    }
    defer os.Remove(tempFile.Name())

    // Test with invalid config format
    invalidConfig := []byte(`
serveraddress: ":8080"
databaseurl: 123 # Invalid type, should be string
`)
    if err := os.WriteFile(tempFile.Name(), invalidConfig, 0644); err != nil {
        t.Fatalf("Failed to write invalid config: %v", err)
    }

    config, err := Load(tempFile.Name())
    assert.Error(t, err)
    assert.Nil(t, config)
}
rnd/rest-api-go/database/db.go (new file, 805 lines)
@@ -0,0 +1,805 @@
package database

import (
    "context"
    "fmt"
    "strings"
    "time"

    "github.com/google/uuid"
    "github.com/jackc/pgx/v5"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/swiftyos/market/config"
    "github.com/swiftyos/market/models"
    "github.com/swiftyos/market/utils"
    "go.uber.org/zap"
)

func NewDB(cfg *config.Config) (*pgxpool.Pool, error) {
    return pgxpool.New(context.Background(), cfg.DatabaseURL)
}

func GetAgents(ctx context.Context, db *pgxpool.Pool, logger *zap.Logger, page int, pageSize int, name *string, keywords *string, categories *string) ([]models.Agent, error) {
    logger = logger.With(zap.String("function", "GetAgents")).With(zap.String("file", "db.go"))

    logger.Debug("Query parameters",
        zap.Int("page", page),
        zap.Int("pageSize", pageSize),
        zap.String("name", utils.StringOrNil(name)),
        zap.String("keywords", utils.StringOrNil(keywords)),
        zap.String("categories", utils.StringOrNil(categories)))

    query := `
        SELECT "id", "name", "description", "author", "keywords", "categories", "graph" FROM "Agents"
        WHERE "submissionStatus" = 'APPROVED'
        AND ($3::text IS NULL OR name ILIKE '%' || $3 || '%')
        AND ($4::text IS NULL OR $4 = ANY(keywords))
        AND ($5::text IS NULL OR $5 = ANY(categories))
        ORDER BY "createdAt" DESC
        LIMIT $1 OFFSET $2
    `

    rows, err := db.Query(ctx, query, pageSize, (page-1)*pageSize, name, keywords, categories)
    if err != nil {
        logger.Error("Error querying agents", zap.Error(err))
        return nil, err
    }
    defer rows.Close()

    var agents []models.Agent
    for rows.Next() {
        var agent models.Agent
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
        )
        if err != nil {
            logger.Error("Error scanning agent", zap.Error(err))
            return nil, err
        }
        agents = append(agents, agent)
    }
    if err := rows.Err(); err != nil {
        logger.Error("Error iterating over agents", zap.Error(err))
        return nil, err
    }
    logger.Info("Found agents", zap.Int("count", len(agents)))

    if agents == nil {
        agents = []models.Agent{}
    }
    return agents, err
}

func SubmitAgent(ctx context.Context, db *pgxpool.Pool, request models.AddAgentRequest, user interface{}) (*models.AgentWithMetadata, error) {
    logger := zap.L().With(zap.String("function", "SubmitAgent"))
    logger.Info("Submitting new agent")

    // Generate a new UUID for the agent
    agentID := uuid.New().String()

    // Create the Agent struct
    agent := models.Agent{
        ID:          agentID,
        Name:        request.Graph.Name,
        Description: request.Graph.Description,
        Author:      request.Author,
        Keywords:    request.Keywords,
        Categories:  request.Categories,
        Graph:       request.Graph,
    }

    // Create the AgentWithMetadata struct
    agentWithMetadata := models.AgentWithMetadata{
        Agent:            agent,
        Version:          1,
        CreatedAt:        time.Now(),
        UpdatedAt:        time.Now(),
        SubmissionDate:   time.Now(),
        SubmissionStatus: models.SubmissionStatusPending,
    }

    // Start a transaction
    tx, err := db.Begin(ctx)
    if err != nil {
        logger.Error("Failed to begin transaction", zap.Error(err))
        return nil, err
    }
    defer tx.Rollback(ctx)

    // Insert the agent into the database
    _, err = tx.Exec(ctx, `
        INSERT INTO "Agents" (id, name, description, author, keywords, categories, graph, version, created_at, updated_at, submission_date, submission_status)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
    `, agentWithMetadata.ID, agentWithMetadata.Name, agentWithMetadata.Description, agentWithMetadata.Author,
        agentWithMetadata.Keywords, agentWithMetadata.Categories, agentWithMetadata.Graph,
        agentWithMetadata.Version, agentWithMetadata.CreatedAt, agentWithMetadata.UpdatedAt,
        agentWithMetadata.SubmissionDate, agentWithMetadata.SubmissionStatus)

    if err != nil {
        logger.Error("Failed to insert agent", zap.Error(err))
        return nil, err
    }

    // Commit the transaction
    err = tx.Commit(ctx)
    if err != nil {
        logger.Error("Failed to commit transaction", zap.Error(err))
        return nil, err
    }

    logger.Info("Successfully submitted new agent", zap.String("agentID", agentID))
    return &agentWithMetadata, nil
}

func GetAgentDetails(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.AgentWithMetadata, error) {
    logger := zap.L().With(zap.String("function", "GetAgentDetails"))

    query := `
        SELECT id, name, description, author, keywords, categories, graph, version, created_at, updated_at, submission_date, submission_status
        FROM "Agents"
        WHERE id = $1
    `

    var agent models.AgentWithMetadata
    err := db.QueryRow(ctx, query, agentID).Scan(
        &agent.ID,
        &agent.Name,
        &agent.Description,
        &agent.Author,
        &agent.Keywords,
        &agent.Categories,
        &agent.Graph,
        &agent.Version,
        &agent.CreatedAt,
        &agent.UpdatedAt,
        &agent.SubmissionDate,
        &agent.SubmissionStatus,
    )

    if err != nil {
        if err == pgx.ErrNoRows {
            logger.Error("Agent not found", zap.String("agentID", agentID))
            return nil, fmt.Errorf("agent not found")
        }
        logger.Error("Error querying agent details", zap.Error(err))
        return nil, err
    }

    logger.Info("Agent details retrieved", zap.String("agentID", agentID))
    return &agent, nil
}

func IncrementDownloadCount(ctx context.Context, db *pgxpool.Pool, agentID string) error {
    logger := zap.L().With(zap.String("function", "IncrementDownloadCount"))

    query := `
        UPDATE "Agents"
        SET download_count = download_count + 1
        WHERE id = $1
    `

    _, err := db.Exec(ctx, query, agentID)
    if err != nil {
        logger.Error("Failed to increment download count", zap.Error(err), zap.String("agentID", agentID))
        return err
    }

    logger.Info("Download count incremented", zap.String("agentID", agentID))
    return nil
}

func GetAgentFile(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.AgentFile, error) {
    logger := zap.L().With(zap.String("function", "GetAgentFile"))

    query := `
        SELECT id, name, graph
        FROM "Agents"
        WHERE id = $1
    `

    var agentFile models.AgentFile
    err := db.QueryRow(ctx, query, agentID).Scan(
        &agentFile.ID,
        &agentFile.Name,
        &agentFile.Graph,
    )

    if err != nil {
        if err == pgx.ErrNoRows {
            logger.Error("Agent not found", zap.String("agentID", agentID))
            return nil, fmt.Errorf("agent not found")
        }
        logger.Error("Error querying agent file", zap.Error(err))
        return nil, err
    }

    logger.Info("Agent file retrieved", zap.String("agentID", agentID))
    return &agentFile, nil
}

func GetTopAgentsByDownloads(ctx context.Context, db *pgxpool.Pool, page, pageSize int) ([]models.AgentWithDownloads, int, error) {
    logger := zap.L().With(zap.String("function", "GetTopAgentsByDownloads"))

    offset := (page - 1) * pageSize

    query := `
        SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph, at.downloads
        FROM "Agents" a
        JOIN "AnalyticsTracker" at ON a.id = at.agent_id
        WHERE a.submission_status = 'APPROVED'
        ORDER BY at.downloads DESC
        LIMIT $1 OFFSET $2
    `

    rows, err := db.Query(ctx, query, pageSize, offset)
    if err != nil {
        logger.Error("Failed to query top agents", zap.Error(err))
        return nil, 0, err
    }
    defer rows.Close()

    var agents []models.AgentWithDownloads
    for rows.Next() {
        var agent models.AgentWithDownloads
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
            &agent.Downloads,
        )
        if err != nil {
            logger.Error("Failed to scan agent row", zap.Error(err))
            return nil, 0, err
        }
        agents = append(agents, agent)
    }

    var totalCount int
    err = db.QueryRow(ctx, `SELECT COUNT(*) FROM "Agents" WHERE submission_status = 'APPROVED'`).Scan(&totalCount)
    if err != nil {
        logger.Error("Failed to get total count", zap.Error(err))
        return nil, 0, err
    }

    logger.Info("Top agents retrieved", zap.Int("count", len(agents)))
    return agents, totalCount, nil
}

func GetFeaturedAgents(ctx context.Context, db *pgxpool.Pool, category string, page, pageSize int) ([]models.Agent, int, error) {
    logger := zap.L().With(zap.String("function", "GetFeaturedAgents"))

    offset := (page - 1) * pageSize

    query := `
        SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph
        FROM "Agents" a
        JOIN "FeaturedAgent" fa ON a.id = fa.agent_id
        WHERE $1 = ANY(fa.featured_categories) AND fa.is_active = true AND a.submission_status = 'APPROVED'
        ORDER BY a.created_at DESC
        LIMIT $2 OFFSET $3
    `

    rows, err := db.Query(ctx, query, category, pageSize, offset)
    if err != nil {
        logger.Error("Failed to query featured agents", zap.Error(err))
        return nil, 0, err
    }
    defer rows.Close()

    var agents []models.Agent
    for rows.Next() {
        var agent models.Agent
        err := rows.Scan(
            &agent.ID,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
        )
        if err != nil {
            logger.Error("Failed to scan featured agent row", zap.Error(err))
            return nil, 0, err
        }
        agents = append(agents, agent)
    }

    var totalCount int
    err = db.QueryRow(ctx, `SELECT COUNT(*) FROM "FeaturedAgent" fa JOIN "Agents" a ON fa.agent_id = a.id WHERE $1 = ANY(fa.featured_categories) AND fa.is_active = true AND a.submission_status = 'APPROVED'`, category).Scan(&totalCount)
    if err != nil {
        logger.Error("Failed to get total count of featured agents", zap.Error(err))
        return nil, 0, err
    }

    logger.Info("Featured agents retrieved", zap.Int("count", len(agents)))
    return agents, totalCount, nil
}

func Search(ctx context.Context, db *pgxpool.Pool, query string, categories []string, page, pageSize int, sortBy, sortOrder string) ([]models.AgentWithRank, error) {
    logger := zap.L().With(zap.String("function", "Search"))

    offset := (page - 1) * pageSize

    categoryFilter := ""
    if len(categories) > 0 {
        categoryConditions := make([]string, len(categories))
        for i, cat := range categories {
            // NOTE: category values are interpolated directly into the SQL
            // string, so they must be validated or trusted upstream.
            categoryConditions[i] = fmt.Sprintf("'%s' = ANY(a.categories)", cat)
        }
        categoryFilter = "AND (" + strings.Join(categoryConditions, " OR ") + ")"
    }

    orderByClause := ""
    switch sortBy {
    case "createdAt", "updatedAt":
        orderByClause = fmt.Sprintf(`a."%s" %s, rank DESC`, sortBy, sortOrder)
    case "name":
        orderByClause = fmt.Sprintf(`a.name %s, rank DESC`, sortOrder)
    default:
        orderByClause = `rank DESC, a."createdAt" DESC`
    }

    sqlQuery := fmt.Sprintf(`
        WITH query AS (
            SELECT to_tsquery(string_agg(lexeme || ':*', ' & ' ORDER BY positions)) AS q
            FROM unnest(to_tsvector($1))
        )
        SELECT
            a.id,
            a.created_at,
            a.updated_at,
            a.version,
            a.name,
            LEFT(a.description, 500) AS description,
            a.author,
            a.keywords,
            a.categories,
            a.graph,
            a.submission_status,
            a.submission_date,
            ts_rank(CAST(a.search AS tsvector), query.q) AS rank
        FROM "Agents" a, query
        WHERE a.submission_status = 'APPROVED' %s
        ORDER BY %s
        LIMIT $2
        OFFSET $3
    `, categoryFilter, orderByClause)

    rows, err := db.Query(ctx, sqlQuery, query, pageSize, offset)
    if err != nil {
        logger.Error("Failed to execute search query", zap.Error(err))
        return nil, err
    }
    defer rows.Close()

    var agents []models.AgentWithRank
    for rows.Next() {
        var agent models.AgentWithRank
        err := rows.Scan(
            &agent.ID,
            &agent.CreatedAt,
            &agent.UpdatedAt,
            &agent.Version,
            &agent.Name,
            &agent.Description,
            &agent.Author,
            &agent.Keywords,
            &agent.Categories,
            &agent.Graph,
            &agent.SubmissionStatus,
            &agent.SubmissionDate,
            &agent.Rank,
        )
        if err != nil {
            logger.Error("Failed to scan search result row", zap.Error(err))
            return nil, err
        }
        agents = append(agents, agent)
    }

    logger.Info("Search completed", zap.Int("results", len(agents)))
    return agents, nil
}

func CreateAgentInstalledEvent(ctx context.Context, db *pgxpool.Pool, eventData models.InstallTracker) error {
    logger := zap.L().With(zap.String("function", "CreateAgentInstalledEvent"))
    logger.Info("Creating agent installed event")

    query := `
        INSERT INTO install_tracker (marketplace_agent_id, installed_agent_id, installation_location)
        VALUES ($1, $2, $3)
    `

    _, err := db.Exec(ctx, query,
        eventData.MarketplaceAgentID,
        eventData.InstalledAgentID,
        eventData.InstallationLocation,
    )

    if err != nil {
        logger.Error("Failed to create agent installed event", zap.Error(err))
        return fmt.Errorf("failed to create agent installed event: %w", err)
    }

    logger.Info("Agent installed event created successfully")
    return nil
}

// Admin Queries
|
||||
|
||||
func CreateAgentEntry(ctx context.Context, db *pgxpool.Pool, agent models.Agent) (models.Agent, error) {
|
||||
logger := zap.L().With(zap.String("function", "CreateAgentEntry"))
|
||||
logger.Info("Creating agent entry")
|
||||
|
||||
query := `
|
||||
INSERT INTO agents (id, name, description, author, keywords, categories, graph)
|
||||
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||
RETURNING id, name, description, author, keywords, categories, graph
|
||||
`
|
||||
var createdAgent models.Agent
|
||||
err := db.QueryRow(ctx, query,
|
||||
agent.ID,
|
||||
agent.Name,
|
||||
agent.Description,
|
||||
agent.Author,
|
||||
agent.Keywords,
|
||||
agent.Categories,
|
||||
agent.Graph,
|
||||
).Scan(
|
||||
&createdAgent.ID,
|
||||
&createdAgent.Name,
|
||||
&createdAgent.Description,
|
||||
&createdAgent.Author,
|
||||
&createdAgent.Keywords,
|
||||
&createdAgent.Categories,
|
||||
&createdAgent.Graph,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
logger.Error("Failed to create agent entry", zap.Error(err))
|
||||
return models.Agent{}, err
|
||||
}
|
||||
|
||||
logger.Info("Agent entry created successfully", zap.String("agentID", agent.ID))
|
||||
return createdAgent, nil
|
||||
}
|
||||
|
||||
func SetAgentFeatured(ctx context.Context, db *pgxpool.Pool, agentID string, isActive bool, featuredCategories []string) (*models.FeaturedAgent, error) {
|
||||
logger := zap.L().With(zap.String("function", "SetAgentFeatured"))
|
||||
logger.Info("Setting agent featured status", zap.String("agentID", agentID), zap.Bool("isActive", isActive))
|
||||
|
||||
// Check if the agent exists
|
||||
var exists bool
|
||||
err := db.QueryRow(ctx, `SELECT EXISTS(SELECT 1 FROM "Agents" WHERE id = $1)`, agentID).Scan(&exists)
|
||||
if err != nil {
|
||||
logger.Error("Failed to check if agent exists", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to check if agent exists: %w", err)
|
||||
}
|
||||
if !exists {
|
||||
return nil, fmt.Errorf("agent with ID %s not found", agentID)
|
||||
}
|
||||
|
||||
var query string
|
||||
var args []interface{}
|
||||
|
||||
if isActive {
|
||||
// Set the agent as featured
|
||||
query = `
|
||||
INSERT INTO "FeaturedAgent" (agent_id, featured_categories, is_active)
|
||||
VALUES ($1, $2, $3)
|
||||
ON CONFLICT (agent_id) DO UPDATE
|
||||
SET featured_categories = $2, is_active = $3
|
||||
RETURNING agent_id, featured_categories, is_active
|
||||
`
|
||||
args = []interface{}{agentID, featuredCategories, isActive}
|
||||
} else {
|
||||
// Unset the agent as featured
|
||||
query = `
|
||||
DELETE FROM "FeaturedAgent"
|
||||
WHERE agent_id = $1
|
||||
RETURNING agent_id, featured_categories, is_active
|
||||
`
|
||||
args = []interface{}{agentID}
|
||||
}
|
||||
|
||||
var featuredAgent models.FeaturedAgent
|
||||
err = db.QueryRow(ctx, query, args...).Scan(
|
||||
&featuredAgent.AgentID,
|
||||
&featuredAgent.FeaturedCategories,
|
||||
&featuredAgent.IsActive,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
if err == pgx.ErrNoRows && !isActive {
|
||||
logger.Info("Agent was not featured, no action needed", zap.String("agentID", agentID))
|
||||
return nil, nil
|
||||
}
|
||||
logger.Error("Failed to set agent featured status", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to set agent featured status: %w", err)
|
||||
}
|
||||
|
||||
if isActive {
|
||||
logger.Info("Agent set as featured successfully", zap.String("agentID", agentID))
|
||||
} else {
|
||||
logger.Info("Agent unset as featured successfully", zap.String("agentID", agentID))
|
||||
}
|
||||
return &featuredAgent, nil
|
||||
}
|
||||
|
||||
func GetAgentFeatured(ctx context.Context, db *pgxpool.Pool, agentID string) (*models.FeaturedAgent, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetAgentFeatured"))
|
||||
logger.Info("Getting featured agent", zap.String("agentID", agentID))
|
||||
|
||||
query := `
|
||||
SELECT agent_id, featured_categories, is_active
|
||||
FROM "FeaturedAgent"
|
||||
WHERE agent_id = $1
|
||||
`
|
||||
|
||||
var featuredAgent models.FeaturedAgent
|
||||
err := db.QueryRow(ctx, query, agentID).Scan(
|
||||
&featuredAgent.AgentID,
|
||||
&featuredAgent.FeaturedCategories,
|
||||
&featuredAgent.IsActive,
|
||||
)
|
||||
|
||||
if err == pgx.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error("Failed to get featured agent", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to get featured agent: %w", err)
|
||||
}
|
||||
|
||||
logger.Info("Featured agent retrieved successfully", zap.String("agentID", agentID))
|
||||
return &featuredAgent, nil
|
||||
}
|
||||
|
||||
func RemoveFeaturedCategory(ctx context.Context, db *pgxpool.Pool, agentID string, category string) (*models.FeaturedAgent, error) {
|
||||
logger := zap.L().With(zap.String("function", "RemoveFeaturedCategory"))
|
||||
logger.Info("Removing featured category", zap.String("agentID", agentID), zap.String("category", category))
|
||||
|
||||
query := `
|
||||
UPDATE "FeaturedAgent"
|
||||
SET featured_categories = array_remove(featured_categories, $1)
|
||||
WHERE agent_id = $2
|
||||
RETURNING agent_id, featured_categories, is_active
|
||||
`
|
||||
|
||||
var featuredAgent models.FeaturedAgent
|
||||
err := db.QueryRow(ctx, query, category, agentID).Scan(
|
||||
&featuredAgent.AgentID,
|
||||
&featuredAgent.FeaturedCategories,
|
||||
&featuredAgent.IsActive,
|
||||
)
|
||||
|
||||
if err == pgx.ErrNoRows {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Error("Failed to remove featured category", zap.Error(err))
|
||||
return nil, fmt.Errorf("failed to remove featured category: %w", err)
|
||||
}
|
||||
|
||||
logger.Info("Featured category removed successfully", zap.String("agentID", agentID), zap.String("category", category))
|
||||
return &featuredAgent, nil
|
||||
}
|
||||
|
||||
func GetNotFeaturedAgents(ctx context.Context, db *pgxpool.Pool, page, pageSize int) ([]models.Agent, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetNotFeaturedAgents"))
|
||||
logger.Info("Getting not featured agents", zap.Int("page", page), zap.Int("pageSize", pageSize))
|
||||
|
||||
offset := (page - 1) * pageSize
|
||||
|
||||
query := `
|
||||
SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph
|
||||
FROM "Agents" a
|
||||
LEFT JOIN "FeaturedAgent" fa ON a.id = fa.agent_id
|
||||
WHERE (fa.agent_id IS NULL OR fa.featured_categories = '{}')
|
||||
AND a.submission_status = 'APPROVED'
|
||||
ORDER BY a.created_at DESC
|
||||
LIMIT $1 OFFSET $2
|
||||
`
|
||||
|
||||
rows, err := db.Query(ctx, query, pageSize, offset)
|
||||
if err != nil {
|
||||
logger.Error("Failed to query not featured agents", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
var agents []models.Agent
|
||||
for rows.Next() {
|
||||
var agent models.Agent
|
||||
err := rows.Scan(
|
||||
&agent.ID,
|
||||
&agent.Name,
|
||||
&agent.Description,
|
||||
&agent.Author,
|
||||
&agent.Keywords,
|
||||
&agent.Categories,
|
||||
&agent.Graph,
|
||||
)
|
||||
if err != nil {
|
||||
logger.Error("Failed to scan not featured agent row", zap.Error(err))
|
||||
return nil, err
|
||||
}
|
||||
agents = append(agents, agent)
|
||||
}
|
||||
|
||||
logger.Info("Not featured agents retrieved", zap.Int("count", len(agents)))
|
||||
return agents, nil
|
||||
}
|
||||
|
||||
func GetAgentSubmissions(ctx context.Context, db *pgxpool.Pool, page, pageSize int, name, keyword, category *string, sortBy, sortOrder string) ([]models.AgentWithMetadata, int, error) {
|
||||
logger := zap.L().With(zap.String("function", "GetAgentSubmissions"))
|
||||
logger.Info("Getting agent submissions", zap.Int("page", page), zap.Int("pageSize", pageSize))
|
||||
|
||||
offset := (page - 1) * pageSize
|
||||
|
||||
query := `
|
||||
SELECT a.id, a.name, a.description, a.author, a.keywords, a.categories, a.graph, a.created_at, a.updated_at, a.version, a.submission_status, a.submission_review_date, a.submission_review_comments
|
||||
FROM "Agents" a
|
||||
WHERE a.submission_status = 'PENDING'
|
||||
`
|
||||
|
||||
args := []interface{}{}
|
||||
argCount := 1
|
||||
|
||||
if name != nil {
|
||||
query += fmt.Sprintf(" AND a.name ILIKE $%d", argCount)
|
||||
args = append(args, "%"+*name+"%")
|
||||
argCount++
|
||||
}
|
||||
|
||||
if keyword != nil {
|
||||
query += fmt.Sprintf(" AND $%d = ANY(a.keywords)", argCount)
|
||||
args = append(args, *keyword)
|
||||
argCount++
|
||||
}
|
||||
|
||||
if category != nil {
|
||||
query += fmt.Sprintf(" AND $%d = ANY(a.categories)", argCount)
|
||||
args = append(args, *category)
|
||||
argCount++
|
||||
}
|
||||
|
||||
// Add sorting
|
||||
query += fmt.Sprintf(" ORDER BY a.%s %s", sortBy, sortOrder)
|
||||
|
||||
// Add pagination
|
||||
query += fmt.Sprintf(" LIMIT $%d OFFSET $%d", argCount, argCount+1)
|
||||
args = append(args, pageSize, offset)
|
||||
|
||||
rows, err := db.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
logger.Error("Failed to query agent submissions", zap.Error(err))
|
||||
return nil, 0, err
|
||||
}
|
||||
defer rows.Close()
|
||||
|
||||
	var agents []models.AgentWithMetadata
	for rows.Next() {
		var agent models.AgentWithMetadata
		err := rows.Scan(
			&agent.ID,
			&agent.Name,
			&agent.Description,
			&agent.Author,
			&agent.Keywords,
			&agent.Categories,
			&agent.Graph,
			&agent.CreatedAt,
			&agent.UpdatedAt,
			&agent.Version,
			&agent.SubmissionStatus,
			&agent.SubmissionReviewDate,
			&agent.SubmissionReviewComments,
		)
		if err != nil {
			logger.Error("Failed to scan agent submission row", zap.Error(err))
			return nil, 0, err
		}
		agents = append(agents, agent)
	}

	// Get total count. The column name is quoted to match the camelCase
	// identifier created by the migration below; an unquoted snake_case name
	// would fold to lowercase and not match.
	countQuery := `SELECT COUNT(*) FROM "Agents" WHERE "submissionStatus" = 'PENDING'`
	var totalCount int
	err = db.QueryRow(ctx, countQuery).Scan(&totalCount)
	if err != nil {
		logger.Error("Failed to get total count of agent submissions", zap.Error(err))
		return nil, 0, err
	}

	logger.Info("Agent submissions retrieved", zap.Int("count", len(agents)))
	return agents, totalCount, nil
}

func ReviewSubmission(ctx context.Context, db *pgxpool.Pool, agentID string, version int, status models.SubmissionStatus, comments *string) (*models.AgentWithMetadata, error) {
	logger := zap.L().With(zap.String("function", "ReviewSubmission"))
	logger.Info("Reviewing agent submission", zap.String("agentID", agentID), zap.Int("version", version))

	// Identifiers are quoted to match the camelCase column names defined in the migration.
	query := `
		UPDATE "Agents"
		SET "submissionStatus" = $1,
		    "submissionReviewDate" = NOW(),
		    "submissionReviewComments" = $2
		WHERE id = $3 AND version = $4
		RETURNING id, name, description, author, keywords, categories, graph, "createdAt", "updatedAt", version, "submissionStatus", "submissionReviewDate", "submissionReviewComments"
	`

	var agent models.AgentWithMetadata
	err := db.QueryRow(ctx, query, status, comments, agentID, version).Scan(
		&agent.ID,
		&agent.Name,
		&agent.Description,
		&agent.Author,
		&agent.Keywords,
		&agent.Categories,
		&agent.Graph,
		&agent.CreatedAt,
		&agent.UpdatedAt,
		&agent.Version,
		&agent.SubmissionStatus,
		&agent.SubmissionReviewDate,
		&agent.SubmissionReviewComments,
	)

	if err != nil {
		if err == pgx.ErrNoRows {
			logger.Error("Agent submission not found", zap.String("agentID", agentID), zap.Int("version", version))
			return nil, fmt.Errorf("agent submission not found")
		}
		logger.Error("Failed to review agent submission", zap.Error(err))
		return nil, err
	}

	logger.Info("Agent submission reviewed successfully", zap.String("agentID", agentID), zap.Int("version", version))
	return &agent, nil
}

func GetAllCategories(ctx context.Context, db *pgxpool.Pool) ([]string, error) {
	logger := zap.L().With(zap.String("function", "GetAllCategories"))
	logger.Info("Getting all categories")

	query := `
		SELECT DISTINCT unnest(categories) AS category
		FROM "Agents"
		ORDER BY category
	`

	rows, err := db.Query(ctx, query)
	if err != nil {
		logger.Error("Failed to query categories", zap.Error(err))
		return nil, err
	}
	defer rows.Close()

	var categories []string
	for rows.Next() {
		var category string
		err := rows.Scan(&category)
		if err != nil {
			logger.Error("Failed to scan category row", zap.Error(err))
			return nil, err
		}
		categories = append(categories, category)
	}

	logger.Info("Categories retrieved", zap.Int("count", len(categories)))
	return categories, nil
}
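For orientation, here is a minimal sketch of how `ReviewSubmission` might be wired into a gin route. The handler name, route shape, `database` package path, the string-backed `SubmissionStatus` conversion, and the hard-coded version are all assumptions for illustration, not part of this diff:

```go
package handlers // illustrative glue code; not part of the diff

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/jackc/pgx/v5/pgxpool"

	"github.com/swiftyos/market/database" // assumed location of the functions above
	"github.com/swiftyos/market/models"
)

// ReviewSubmissionHandler shows one plausible way to call ReviewSubmission from gin.
func ReviewSubmissionHandler(db *pgxpool.Pool) gin.HandlerFunc {
	return func(c *gin.Context) {
		agentID := c.Param("agent_id")
		// Assumes models.SubmissionStatus is a string-backed type ('APPROVED' or 'REJECTED').
		status := models.SubmissionStatus(c.Query("status"))
		var comments *string
		if v, ok := c.GetQuery("comments"); ok {
			comments = &v
		}
		// Version is fixed to 1 here purely for illustration.
		agent, err := database.ReviewSubmission(c.Request.Context(), db, agentID, 1, status, comments)
		if err != nil {
			c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
			return
		}
		c.JSON(http.StatusOK, agent)
	}
}
```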
@@ -0,0 +1,22 @@
-- Drop foreign key constraints
ALTER TABLE "AnalyticsTracker" DROP CONSTRAINT IF EXISTS "AnalyticsTracker_agentId_fkey";
ALTER TABLE "InstallTracker" DROP CONSTRAINT IF EXISTS "InstallTracker_marketplaceAgentId_fkey";
ALTER TABLE "FeaturedAgent" DROP CONSTRAINT IF EXISTS "FeaturedAgent_agentId_fkey";

-- Drop indexes
DROP INDEX IF EXISTS "FeaturedAgent_agentId_key";
DROP INDEX IF EXISTS "FeaturedAgent_id_key";
DROP INDEX IF EXISTS "InstallTracker_marketplaceAgentId_installedAgentId_key";
DROP INDEX IF EXISTS "AnalyticsTracker_agentId_key";
DROP INDEX IF EXISTS "AnalyticsTracker_id_key";
DROP INDEX IF EXISTS "Agents_id_key";

-- Drop tables
DROP TABLE IF EXISTS "FeaturedAgent";
DROP TABLE IF EXISTS "InstallTracker";
DROP TABLE IF EXISTS "AnalyticsTracker";
DROP TABLE IF EXISTS "Agents";

-- Drop enums
DROP TYPE IF EXISTS "InstallationLocation";
DROP TYPE IF EXISTS "SubmissionStatus";
@@ -0,0 +1,86 @@
-- CreateEnum
CREATE TYPE "SubmissionStatus" AS ENUM ('PENDING', 'APPROVED', 'REJECTED');

-- CreateEnum
CREATE TYPE "InstallationLocation" AS ENUM ('LOCAL', 'CLOUD');

-- CreateTable
CREATE TABLE "Agents" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "version" INTEGER NOT NULL DEFAULT 1,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "submissionDate" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "submissionReviewDate" TIMESTAMP(3),
    "submissionStatus" "SubmissionStatus" NOT NULL DEFAULT 'PENDING',
    "submissionReviewComments" TEXT,
    "name" TEXT,
    "description" TEXT,
    "author" TEXT,
    "keywords" TEXT[],
    "categories" TEXT[],
    "search" tsvector DEFAULT ''::tsvector,
    "graph" JSONB NOT NULL,

    CONSTRAINT "Agents_pkey" PRIMARY KEY ("id","version")
);

-- CreateTable
CREATE TABLE "AnalyticsTracker" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "agentId" UUID NOT NULL,
    "views" INTEGER NOT NULL,
    "downloads" INTEGER NOT NULL,

    CONSTRAINT "AnalyticsTracker_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "InstallTracker" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "marketplaceAgentId" UUID NOT NULL,
    "installedAgentId" UUID NOT NULL,
    "installationLocation" "InstallationLocation" NOT NULL,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "InstallTracker_pkey" PRIMARY KEY ("id")
);

-- CreateTable
CREATE TABLE "FeaturedAgent" (
    "id" UUID NOT NULL DEFAULT gen_random_uuid(),
    "agentId" UUID NOT NULL,
    "isActive" BOOLEAN NOT NULL DEFAULT false,
    "featuredCategories" TEXT[],
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,

    CONSTRAINT "FeaturedAgent_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE UNIQUE INDEX "Agents_id_key" ON "Agents"("id");

-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_id_key" ON "AnalyticsTracker"("id");

-- CreateIndex
CREATE UNIQUE INDEX "AnalyticsTracker_agentId_key" ON "AnalyticsTracker"("agentId");

-- CreateIndex
CREATE UNIQUE INDEX "InstallTracker_marketplaceAgentId_installedAgentId_key" ON "InstallTracker"("marketplaceAgentId", "installedAgentId");

-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_id_key" ON "FeaturedAgent"("id");

-- CreateIndex
CREATE UNIQUE INDEX "FeaturedAgent_agentId_key" ON "FeaturedAgent"("agentId");

-- AddForeignKey
ALTER TABLE "AnalyticsTracker" ADD CONSTRAINT "AnalyticsTracker_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "InstallTracker" ADD CONSTRAINT "InstallTracker_marketplaceAgentId_fkey" FOREIGN KEY ("marketplaceAgentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

-- AddForeignKey
ALTER TABLE "FeaturedAgent" ADD CONSTRAINT "FeaturedAgent_agentId_fkey" FOREIGN KEY ("agentId") REFERENCES "Agents"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
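The table is keyed on the (id, version) pair — note that `ReviewSubmission` above matches on both columns in its `WHERE` clause — so point lookups should supply both values. A minimal sketch of such a lookup, with an illustrative function name that is not part of this diff:

```go
package database // illustrative placement; not part of the diff

import (
	"context"

	"github.com/jackc/pgx/v5/pgxpool"
)

// AgentVersionExists checks for one (id, version) row via the
// composite primary key "Agents_pkey". Illustrative only.
func AgentVersionExists(ctx context.Context, db *pgxpool.Pool, agentID string, version int) (bool, error) {
	var exists bool
	err := db.QueryRow(ctx,
		`SELECT EXISTS (SELECT 1 FROM "Agents" WHERE id = $1 AND version = $2)`,
		agentID, version,
	).Scan(&exists)
	return exists, err
}
```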
@@ -0,0 +1,11 @@
-- Remove sample data from FeaturedAgent table
DELETE FROM "FeaturedAgent" WHERE "agentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297');

-- Remove sample data from InstallTracker table
DELETE FROM "InstallTracker" WHERE "marketplaceAgentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');

-- Remove sample data from AnalyticsTracker table
DELETE FROM "AnalyticsTracker" WHERE "agentId" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');

-- Remove sample data from Agents table
DELETE FROM "Agents" WHERE "id" IN ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '9f332ff3-4c74-4f5b-9838-65938a06711f');
@@ -0,0 +1,86 @@
-- Sample data for Agents table (6 agents)

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', 'AI Recruiter', 'An AI-powered tool that assists HR teams with talent acquisition, screening, and shortlisting.', 'Author1', ARRAY['recruitment', 'HR'], ARRAY['human resources', 'talent management'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 'Customer Service Bot', 'A chatbot that provides 24/7 support and assistance to customers, handling common inquiries and issues.', 'Author2', ARRAY['customer service', 'chatbot'], ARRAY['customer experience', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', 'Financial Advisor', 'An AI-powered financial advisor that offers personalized investment recommendations and portfolio management.', 'Author3', ARRAY['finance', 'investment'], ARRAY['wealth management', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 'AI Content Writer', 'An AI-powered tool that generates high-quality content for websites, blogs, and marketing materials.', 'Author4', ARRAY['content writing', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph", "submissionStatus")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', 'AI Image Generator', 'An AI-powered tool that creates realistic images based on text prompts.', 'Author5', ARRAY['image generation', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}', 'APPROVED');

INSERT INTO "Agents" ("id", "name", "description", "author", "keywords", "categories", "graph")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', 'AI Video Editor', 'An AI-powered tool that edits and enhances videos with advanced AI algorithms.', 'Author6', ARRAY['video editing', 'AI'], ARRAY['marketing', 'artificial intelligence'], '{"key": "value"}');

-- Sample data for AnalyticsTracker table (6 agents)
INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', 200, 80);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', 150, 60);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', 100, 40);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', 120, 50);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', 130, 55);

INSERT INTO "AnalyticsTracker" ("agentId", "views", "downloads")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', 140, 60);

-- Sample data for InstallTracker table (12 installs)
INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac11', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac12', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac13', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac14', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac15', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac16', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', '244f809e-1eee-4a36-a49b-ac2db008ac17', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', '244f809e-1eee-4a36-a49b-ac2db008ac18', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', '244f809e-1eee-4a36-a49b-ac2db008ac19', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('b47e40a7-ad5f-4b29-9eac-abd5b728f19a', '244f809e-1eee-4a36-a49b-ac2db008ac20', 'LOCAL');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('a4d3598f-6180-4e6d-96bf-6e15c3de05a9', '244f809e-1eee-4a36-a49b-ac2db008ac22', 'CLOUD');

INSERT INTO "InstallTracker" ("marketplaceAgentId", "installedAgentId", "installationLocation")
VALUES ('9f332ff3-4c74-4f5b-9838-65938a06711f', '244f809e-1eee-4a36-a49b-ac2db008ac21', 'CLOUD');

-- Sample data for FeaturedAgent table (3 featured agents)
INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('b609e5fd-c992-4be9-b68f-afc1980f93c0', true, ARRAY['human resources', 'talent management']);

INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('3b6d8f75-99d3-41e3-b484-4b2c5f835f5b', true, ARRAY['customer experience', 'artificial intelligence']);

INSERT INTO "FeaturedAgent" ("agentId", "isActive", "featuredCategories")
VALUES ('eaa773b1-5efa-485f-b2f0-2e05bae6d297', true, ARRAY['wealth management', 'artificial intelligence']);
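Note that only the sixth agent ('AI Video Editor') omits `"submissionStatus"`, so it falls back to the migration's `'PENDING'` default; it is the one seeded row that the pending-submission queries above will return.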
47 rnd/rest-api-go/docker-compose.yml Normal file
@@ -0,0 +1,47 @@
services:
  postgres:
    image: ankane/pgvector:latest
    environment:
      - POSTGRES_USER=agpt_user
      - POSTGRES_PASSWORD=pass123
      - POSTGRES_DB=agpt_marketplace
    healthcheck:
      test: pg_isready -U $$POSTGRES_USER -d $$POSTGRES_DB
      interval: 10s
      timeout: 5s
      retries: 5
    ports:
      - "5433:5432"

  market:
    build:
      context: .
      dockerfile: Dockerfile
    ports:
      - "8015:8015"
    depends_on:
      postgres:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8015/metrics"]
      interval: 30s
      timeout: 10s
      retries: 3

  prometheus:
    image: prom/prometheus:latest
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
    ports:
      - "9090:9090"
    depends_on:
      - market

  grafana:
    image: grafana/grafana:latest
    ports:
      - "9091:3000"
    depends_on:
      - prometheus
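With this file saved alongside the service's Dockerfile and a `prometheus.yml`, `docker compose up -d --build` should bring up Postgres on host port 5433, the market API on 8015 (health-checked through its `/metrics` endpoint), Prometheus on 9090, and Grafana on 9091; the exact invocation is an assumption about the local Docker setup rather than something this diff prescribes.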
631 rnd/rest-api-go/docs/docs.go Normal file
@@ -0,0 +1,631 @@
// Package docs Code generated by swaggo/swag. DO NOT EDIT
package docs

import "github.com/swaggo/swag"

const docTemplate = `{
    "schemes": {{ marshal .Schemes }},
    "swagger": "2.0",
    "info": {
        "description": "{{escape .Description}}",
        "title": "{{.Title}}",
        "contact": {},
        "version": "{{.Version}}"
    },
    "host": "{{.Host}}",
    "basePath": "{{.BasePath}}",
    "paths": {
        "/agent/featured/{agent_id}": {
            "get": {
                "description": "Get the featured agent for a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            },
            "post": {
                "description": "Set an agent as featured in a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Set Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            },
            "delete": {
                "description": "Unset an agent as featured in a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Unset Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/not-featured": {
            "get": {
                "description": "Get a list of agents that are not featured",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Not Featured Agents",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "page_size",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/submissions": {
            "get": {
                "description": "Get a list of agent submissions",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Agent Submissions",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "page_size",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/submissions/{agent_id}": {
            "post": {
                "description": "Review an agent submission",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Review Submission",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Status",
                        "name": "status",
                        "in": "query",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents": {
            "get": {
                "description": "Get Agents",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Agents",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Agent Name",
                        "name": "name",
                        "in": "query"
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Keywords",
                        "name": "keywords",
                        "in": "query"
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Categories",
                        "name": "categories",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            },
            "post": {
                "description": "Submit an agent for review",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Submit Agent",
                "parameters": [
                    {
                        "description": "Agent details",
                        "name": "agent",
                        "in": "body",
                        "required": true,
                        "schema": {
                            "$ref": "#/definitions/models.AddAgentRequest"
                        }
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents/featured": {
            "get": {
                "description": "Get featured agents based on category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Featured Agents",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            }
        },
        "/agents/search": {
            "get": {
                "description": "Search for agents based on query and categories",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Search Agents",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Search query",
                        "name": "q",
                        "in": "query",
                        "required": true
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Categories",
                        "name": "categories",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Sort by",
                        "name": "sortBy",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Sort order",
                        "name": "sortOrder",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            }
        },
        "/agents/{id}": {
            "get": {
                "description": "Get details of a specific agent by ID",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Agent Details",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "id",
                        "in": "path",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents/{id}/download": {
            "get": {
                "description": "Download an agent file by ID",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Download Agent File",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "id",
                        "in": "path",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/categories": {
            "get": {
                "description": "Get a list of categories",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Categories",
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "type": "string"
                            }
                        }
                    }
                }
            }
        }
    },
    "definitions": {
        "models.AddAgentRequest": {
            "type": "object",
            "properties": {
                "author": {
                    "type": "string"
                },
                "categories": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "graph": {
                    "$ref": "#/definitions/models.Graph"
                },
                "keywords": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                }
            }
        },
        "models.Agent": {
            "type": "object",
            "properties": {
                "author": {
                    "type": "string"
                },
                "categories": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "description": {
                    "type": "string"
                },
                "graph": {
                    "$ref": "#/definitions/models.Graph"
                },
                "id": {
                    "type": "string"
                },
                "keywords": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "name": {
                    "type": "string"
                }
            }
        },
        "models.Graph": {
            "type": "object",
            "properties": {
                "description": {
                    "type": "string"
                },
                "name": {
                    "type": "string"
                }
            }
        }
    }
}`

// SwaggerInfo holds exported Swagger Info so clients can modify it
var SwaggerInfo = &swag.Spec{
	Version:          "",
	Host:             "",
	BasePath:         "",
	Schemes:          []string{},
	Title:            "",
	Description:      "",
	InfoInstanceName: "swagger",
	SwaggerTemplate:  docTemplate,
	LeftDelim:        "{{",
	RightDelim:       "}}",
}

func init() {
	swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo)
}
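Per its header, docs.go is generated by swaggo/swag, and its `init` only registers the spec; serving it still requires mounting the swagger handler on the router. A minimal wiring sketch, assuming the gin-swagger and swaggo/files packages already listed in go.mod below (the `main` package and port are illustrative, with the port taken from the compose file above):

```go
package main // illustrative wiring; not part of the diff

import (
	"github.com/gin-gonic/gin"
	swaggerFiles "github.com/swaggo/files"
	ginSwagger "github.com/swaggo/gin-swagger"

	_ "github.com/swiftyos/market/docs" // blank import runs the init() that registers the spec
)

func main() {
	r := gin.Default()
	// Serves the generated spec at /swagger/index.html.
	r.GET("/swagger/*any", ginSwagger.WrapHandler(swaggerFiles.Handler))
	r.Run(":8015")
}
```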
602 rnd/rest-api-go/docs/swagger.json Normal file
@@ -0,0 +1,602 @@
{
    "swagger": "2.0",
    "info": {
        "contact": {}
    },
    "paths": {
        "/agent/featured/{agent_id}": {
            "get": {
                "description": "Get the featured agent for a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            },
            "post": {
                "description": "Set an agent as featured in a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Set Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            },
            "delete": {
                "description": "Unset an agent as featured in a specific category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Unset Agent Featured",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/not-featured": {
            "get": {
                "description": "Get a list of agents that are not featured",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Not Featured Agents",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "page_size",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/submissions": {
            "get": {
                "description": "Get a list of agent submissions",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Agent Submissions",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "page_size",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agent/submissions/{agent_id}": {
            "post": {
                "description": "Review an agent submission",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Review Submission",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "agent_id",
                        "in": "path",
                        "required": true
                    },
                    {
                        "type": "string",
                        "description": "Status",
                        "name": "status",
                        "in": "query",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents": {
            "get": {
                "description": "Get Agents",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Agents",
                "parameters": [
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Agent Name",
                        "name": "name",
                        "in": "query"
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Keywords",
                        "name": "keywords",
                        "in": "query"
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Categories",
                        "name": "categories",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            },
            "post": {
                "description": "Submit an agent for review",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Submit Agent",
                "parameters": [
                    {
                        "description": "Agent details",
                        "name": "agent",
                        "in": "body",
                        "required": true,
                        "schema": {
                            "$ref": "#/definitions/models.AddAgentRequest"
                        }
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents/featured": {
            "get": {
                "description": "Get featured agents based on category",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Featured Agents",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Category",
                        "name": "category",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            }
        },
        "/agents/search": {
            "get": {
                "description": "Search for agents based on query and categories",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Search Agents",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Search query",
                        "name": "q",
                        "in": "query",
                        "required": true
                    },
                    {
                        "type": "array",
                        "items": {
                            "type": "string"
                        },
                        "collectionFormat": "csv",
                        "description": "Categories",
                        "name": "categories",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page number",
                        "name": "page",
                        "in": "query"
                    },
                    {
                        "type": "integer",
                        "description": "Page size",
                        "name": "pageSize",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Sort by",
                        "name": "sortBy",
                        "in": "query"
                    },
                    {
                        "type": "string",
                        "description": "Sort order",
                        "name": "sortOrder",
                        "in": "query"
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "$ref": "#/definitions/models.Agent"
                            }
                        }
                    }
                }
            }
        },
        "/agents/{id}": {
            "get": {
                "description": "Get details of a specific agent by ID",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Get Agent Details",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "id",
                        "in": "path",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/agents/{id}/download": {
            "get": {
                "description": "Download an agent file by ID",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Agents"
                ],
                "summary": "Download Agent File",
                "parameters": [
                    {
                        "type": "string",
                        "description": "Agent ID",
                        "name": "id",
                        "in": "path",
                        "required": true
                    }
                ],
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "$ref": "#/definitions/models.Agent"
                        }
                    }
                }
            }
        },
        "/categories": {
            "get": {
                "description": "Get a list of categories",
                "consumes": [
                    "application/json"
                ],
                "produces": [
                    "application/json"
                ],
                "tags": [
                    "Admin"
                ],
                "summary": "Get Categories",
                "responses": {
                    "200": {
                        "description": "OK",
                        "schema": {
                            "type": "array",
                            "items": {
                                "type": "string"
                            }
                        }
                    }
                }
            }
        }
    },
    "definitions": {
        "models.AddAgentRequest": {
            "type": "object",
            "properties": {
                "author": {
                    "type": "string"
                },
                "categories": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "graph": {
                    "$ref": "#/definitions/models.Graph"
                },
                "keywords": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                }
            }
        },
        "models.Agent": {
            "type": "object",
            "properties": {
                "author": {
                    "type": "string"
                },
                "categories": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "description": {
                    "type": "string"
                },
                "graph": {
                    "$ref": "#/definitions/models.Graph"
                },
                "id": {
                    "type": "string"
                },
                "keywords": {
                    "type": "array",
                    "items": {
                        "type": "string"
                    }
                },
                "name": {
                    "type": "string"
                }
            }
        },
        "models.Graph": {
            "type": "object",
            "properties": {
                "description": {
                    "type": "string"
                },
                "name": {
                    "type": "string"
                }
            }
        }
    }
}
395 rnd/rest-api-go/docs/swagger.yaml Normal file
@@ -0,0 +1,395 @@
definitions:
  models.AddAgentRequest:
    properties:
      author:
        type: string
      categories:
        items:
          type: string
        type: array
      graph:
        $ref: '#/definitions/models.Graph'
      keywords:
        items:
          type: string
        type: array
    type: object
  models.Agent:
    properties:
      author:
        type: string
      categories:
        items:
          type: string
        type: array
      description:
        type: string
      graph:
        $ref: '#/definitions/models.Graph'
      id:
        type: string
      keywords:
        items:
          type: string
        type: array
      name:
        type: string
    type: object
  models.Graph:
    properties:
      description:
        type: string
      name:
        type: string
    type: object
info:
  contact: {}
paths:
  /agent/featured/{agent_id}:
    delete:
      consumes:
      - application/json
      description: Unset an agent as featured in a specific category
      parameters:
      - description: Agent ID
        in: path
        name: agent_id
        required: true
        type: string
      - description: Category
        in: query
        name: category
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Unset Agent Featured
      tags:
      - Admin
    get:
      consumes:
      - application/json
      description: Get the featured agent for a specific category
      parameters:
      - description: Agent ID
        in: path
        name: agent_id
        required: true
        type: string
      - description: Category
        in: query
        name: category
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Get Agent Featured
      tags:
      - Admin
    post:
      consumes:
      - application/json
      description: Set an agent as featured in a specific category
      parameters:
      - description: Agent ID
        in: path
        name: agent_id
        required: true
        type: string
      - description: Category
        in: query
        name: category
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Set Agent Featured
      tags:
      - Admin
  /agent/not-featured:
    get:
      consumes:
      - application/json
      description: Get a list of agents that are not featured
      parameters:
      - description: Page number
        in: query
        name: page
        type: integer
      - description: Page size
        in: query
        name: page_size
        type: integer
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Get Not Featured Agents
      tags:
      - Admin
  /agent/submissions:
    get:
      consumes:
      - application/json
      description: Get a list of agent submissions
      parameters:
      - description: Page number
        in: query
        name: page
        type: integer
      - description: Page size
        in: query
        name: page_size
        type: integer
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Get Agent Submissions
      tags:
      - Admin
  /agent/submissions/{agent_id}:
    post:
      consumes:
      - application/json
      description: Review an agent submission
      parameters:
      - description: Agent ID
        in: path
        name: agent_id
        required: true
        type: string
      - description: Status
        in: query
        name: status
        required: true
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Review Submission
      tags:
      - Admin
  /agents:
    get:
      consumes:
      - application/json
      description: Get Agents
      parameters:
      - description: Page number
        in: query
        name: page
        type: integer
      - description: Page size
        in: query
        name: pageSize
        type: integer
      - description: Agent Name
        in: query
        name: name
        type: string
      - collectionFormat: csv
        description: Keywords
        in: query
        items:
          type: string
        name: keywords
        type: array
      - collectionFormat: csv
        description: Categories
        in: query
        items:
          type: string
        name: categories
        type: array
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            items:
              $ref: '#/definitions/models.Agent'
            type: array
      summary: Get Agents
      tags:
      - Agents
    post:
      consumes:
      - application/json
      description: Submit an agent for review
      parameters:
      - description: Agent details
        in: body
        name: agent
        required: true
        schema:
          $ref: '#/definitions/models.AddAgentRequest'
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Submit Agent
      tags:
      - Agents
  /agents/{id}:
    get:
      consumes:
      - application/json
      description: Get details of a specific agent by ID
      parameters:
      - description: Agent ID
        in: path
        name: id
        required: true
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Get Agent Details
      tags:
      - Agents
  /agents/{id}/download:
    get:
      consumes:
      - application/json
      description: Download an agent file by ID
      parameters:
      - description: Agent ID
        in: path
        name: id
        required: true
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            $ref: '#/definitions/models.Agent'
      summary: Download Agent File
      tags:
      - Agents
  /agents/featured:
    get:
      consumes:
      - application/json
      description: Get featured agents based on category
      parameters:
      - description: Category
        in: query
        name: category
        type: string
      - description: Page number
        in: query
        name: page
        type: integer
      - description: Page size
        in: query
        name: pageSize
        type: integer
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            items:
              $ref: '#/definitions/models.Agent'
            type: array
      summary: Get Featured Agents
      tags:
      - Agents
  /agents/search:
    get:
      consumes:
      - application/json
      description: Search for agents based on query and categories
      parameters:
      - description: Search query
        in: query
        name: q
        required: true
        type: string
      - collectionFormat: csv
        description: Categories
        in: query
        items:
          type: string
        name: categories
        type: array
      - description: Page number
        in: query
        name: page
        type: integer
      - description: Page size
        in: query
        name: pageSize
        type: integer
      - description: Sort by
        in: query
        name: sortBy
        type: string
      - description: Sort order
        in: query
        name: sortOrder
        type: string
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            items:
              $ref: '#/definitions/models.Agent'
            type: array
      summary: Search Agents
      tags:
      - Agents
  /categories:
    get:
      consumes:
      - application/json
      description: Get a list of categories
      produces:
      - application/json
      responses:
        "200":
          description: OK
          schema:
            items:
              type: string
            type: array
      summary: Get Categories
      tags:
      - Admin
swagger: "2.0"
84 rnd/rest-api-go/go.mod Normal file
@@ -0,0 +1,84 @@
module github.com/swiftyos/market

go 1.23.1

require (
	github.com/Depado/ginprom v1.8.1
	github.com/gin-contrib/cors v1.7.2
	github.com/gin-contrib/gzip v1.0.1
	github.com/gin-contrib/zap v1.1.4
	github.com/gin-gonic/gin v1.10.0
	github.com/golang-jwt/jwt/v4 v4.5.0
	github.com/google/uuid v1.4.0
	github.com/jackc/pgx/v5 v5.7.1
	github.com/spf13/viper v1.19.0
	github.com/stretchr/testify v1.9.0
	github.com/swaggo/files v1.0.1
	github.com/swaggo/gin-swagger v1.6.0
	github.com/swaggo/swag v1.16.3
	go.uber.org/zap v1.27.0
)

require (
	github.com/KyleBanks/depth v1.2.1 // indirect
	github.com/beorn7/perks v1.0.1 // indirect
	github.com/bytedance/sonic v1.12.2 // indirect
	github.com/bytedance/sonic/loader v0.2.0 // indirect
	github.com/cespare/xxhash/v2 v2.2.0 // indirect
	github.com/cloudwego/base64x v0.1.4 // indirect
	github.com/cloudwego/iasm v0.2.0 // indirect
	github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
	github.com/fsnotify/fsnotify v1.7.0 // indirect
	github.com/gabriel-vasile/mimetype v1.4.5 // indirect
	github.com/gin-contrib/sse v0.1.0 // indirect
	github.com/go-openapi/jsonpointer v0.21.0 // indirect
	github.com/go-openapi/jsonreference v0.21.0 // indirect
	github.com/go-openapi/spec v0.21.0 // indirect
	github.com/go-openapi/swag v0.23.0 // indirect
	github.com/go-playground/locales v0.14.1 // indirect
	github.com/go-playground/universal-translator v0.18.1 // indirect
	github.com/go-playground/validator/v10 v10.22.1 // indirect
	github.com/goccy/go-json v0.10.3 // indirect
	github.com/hashicorp/hcl v1.0.0 // indirect
	github.com/jackc/pgpassfile v1.0.0 // indirect
	github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect
	github.com/jackc/puddle/v2 v2.2.2 // indirect
	github.com/josharian/intern v1.0.0 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/klauspost/cpuid/v2 v2.2.8 // indirect
	github.com/leodido/go-urn v1.4.0 // indirect
	github.com/magiconair/properties v1.8.7 // indirect
	github.com/mailru/easyjson v0.7.7 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 // indirect
	github.com/mitchellh/mapstructure v1.5.0 // indirect
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/pelletier/go-toml/v2 v2.2.3 // indirect
	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
	github.com/prometheus/client_golang v1.18.0 // indirect
	github.com/prometheus/client_model v0.5.0 // indirect
	github.com/prometheus/common v0.45.0 // indirect
	github.com/prometheus/procfs v0.12.0 // indirect
	github.com/sagikazarmark/locafero v0.6.0 // indirect
	github.com/sagikazarmark/slog-shim v0.1.0 // indirect
	github.com/sourcegraph/conc v0.3.0 // indirect
	github.com/spf13/afero v1.11.0 // indirect
	github.com/spf13/cast v1.7.0 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
	github.com/subosito/gotenv v1.6.0 // indirect
	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
	github.com/ugorji/go/codec v1.2.12 // indirect
	go.uber.org/multierr v1.11.0 // indirect
	golang.org/x/arch v0.10.0 // indirect
	golang.org/x/crypto v0.27.0 // indirect
	golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect
	golang.org/x/net v0.29.0 // indirect
	golang.org/x/sync v0.8.0 // indirect
	golang.org/x/sys v0.25.0 // indirect
	golang.org/x/text v0.18.0 // indirect
	golang.org/x/tools v0.25.0 // indirect
	google.golang.org/protobuf v1.34.2 // indirect
	gopkg.in/ini.v1 v1.67.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
217 rnd/rest-api-go/go.sum Normal file
@@ -0,0 +1,217 @@
github.com/Depado/ginprom v1.8.1 h1:lrQTddbRqlHq1j6SpJDySDumJlR7FEybzdX0PS3HXPc=
github.com/Depado/ginprom v1.8.1/go.mod h1:9Z+ahPJLSeMndDfnDTfiuBn2SKVAuL2yvihApWzof9A=
github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc=
github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE=
github.com/appleboy/gofight/v2 v2.1.2 h1:VOy3jow4vIK8BRQJoC/I9muxyYlJ2yb9ht2hZoS3rf4=
github.com/appleboy/gofight/v2 v2.1.2/go.mod h1:frW+U1QZEdDgixycTj4CygQ48yLTUhplt43+Wczp3rw=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bytedance/sonic v1.12.2 h1:oaMFuRTpMHYLpCntGca65YWt5ny+wAceDERTkT2L9lg=
github.com/bytedance/sonic v1.12.2/go.mod h1:B8Gt/XvtZ3Fqj+iSKMypzymZxw/FVwgIGKzMzT9r/rk=
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/bytedance/sonic/loader v0.2.0 h1:zNprn+lsIP06C/IqCHs3gPQIvnvpKbbxyXQP1iU4kWM=
github.com/bytedance/sonic/loader v0.2.0/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/cloudwego/base64x v0.1.4 h1:jwCgWpFanWmN8xoIUHa2rtzmkd5J2plF/dnLS6Xd/0Y=
github.com/cloudwego/base64x v0.1.4/go.mod h1:0zlkT4Wn5C6NdauXdJRhSKRlJvmclQ1hhJgA0rcu/8w=
github.com/cloudwego/iasm v0.2.0 h1:1KNIy1I1H9hNNFEEH3DVnI4UujN+1zjpuk6gwHLTssg=
github.com/cloudwego/iasm v0.2.0/go.mod h1:8rXZaNYT2n95jn+zTI1sDr+IgcD2GVs0nlbbQPiEFhY=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM=
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8=
github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0=
github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA=
github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM=
github.com/gabriel-vasile/mimetype v1.4.5 h1:J7wGKdGu33ocBOhGy0z653k/lFKLFDPJMG8Gql0kxn4=
github.com/gabriel-vasile/mimetype v1.4.5/go.mod h1:ibHel+/kbxn9x2407k1izTA1S81ku1z/DlgOW2QE0M4=
github.com/gin-contrib/cors v1.7.2 h1:oLDHxdg8W/XDoN/8zamqk/Drgt4oVZDvaV0YmvVICQw=
github.com/gin-contrib/cors v1.7.2/go.mod h1:SUJVARKgQ40dmrzgXEVxj2m7Ig1v1qIboQkPDTQ9t2E=
github.com/gin-contrib/gzip v1.0.1 h1:HQ8ENHODeLY7a4g1Au/46Z92bdGFl74OhxcZble9WJE=
github.com/gin-contrib/gzip v1.0.1/go.mod h1:njt428fdUNRvjuJf16tZMYZ2Yl+WQB53X5wmhDwXvC4=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-contrib/zap v1.1.4 h1:xvxTybg6XBdNtcQLH3Tf0lFr4vhDkwzgLLrIGlNTqIo=
github.com/gin-contrib/zap v1.1.4/go.mod h1:7lgEpe91kLbeJkwBTPgtVBy4zMa6oSBEcvj662diqKQ=
github.com/gin-gonic/gin v1.10.0 h1:nTuyha1TYqgedzytsKYqna+DfLos46nTv2ygFy86HFU=
github.com/gin-gonic/gin v1.10.0/go.mod h1:4PMNQiOhvDRa013RKVbsiNwoyezlm2rm0uX/T7kzp5Y=
github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ=
github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY=
github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ=
github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4=
github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY=
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA=
github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY=
github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY=
github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
github.com/go-playground/validator/v10 v10.22.1 h1:40JcKH+bBNGFczGuoBYgX4I6m/i27HYW8P9FDk5PbgA=
github.com/go-playground/validator/v10 v10.22.1/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/goccy/go-json v0.10.3 h1:KZ5WoDbxAIgm2HNbYckL0se1fHD6rz5j4ywS6ebzDqA=
github.com/goccy/go-json v0.10.3/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.4.0 h1:MtMxsa51/r9yyhkyLsVeVt0B+BGQZzpQiTQ4eHZ8bc4=
github.com/google/uuid v1.4.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM=
github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo=
github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM=
github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs=
github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA=
github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo=
github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4=
github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
|
||||
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
|
||||
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8 h1:+StwCXwm9PdpiEkPyzBXIy+M9KUb4ODm0Zarf1kS5BM=
|
||||
github.com/klauspost/cpuid/v2 v2.2.8/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
|
||||
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
|
||||
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
|
||||
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ=
|
||||
github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI=
|
||||
github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
|
||||
github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
|
||||
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
|
||||
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
|
||||
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
|
||||
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg=
|
||||
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k=
|
||||
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
|
||||
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
|
||||
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M=
|
||||
github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/prometheus/client_golang v1.18.0 h1:HzFfmkOzH5Q8L8G+kSJKUx5dtG87sewO+FoDDqP5Tbk=
|
||||
github.com/prometheus/client_golang v1.18.0/go.mod h1:T+GXkCk5wSJyOqMIzVgvvjFDlkOQntgjkJWKrN5txjA=
|
||||
github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
|
||||
github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
|
||||
github.com/prometheus/common v0.45.0 h1:2BGz0eBc2hdMDLnO/8n0jeB3oPrt2D08CekT0lneoxM=
|
||||
github.com/prometheus/common v0.45.0/go.mod h1:YJmSTw9BoKxJplESWWxlbyttQR4uaEcGyv9MZjVOJsY=
|
||||
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
|
||||
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
|
||||
github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M=
|
||||
github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA=
|
||||
github.com/sagikazarmark/locafero v0.6.0 h1:ON7AQg37yzcRPU69mt7gwhFEBwxI6P9T4Qu3N51bwOk=
|
||||
github.com/sagikazarmark/locafero v0.6.0/go.mod h1:77OmuIc6VTraTXKXIs/uvUxKGUXjE1GbemJYHqdNjX0=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
|
||||
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
|
||||
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
|
||||
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
|
||||
github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8=
|
||||
github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY=
|
||||
github.com/spf13/cast v1.7.0 h1:ntdiHjuueXFgm5nzDRdOS4yfT43P5Fnud6DH50rz/7w=
|
||||
github.com/spf13/cast v1.7.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo=
|
||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI=
|
||||
github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
|
||||
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
|
||||
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
|
||||
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
|
||||
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
|
||||
github.com/swaggo/files v1.0.1 h1:J1bVJ4XHZNq0I46UU90611i9/YzdrF7x92oX1ig5IdE=
|
||||
github.com/swaggo/files v1.0.1/go.mod h1:0qXmMNH6sXNf+73t65aKeB+ApmgxdnkQzVTAj2uaMUg=
|
||||
github.com/swaggo/gin-swagger v1.6.0 h1:y8sxvQ3E20/RCyrXeFfg60r6H0Z+SwpTjMYsMm+zy8M=
|
||||
github.com/swaggo/gin-swagger v1.6.0/go.mod h1:BG00cCEy294xtVpyIAHG6+e2Qzj/xKlRdOqDkvq0uzo=
|
||||
github.com/swaggo/swag v1.16.3 h1:PnCYjPCah8FK4I26l2F/KQ4yz3sILcVUN3cTlBFA9Pg=
|
||||
github.com/swaggo/swag v1.16.3/go.mod h1:DImHIuOFXKpMFAQjcC7FG4m3Dg4+QuUgUzJmKjI/gRk=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
|
||||
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
|
||||
github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65EE=
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto=
|
||||
go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE=
|
||||
go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0=
|
||||
go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y=
|
||||
go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8=
|
||||
go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E=
|
||||
golang.org/x/arch v0.10.0 h1:S3huipmSclq3PJMNe76NGwkBR504WFkQ5dhzWzP8ZW8=
|
||||
golang.org/x/arch v0.10.0/go.mod h1:FEVrYAQjsQXMVJ1nsMoVVXPZg6p2JE2mx8psSWTDQys=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
|
||||
golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk=
|
||||
golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0=
|
||||
golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY=
|
||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
|
||||
golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ=
|
||||
golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
|
||||
golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
|
||||
golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.25.0 h1:oFU9pkj/iJgs+0DT+VMHrx+oBKs/LJMV+Uvg78sl+fE=
|
||||
golang.org/x/tools v0.25.0/go.mod h1:/vtpO8WL1N9cQC3FN5zPqb//fRXskFHbLKk4OW1Q7rg=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/protobuf v1.34.2 h1:6xV6lTsCfpGD21XK49h7MhtcApnLqkfYgPcdHftf6hg=
|
||||
google.golang.org/protobuf v1.34.2/go.mod h1:qYOHts0dSfpeUzUFpOMr/WGzszTmLH+DiWniOlNbLDw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA=
|
||||
gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||
282
rnd/rest-api-go/handlers/admin.go
Normal file
@@ -0,0 +1,282 @@
package handlers

import (
    "net/http"
    "strconv"

    "github.com/gin-gonic/gin"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/swiftyos/market/database"
    "github.com/swiftyos/market/models"
    "go.uber.org/zap"
)

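// requireAdminUser is middleware that aborts with 401/403 unless the
// request context carries an authenticated models.User with the admin role.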
func requireAdminUser() gin.HandlerFunc {
    return func(c *gin.Context) {
        user, exists := c.Get("user")
        if !exists {
            c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "User not found in context"})
            return
        }

        userModel, ok := user.(models.User)
        if !ok {
            c.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "Invalid user model"})
            return
        }

        if userModel.Role != "admin" {
            c.AbortWithStatusJSON(http.StatusForbidden, gin.H{"error": "Admin access required"})
            return
        }

        c.Next()
    }
}

// @BasePath /api/v1/marketplace/admin

// CreateAgentEntry godoc
// @Summary Create Agent Entry
// @Description Create a new agent entry
// @Tags Admin
// @Accept json
// @Produce json
// @Param request body models.AddAgentRequest true "Agent details"
// @Success 200 {object} models.Agent
// @Router /agents [post]
func CreateAgentEntry(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        var request models.AddAgentRequest
        if err := c.ShouldBindJSON(&request); err != nil {
            c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
            return
        }

        agent, err := database.CreateAgentEntry(c.Request.Context(), db, models.Agent{
            Name:        request.Graph.Name,
            Description: request.Graph.Description,
            Author:      request.Author,
            Keywords:    request.Keywords,
            Categories:  request.Categories,
            Graph:       request.Graph,
        })

        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        c.JSON(http.StatusOK, agent)
    }
}

// SetAgentFeatured godoc
// @Summary Set Agent Featured
// @Description Set an agent as featured in a specific category
// @Tags Admin
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Param category query string false "Category"
// @Success 200 {object} models.Agent
// @Router /agent/featured/{agent_id} [post]
func SetAgentFeatured(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        agentID := c.Param("agent_id")
        categories := c.QueryArray("categories")
        if len(categories) == 0 {
            categories = []string{"featured"}
        }

        featuredAgent, err := database.SetAgentFeatured(c.Request.Context(), db, agentID, true, categories)
        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        c.JSON(http.StatusOK, featuredAgent)
    }
}

// GetAgentFeatured godoc
// @Summary Get Agent Featured
// @Description Get the featured agent for a specific category
// @Tags Admin
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Param category query string false "Category"
// @Success 200 {object} models.Agent
// @Router /agent/featured/{agent_id} [get]
func GetAgentFeatured(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        agentID := c.Param("agent_id")

        featuredAgent, err := database.GetAgentFeatured(c.Request.Context(), db, agentID)
        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        if featuredAgent == nil {
            c.JSON(http.StatusNotFound, gin.H{"message": "Featured agent not found"})
            return
        }

        c.JSON(http.StatusOK, featuredAgent)
    }
}

// UnsetAgentFeatured godoc
// @Summary Unset Agent Featured
// @Description Unset an agent as featured in a specific category
// @Tags Admin
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Param category query string false "Category"
// @Success 200 {object} models.Agent
// @Router /agent/featured/{agent_id} [delete]
func UnsetAgentFeatured(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        agentID := c.Param("agent_id")
        category := c.DefaultQuery("category", "featured")

        featuredAgent, err := database.RemoveFeaturedCategory(c.Request.Context(), db, agentID, category)
        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        if featuredAgent == nil {
            c.JSON(http.StatusNotFound, gin.H{"message": "Featured agent not found"})
            return
        }

        c.JSON(http.StatusOK, featuredAgent)
    }
}

// GetNotFeaturedAgents godoc
// @Summary Get Not Featured Agents
// @Description Get a list of agents that are not featured
// @Tags Admin
// @Accept json
// @Produce json
// @Param page query int false "Page number"
// @Param page_size query int false "Page size"
// @Success 200 {object} models.Agent
// @Router /agent/not-featured [get]
func GetNotFeaturedAgents(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        page, _ := strconv.Atoi(c.DefaultQuery("page", "1"))
        pageSize, _ := strconv.Atoi(c.DefaultQuery("page_size", "10"))

        agents, err := database.GetNotFeaturedAgents(c.Request.Context(), db, page, pageSize)
        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        c.JSON(http.StatusOK, gin.H{
            "agents":      agents,
            "total_count": len(agents),
            "page":        page,
            "page_size":   pageSize,
        })
    }
}

// GetAgentSubmissions godoc
// @Summary Get Agent Submissions
// @Description Get a list of agent submissions
// @Tags Admin
// @Accept json
// @Produce json
// @Param page query int false "Page number"
// @Param page_size query int false "Page size"
// @Success 200 {object} models.Agent
// @Router /agent/submissions [get]
func GetAgentSubmissions(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        // TODO: Implement GetAgentSubmissions
        c.JSON(http.StatusNotImplemented, gin.H{"message": "Not Implemented: GetAgentSubmissions"})
    }
}

// ReviewSubmission godoc
// @Summary Review Submission
// @Description Review an agent submission
// @Tags Admin
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Param status query string true "Status"
// @Success 200 {object} models.Agent
// @Router /agent/submissions/{agent_id} [post]
func ReviewSubmission(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        requireAdminUser()(c)
        if c.IsAborted() {
            return
        }

        // TODO: Implement ReviewSubmission
        c.JSON(http.StatusNotImplemented, gin.H{"message": "Not Implemented: ReviewSubmission"})
    }
}

// GetCategories godoc
// @Summary Get Categories
// @Description Get a list of categories
// @Tags Admin
// @Accept json
// @Produce json
// @Success 200 {array} string
// @Router /categories [get]
func GetCategories(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        if c.IsAborted() {
            return
        }

        categories, err := database.GetAllCategories(c.Request.Context(), db)
        if err != nil {
            c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
            return
        }

        c.JSON(http.StatusOK, categories)
    }
}
369
rnd/rest-api-go/handlers/agents.go
Normal file
@@ -0,0 +1,369 @@
package handlers

import (
    "fmt"
    "net/http"
    "strconv"

    "github.com/gin-gonic/gin"
    "github.com/jackc/pgx/v5/pgxpool"
    "go.uber.org/zap"

    "github.com/swiftyos/market/database"
    "github.com/swiftyos/market/models"
    "github.com/swiftyos/market/utils"
)

// @BasePath /api/v1/marketplace

// GetAgents godoc
// @Summary Get Agents
// @Schemes
// @Description Get Agents
// @Tags Agents
// @Accept json
// @Produce json
// @Param page query int false "Page number"
// @Param pageSize query int false "Page size"
// @Param name query string false "Agent Name"
// @Param keywords query []string false "Keywords"
// @Param categories query []string false "Categories"
// @Success 200 {array} models.Agent
// @Router /agents [get]
func GetAgents(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "GetAgents")).With(zap.String("file", "handlers/agents.go"))

        logger.Info("Get Agents Request Started")
        // Get pagination parameters from context
        page := getPageFromContext(c.Request.Context())
        pageSize := getPageSizeFromContext(c.Request.Context())

        // Get filter parameters from context
        name := getNameFromContext(c.Request.Context())
        keywords := getKeywordsFromContext(c.Request.Context())
        categories := getCategoriesFromContext(c.Request.Context())

        logger.Debug("Request parameters",
            zap.Int("page", page),
            zap.Int("pageSize", pageSize),
            zap.String("name", utils.StringOrNil(name)),
            zap.String("keywords", utils.StringOrNil(keywords)),
            zap.String("categories", utils.StringOrNil(categories)))

        agents, err := database.GetAgents(c.Request.Context(), db, log_ctx, page, pageSize, name, keywords, categories)

        if err != nil {
            logger.Error("Database request returned an error", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch agents"})
            return
        }

        c.JSON(http.StatusOK, agents)
    }
}

// @Summary Submit Agent
// @Description Submit an agent for review
// @Tags Agents
// @Accept json
// @Produce json
// @Param agent body models.AddAgentRequest true "Agent details"
// @Success 200 {object} models.Agent
// @Router /agents [post]
func SubmitAgent(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "SubmitAgent"))

        var request models.AddAgentRequest
        if err := c.ShouldBindJSON(&request); err != nil {
            c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
            return
        }
        // Log the request body only after it has been bound
        logger.Debug("Add Agent Request body", zap.Any("request", request))

        user, exists := c.Get("user")
        if !exists {
            c.JSON(http.StatusUnauthorized, gin.H{"error": "User not authenticated"})
            return
        }

        agent, err := database.SubmitAgent(c.Request.Context(), db, request, user)
        if err != nil {
            logger.Error("Failed to submit agent", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to submit agent"})
            return
        }

        c.JSON(http.StatusOK, agent)
    }
}

// @Summary Get Agent Details
// @Description Get details of a specific agent by ID
// @Tags Agents
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Success 200 {object} models.Agent
// @Router /agents/{agent_id} [get]
func GetAgentDetails(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "GetAgentDetails"))

        // The route is registered as /:agent_id, so read that parameter
        agentID := c.Param("agent_id")
        logger.Debug("Agent ID", zap.String("agentID", agentID))

        if agentID == "" {
            logger.Error("Agent ID is required")
            c.JSON(http.StatusBadRequest, gin.H{"error": "Agent ID is required"})
            return
        }

        agent, err := database.GetAgentDetails(c.Request.Context(), db, agentID)
        if err != nil {
            if err.Error() == "agent not found" {
                logger.Error("Agent not found", zap.String("agentID", agentID))
                c.JSON(http.StatusNotFound, gin.H{"error": "Agent not found"})
                return
            }
            logger.Error("Failed to fetch agent details", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch agent details"})
            return
        }

        c.JSON(http.StatusOK, agent)
    }
}

// @Summary Download Agent
// @Description Download an agent by ID
// @Tags Agents
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Success 200 {object} models.Agent
// @Router /agents/{agent_id}/download [get]
func DownloadAgent(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "DownloadAgent"))

        agentID := c.Param("agent_id")
        if agentID == "" {
            logger.Error("Agent ID is required")
            c.JSON(http.StatusBadRequest, gin.H{"error": "Agent ID is required"})
            return
        }

        agent, err := database.GetAgentDetails(c.Request.Context(), db, agentID)
        if err != nil {
            if err.Error() == "agent not found" {
                logger.Error("Agent not found", zap.String("agentID", agentID))
                c.JSON(http.StatusNotFound, gin.H{"error": "Agent not found"})
                return
            }
            logger.Error("Failed to fetch agent details", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch agent details"})
            return
        }

        err = database.IncrementDownloadCount(c.Request.Context(), db, agentID)
        if err != nil {
            logger.Error("Failed to increment download count", zap.Error(err))
            // Continue with the download even if the count update fails
        }

        c.JSON(http.StatusOK, agent)
    }
}

// @Summary Download Agent File
// @Description Download an agent file by ID
// @Tags Agents
// @Accept json
// @Produce json
// @Param agent_id path string true "Agent ID"
// @Success 200 {object} models.Agent
// @Router /agents/{agent_id}/download-file [get]
func DownloadAgentFile(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "DownloadAgentFile"))

        agentID := c.Param("agent_id")
        if agentID == "" {
            logger.Error("Agent ID is required")
            c.JSON(http.StatusBadRequest, gin.H{"error": "Agent ID is required"})
            return
        }

        agentFile, err := database.GetAgentFile(c.Request.Context(), db, agentID)
        if err != nil {
            if err.Error() == "agent not found" {
                logger.Error("Agent not found", zap.String("agentID", agentID))
                c.JSON(http.StatusNotFound, gin.H{"error": "Agent not found"})
                return
            }
            logger.Error("Failed to fetch agent file", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch agent file"})
            return
        }

        err = database.IncrementDownloadCount(c.Request.Context(), db, agentID)
        if err != nil {
            logger.Error("Failed to increment download count", zap.Error(err))
            // Continue with the download even if the count update fails
        }

        fileName := fmt.Sprintf("agent_%s.json", agentID)
        c.Header("Content-Disposition", fmt.Sprintf("attachment; filename=%s", fileName))
        c.JSON(http.StatusOK, agentFile.Graph)
    }
}

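// TopAgentsByDownloads godoc
// @Summary Top Agents By Downloads
// @Description Get agents ranked by download count
// @Tags Agents
// @Accept json
// @Produce json
// @Param page query int false "Page number"
// @Param pageSize query int false "Page size"
// @Success 200 {array} models.Agent
// @Router /agents/top-downloads [get]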
func TopAgentsByDownloads(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "TopAgentsByDownloads"))
        logger.Info("Handling request for top agents by downloads")

        page, err := strconv.Atoi(c.DefaultQuery("page", "1"))
        if err != nil || page < 1 {
            logger.Error("Invalid page number", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page number"})
            return
        }

        pageSize, err := strconv.Atoi(c.DefaultQuery("pageSize", "10"))
        if err != nil || pageSize < 1 {
            logger.Error("Invalid page size", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page size"})
            return
        }

        agents, totalCount, err := database.GetTopAgentsByDownloads(c.Request.Context(), db, page, pageSize)
        if err != nil {
            logger.Error("Failed to fetch top agents", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch top agents"})
            return
        }

        logger.Info("Successfully fetched top agents", zap.Int("count", len(agents)), zap.Int("totalCount", totalCount))
        c.JSON(http.StatusOK, gin.H{
            "agents":     agents,
            "totalCount": totalCount,
            "page":       page,
            "pageSize":   pageSize,
        })
    }
}

// @Summary Get Featured Agents
// @Description Get featured agents based on category
// @Tags Agents
// @Accept json
// @Produce json
// @Param category query string false "Category"
// @Param page query int false "Page number"
// @Param pageSize query int false "Page size"
// @Success 200 {array} models.Agent
// @Router /agents/featured [get]
func GetFeaturedAgents(db *pgxpool.Pool, logger *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := logger.With(zap.String("function", "GetFeaturedAgents"))
        logger.Info("Handling request for featured agents")

        category := c.Query("category")
        if category == "" {
            logger.Debug("No category specified, fetching all featured agents")
        } else {
            logger.Debug("Fetching featured agents for category", zap.String("category", category))
        }

        page, err := strconv.Atoi(c.DefaultQuery("page", "1"))
        if err != nil || page < 1 {
            logger.Error("Invalid page number", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page number"})
            return
        }

        pageSize, err := strconv.Atoi(c.DefaultQuery("pageSize", "10"))
        if err != nil || pageSize < 1 {
            logger.Error("Invalid page size", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page size"})
            return
        }

        agents, totalCount, err := database.GetFeaturedAgents(c.Request.Context(), db, category, page, pageSize)
        if err != nil {
            logger.Error("Failed to fetch featured agents", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to fetch featured agents"})
            return
        }

        logger.Info("Successfully fetched featured agents", zap.Int("count", len(agents)), zap.Int("totalCount", totalCount))
        c.JSON(http.StatusOK, gin.H{
            "agents":     agents,
            "totalCount": totalCount,
            "page":       page,
            "pageSize":   pageSize,
        })
    }
}

// @Summary Search Agents
// @Description Search for agents based on query and categories
// @Tags Agents
// @Accept json
// @Produce json
// @Param q query string true "Search query"
// @Param categories query []string false "Categories"
// @Param page query int false "Page number"
// @Param pageSize query int false "Page size"
// @Param sortBy query string false "Sort by"
// @Param sortOrder query string false "Sort order"
// @Success 200 {array} models.Agent
// @Router /agents/search [get]
func SearchAgents(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "Search"))
        logger.Info("Handling search request")

        query := c.Query("q")
        if query == "" {
            logger.Error("Search query is required")
            c.JSON(http.StatusBadRequest, gin.H{"error": "Search query is required"})
            return
        }

        categories := c.QueryArray("categories")

        page, err := strconv.Atoi(c.DefaultQuery("page", "1"))
        if err != nil || page < 1 {
            logger.Error("Invalid page number", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page number"})
            return
        }

        pageSize, err := strconv.Atoi(c.DefaultQuery("pageSize", "10"))
        if err != nil || pageSize < 1 {
            logger.Error("Invalid page size", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid page size"})
            return
        }

        sortBy := c.DefaultQuery("sortBy", "rank")
        sortOrder := c.DefaultQuery("sortOrder", "DESC")

        agents, err := database.Search(c.Request.Context(), db, query, categories, page, pageSize, sortBy, sortOrder)
        if err != nil {
            logger.Error("Failed to perform search", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to perform search"})
            return
        }

        logger.Info("Successfully performed search", zap.Int("resultCount", len(agents)))
        c.JSON(http.StatusOK, gin.H{
            "agents":   agents,
            "page":     page,
            "pageSize": pageSize,
        })
    }
}
41
rnd/rest-api-go/handlers/analytics.go
Normal file
@@ -0,0 +1,41 @@
package handlers

import (
    "net/http"

    "github.com/gin-gonic/gin"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/swiftyos/market/database"
    "go.uber.org/zap"

    "github.com/swiftyos/market/models"
)

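// AgentInstalled records an install-tracking analytics event posted by a
// client after it installs a marketplace agent.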
func AgentInstalled(db *pgxpool.Pool, log_ctx *zap.Logger) gin.HandlerFunc {
    return func(c *gin.Context) {
        logger := log_ctx.With(zap.String("function", "AgentInstalled"))
        var eventData models.InstallTracker
        if err := c.ShouldBindJSON(&eventData); err != nil {
            logger.Error("Failed to bind JSON", zap.Error(err))
            c.JSON(http.StatusBadRequest, gin.H{"error": "Invalid request body"})
            return
        }

        err := database.CreateAgentInstalledEvent(c.Request.Context(), db, models.InstallTracker{
            MarketplaceAgentID:   eventData.MarketplaceAgentID,
            InstalledAgentID:     eventData.InstalledAgentID,
            InstallationLocation: eventData.InstallationLocation,
        })
        if err != nil {
            logger.Error("Failed to process agent installed event", zap.Error(err))
            c.JSON(http.StatusInternalServerError, gin.H{"error": "Failed to process agent installed event"})
            return
        }
        logger.Info("Agent installed event processed successfully",
            zap.String("marketplaceAgentID", eventData.MarketplaceAgentID),
            zap.String("installedAgentID", eventData.InstalledAgentID),
            zap.String("installationLocation", string(eventData.InstallationLocation)))

        c.JSON(http.StatusOK, gin.H{"message": "Agent installed event processed successfully"})
    }
}
95
rnd/rest-api-go/handlers/parameters.go
Normal file
@@ -0,0 +1,95 @@
package handlers

import (
    "context"
    "strconv"
)

const pageKey string = "page"

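// getPageFromContext returns the page number stored in ctx under pageKey,
// accepting int or numeric string values and defaulting to 1 when the value
// is missing or invalid.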
func getPageFromContext(ctx context.Context) int {
    defaultPage := 1
    if ctx == nil {
        return defaultPage
    }

    pageValue := ctx.Value(pageKey)

    if pageValue == nil {
        return defaultPage
    }

    // Type assertion to check if the value is an int
    if page, ok := pageValue.(int); ok {
        if page < 1 {
            return defaultPage
        }
        return page
    }

    // If it's not an int, try to convert from string
    if pageStr, ok := pageValue.(string); ok {
        page, err := strconv.Atoi(pageStr)
        if err != nil || page < 1 {
            return defaultPage
        }
        return page
    }

    return defaultPage
}

const pageSizeKey string = "page_size"

func getPageSizeFromContext(ctx context.Context) int {
    pageSizeValue := ctx.Value(pageSizeKey)
    if pageSizeValue == nil {
        return 10
    }
    if pageSizeValue, ok := pageSizeValue.(int); ok {
        if pageSizeValue < 1 {
            return 10
        }
        return pageSizeValue
    }
    return 10
}

const nameKey string = "name"

func getNameFromContext(ctx context.Context) *string {
    nameValue := ctx.Value(nameKey)
    if nameValue == nil {
        return nil
    }
    if nameValue, ok := nameValue.(string); ok {
        return &nameValue
    }
    return nil
}

const keywordsKey string = "keywords"

func getKeywordsFromContext(ctx context.Context) *string {
    keywordsValue := ctx.Value(keywordsKey)
    if keywordsValue == nil {
        return nil
    }
    if keywordsValue, ok := keywordsValue.(string); ok {
        return &keywordsValue
    }
    return nil
}

const categoriesKey string = "categories"

func getCategoriesFromContext(ctx context.Context) *string {
    categoriesValue := ctx.Value(categoriesKey)
    if categoriesValue == nil {
        return nil
    }
    if categoriesValue, ok := categoriesValue.(string); ok {
        return &categoriesValue
    }
    return nil
}
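This change only shows the handlers reading pagination and filter values out of the request context; nothing in the diff sets them. A minimal sketch of the assumed producer side — a hypothetical Pagination middleware, not part of this commit — would store query parameters under the same string keys the getters above look up:

package middleware

import (
    "context"
    "strconv"

    "github.com/gin-gonic/gin"
)

// Pagination is a hypothetical middleware (not in this diff) that copies
// pagination and filter query parameters into the request context under
// the keys the handlers package reads: "page", "page_size", "name".
func Pagination() gin.HandlerFunc {
    return func(c *gin.Context) {
        ctx := c.Request.Context()

        // Store values as ints so the getters' int paths apply directly
        if page, err := strconv.Atoi(c.DefaultQuery("page", "1")); err == nil {
            ctx = context.WithValue(ctx, "page", page)
        }
        if size, err := strconv.Atoi(c.DefaultQuery("page_size", "10")); err == nil {
            ctx = context.WithValue(ctx, "page_size", size)
        }
        if name := c.Query("name"); name != "" {
            ctx = context.WithValue(ctx, "name", name)
        }

        c.Request = c.Request.WithContext(ctx)
        c.Next()
    }
}

Registered ahead of the agents routes (for example, r.Use(Pagination())), this would make GetAgents see the values its context getters expect.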
151
rnd/rest-api-go/handlers/parameters_test.go
Normal file
@@ -0,0 +1,151 @@
package handlers

import (
    "context"
    "testing"

    "github.com/stretchr/testify/assert"
    "go.uber.org/zap"
)

func TestGetPageFromContext_ValidPage(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageKey, 5)
    result := getPageFromContext(ctx)
    assert.Equal(t, 5, result)
}

func TestGetPageFromContext_InvalidPageZero(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageKey, 0)
    result := getPageFromContext(ctx)
    assert.Equal(t, 1, result)
}

func TestGetPageFromContext_NoPageValue(t *testing.T) {
    ctx := context.Background()
    result := getPageFromContext(ctx)
    assert.Equal(t, 1, result)
}

func TestGetPageFromContext_InvalidPageNegative(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageKey, -1)
    result := getPageFromContext(ctx)
    assert.Equal(t, 1, result)
}

func TestGetPageFromContext_InvalidType(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageKey, "not an int")
    result := getPageFromContext(ctx)
    assert.Equal(t, 1, result)
}

func TestGetPageSizeFromContext_ValidPageSize(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageSizeKey, 20)
    result := getPageSizeFromContext(ctx)
    assert.Equal(t, 20, result)
}

func TestGetPageSizeFromContext_InvalidPageSizeNegative(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageSizeKey, -1)
    result := getPageSizeFromContext(ctx)
    assert.Equal(t, 10, result)
}

func TestGetPageSizeFromContext_InvalidPageSizeZero(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageSizeKey, 0)
    result := getPageSizeFromContext(ctx)
    assert.Equal(t, 10, result)
}

func TestGetPageSizeFromContext_NoPageSizeValue(t *testing.T) {
    ctx := context.Background()
    result := getPageSizeFromContext(ctx)
    assert.Equal(t, 10, result)
}

func TestGetPageSizeFromContext_InvalidType(t *testing.T) {
    ctx := context.WithValue(context.Background(), pageSizeKey, "not an int")
    result := getPageSizeFromContext(ctx)
    assert.Equal(t, 10, result)
}

func TestGetNameFromContext_ValidName(t *testing.T) {
    ctx := context.WithValue(context.Background(), nameKey, "Test Name")
    result := getNameFromContext(ctx)
    assert.Equal(t, strPtr("Test Name"), result)
}

func TestGetNameFromContext_EmptyString(t *testing.T) {
    ctx := context.WithValue(context.Background(), nameKey, "")
    result := getNameFromContext(ctx)
    assert.Equal(t, strPtr(""), result)
}

func TestGetNameFromContext_NoNameValue(t *testing.T) {
    ctx := context.Background()
    result := getNameFromContext(ctx)
    assert.Nil(t, result)
}

func TestGetNameFromContext_InvalidType(t *testing.T) {
    ctx := context.WithValue(context.Background(), nameKey, 123)
    result := getNameFromContext(ctx)
    assert.Nil(t, result)
}

func TestGetKeywordsFromContext_ValidKeywords(t *testing.T) {
    ctx := context.WithValue(context.Background(), keywordsKey, "keyword1,keyword2")
    result := getKeywordsFromContext(ctx)
    assert.Equal(t, strPtr("keyword1,keyword2"), result)
}

func TestGetKeywordsFromContext_EmptyString(t *testing.T) {
    ctx := context.WithValue(context.Background(), keywordsKey, "")
    result := getKeywordsFromContext(ctx)
    assert.Equal(t, strPtr(""), result)
}

func TestGetKeywordsFromContext_NoKeywordsValue(t *testing.T) {
    ctx := context.Background()
    result := getKeywordsFromContext(ctx)
    assert.Nil(t, result)
}

func TestGetKeywordsFromContext_InvalidType(t *testing.T) {
    ctx := context.WithValue(context.Background(), keywordsKey, 123)
    result := getKeywordsFromContext(ctx)
    assert.Nil(t, result)
}

func TestGetCategoriesFromContext_ValidCategories(t *testing.T) {
    ctx := context.WithValue(context.Background(), categoriesKey, "category1,category2")
    result := getCategoriesFromContext(ctx)
    assert.Equal(t, strPtr("category1,category2"), result)
}

func TestGetCategoriesFromContext_EmptyString(t *testing.T) {
    ctx := context.WithValue(context.Background(), categoriesKey, "")
    result := getCategoriesFromContext(ctx)
    assert.Equal(t, strPtr(""), result)
}

func TestGetCategoriesFromContext_NoCategoriesValue(t *testing.T) {
    ctx := context.Background()
    result := getCategoriesFromContext(ctx)
    assert.Nil(t, result)
}

func TestGetCategoriesFromContext_InvalidType(t *testing.T) {
    ctx := context.WithValue(context.Background(), categoriesKey, 123)
    result := getCategoriesFromContext(ctx)
    assert.Nil(t, result)
}

func strPtr(s string) *string {
    return &s
}

func init() {
    // Install a no-op global logger for tests; zaptest.NewLogger requires a
    // live *testing.T, which is not available inside init.
    zap.ReplaceGlobals(zap.NewNop())
}
14
rnd/rest-api-go/handlers/user.go
Normal file
@@ -0,0 +1,14 @@
package handlers

import (
    "github.com/gin-gonic/gin"
    "github.com/swiftyos/market/models"
)

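// GetUserFromContext returns the models.User stored on the gin context by
// the auth middleware, and false when no user is set.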
func GetUserFromContext(c *gin.Context) (models.User, bool) {
    user, exists := c.Get("user")
    if !exists {
        return models.User{}, false
    }
    // Guard the type assertion so an unexpected value type cannot panic
    userModel, ok := user.(models.User)
    if !ok {
        return models.User{}, false
    }
    return userModel, true
}
45
rnd/rest-api-go/handlers/user_test.go
Normal file
@@ -0,0 +1,45 @@
package handlers

import (
    "net/http/httptest"
    "testing"

    "github.com/gin-gonic/gin"
    "github.com/stretchr/testify/assert"
    "github.com/swiftyos/market/models"
)

func TestGetUserFromContext(t *testing.T) {
    t.Run("User exists in context", func(t *testing.T) {
        // Create a new gin context backed by a test recorder
        c, _ := gin.CreateTestContext(httptest.NewRecorder())

        // Create a test user
        testUser := models.User{
            UserID: "123",
            Role:   "admin",
            Email:  "test@example.com",
        }

        // Set the user in the context
        c.Set("user", testUser)

        // Call the function
        user, exists := GetUserFromContext(c)

        // Assert the results
        assert.True(t, exists)
        assert.Equal(t, testUser, user)
    })

    t.Run("User does not exist in context", func(t *testing.T) {
        // Create a new gin context backed by a test recorder
        c, _ := gin.CreateTestContext(httptest.NewRecorder())

        // Call the function
        user, exists := GetUserFromContext(c)

        // Assert the results
        assert.False(t, exists)
        assert.Equal(t, models.User{}, user)
    })
}
116
rnd/rest-api-go/main.go
Normal file
@@ -0,0 +1,116 @@
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Depado/ginprom"
|
||||
"github.com/gin-contrib/cors"
|
||||
"github.com/gin-contrib/zap"
|
||||
"github.com/gin-gonic/gin"
|
||||
swaggerfiles "github.com/swaggo/files"
|
||||
ginSwagger "github.com/swaggo/gin-swagger"
|
||||
"github.com/swiftyos/market/config"
|
||||
"github.com/swiftyos/market/database"
|
||||
docs "github.com/swiftyos/market/docs"
|
||||
"github.com/swiftyos/market/handlers"
|
||||
"github.com/swiftyos/market/middleware"
|
||||
"github.com/swiftyos/market/utils"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Initialize configuration
|
||||
cfg, err := config.Load()
|
||||
if err != nil {
|
||||
log.Fatalf("Failed to load config: %v", err)
|
||||
}
|
||||
|
||||
// Initialize logger
|
||||
logger := utils.NewLogger(cfg)
|
||||
|
||||
// Initialize database connection
|
||||
db, err := database.NewDB(cfg)
|
||||
if err != nil {
|
||||
logger.Fatal("Failed to connect to database", zap.Error(err))
|
||||
}
|
||||
// Initialize Gin router
|
||||
r := gin.New()
|
||||
// Set the port
|
||||
port := cfg.ServerAddress
|
||||
if port == "" {
|
||||
port = "8080" // Default port if not specified in config
|
||||
}
|
||||
r.Run(":" + port)
|
||||
p := ginprom.New(
|
||||
ginprom.Engine(r),
|
||||
ginprom.Subsystem("gin"),
|
||||
ginprom.Path("/metrics"),
|
||||
)
|
||||
r.Use(p.Instrument())
|
||||
// Use middleware
|
||||
r.Use(ginzap.Ginzap(logger, time.RFC1123, true))
|
||||
r.Use(ginzap.RecoveryWithZap(logger, true))
|
||||
r.Use(middleware.Gzip())
|
||||
|
||||
// Update CORS configuration
|
||||
corsConfig := cors.DefaultConfig()
|
||||
if len(cfg.CORSAllowOrigins) > 0 {
|
||||
corsConfig.AllowOrigins = cfg.CORSAllowOrigins
|
||||
} else {
|
||||
corsConfig.AllowOrigins = []string{"*"} // Fallback to allow all origins if not specified
|
||||
}
|
||||
corsConfig.AllowHeaders = append(corsConfig.AllowHeaders, "Authorization")
|
||||
corsConfig.AllowMethods = []string{"GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"}
|
||||
corsConfig.AllowCredentials = true
|
||||
r.Use(cors.New(corsConfig))
|
||||
|
||||
// Route welcome
|
||||
r.GET("/", func(c *gin.Context) {
|
||||
c.String(http.StatusOK, "Welcome to the Marketplace API")
|
||||
})
|
||||
docs.SwaggerInfo.BasePath = "/api/v1/market/"
|
||||
|
||||
// Setup routes
|
||||
// [Error] Request header field Authorization is not allowed by Access-Control-Allow-Headers.
|
||||
// [Error] Fetch API cannot load http://localhost:8015/api/v1/market/featured/agents?page=1&page_size=10 due to access control checks.
|
||||
// [Error] Failed to load resource: Request header field Authorization is not allowed by Access-Control-Allow-Headers. (agents, line 0)
|
||||
api := r.Group("/api/v1/market/")
|
||||
{
|
||||
|
||||
agents := api.Group("/agents")
|
||||
{
|
||||
agents.GET("", handlers.GetAgents(db, logger))
|
||||
agents.GET("/:agent_id", handlers.GetAgentDetails(db, logger))
|
||||
agents.GET("/:agent_id/download", handlers.DownloadAgent(db, logger))
|
||||
agents.GET("/:agent_id/download-file", handlers.DownloadAgentFile(db, logger))
|
||||
agents.GET("/top-downloads", handlers.TopAgentsByDownloads(db, logger))
|
||||
agents.GET("/featured", handlers.GetFeaturedAgents(db, logger))
|
||||
agents.GET("/search", handlers.SearchAgents(db, logger))
|
||||
agents.POST("/submit", middleware.Auth(cfg), handlers.SubmitAgent(db, logger))
|
||||
}
|
||||
|
||||
// Admin routes
|
||||
admin := api.Group("/admin")
|
||||
{
|
||||
admin.POST("/agent", middleware.Auth(cfg), handlers.CreateAgentEntry(db, logger))
|
||||
admin.POST("/agent/featured/:agent_id", middleware.Auth(cfg), handlers.SetAgentFeatured(db, logger))
|
||||
admin.GET("/agent/featured/:agent_id", middleware.Auth(cfg), handlers.GetAgentFeatured(db, logger))
|
||||
admin.DELETE("/agent/featured/:agent_id", middleware.Auth(cfg), handlers.UnsetAgentFeatured(db, logger))
|
||||
admin.GET("/agent/not-featured", middleware.Auth(cfg), handlers.GetNotFeaturedAgents(db, logger))
|
||||
admin.GET("/agent/submissions", middleware.Auth(cfg), handlers.GetAgentSubmissions(db, logger))
|
||||
admin.POST("/agent/submissions", middleware.Auth(cfg), handlers.ReviewSubmission(db, logger))
|
||||
}
|
||||
|
||||
api.GET("/categories", handlers.GetCategories(db, logger))
|
||||
// Analytics routes
|
||||
api.POST("/agent-installed", handlers.AgentInstalled(db, logger))
|
||||
}
|
||||
r.GET("/docs/*any", ginSwagger.WrapHandler(swaggerfiles.Handler))
|
||||
|
||||
// Start server
|
||||
if err := r.Run(cfg.ServerAddress); err != nil {
|
||||
logger.Fatal("Failed to start server", zap.Error(err))
|
||||
}
|
||||
}
|
||||
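For reference, a minimal sketch of the `config.Config` fields this entry point and the middleware below rely on. The field names are inferred from their usage in this diff; the actual struct in `rnd/rest-api-go/config` (including how it loads environment variables) may define more:

```go
package config

// Config is a minimal sketch of the configuration consumed by main.go and
// middleware.Auth. Only fields referenced in this diff are shown; the real
// struct may differ.
type Config struct {
	ServerAddress    string   // listen address, e.g. ":8015"; main.go falls back to ":8080"
	CORSAllowOrigins []string // allowed CORS origins; empty means allow all
	AuthEnabled      bool     // when false, Auth injects a default admin user
	JWTSecret        string   // HMAC secret used to verify bearer tokens
}
```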
rnd/rest-api-go/middleware/auth.go (new file, 83 lines)
@@ -0,0 +1,83 @@
package middleware

import (
	"errors"
	"net/http"
	"strings"

	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt/v4"
	"github.com/swiftyos/market/config"
	"github.com/swiftyos/market/models"
)

func Auth(cfg *config.Config) gin.HandlerFunc {
	return func(c *gin.Context) {
		if !cfg.AuthEnabled {
			// Authentication is disabled: inject a default admin user
			defaultUser := models.User{
				UserID: "3e53486c-cf57-477e-ba2a-cb02dc828e1a",
				Role:   "admin",
			}
			c.Set("user", defaultUser)
			c.Next()
			return
		}

		authHeader := c.GetHeader("Authorization")
		if authHeader == "" {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Authorization header is missing"})
			return
		}

		tokenString := strings.TrimPrefix(authHeader, "Bearer ")
		token, err := parseJWTToken(tokenString, cfg.JWTSecret)
		if err != nil {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": err.Error()})
			return
		}

		claims, ok := token.Claims.(jwt.MapClaims)
		if !ok || !token.Valid {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "Invalid token"})
			return
		}

		user, err := verifyUser(claims, false) // adminOnly=false here; pass true for admin-only routes
		if err != nil {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": err.Error()})
			return
		}

		c.Set("user", user)
		c.Next()
	}
}

func verifyUser(payload jwt.MapClaims, adminOnly bool) (models.User, error) {
	user, err := models.NewUserFromPayload(payload)
	if err != nil {
		return models.User{}, err
	}

	if adminOnly && user.Role != "admin" {
		return models.User{}, errors.New("admin access required")
	}

	return user, nil
}

func parseJWTToken(tokenString string, secret string) (*jwt.Token, error) {
	token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) {
		// Reject tokens signed with anything other than HMAC (e.g. "none" or RSA)
		if _, ok := token.Method.(*jwt.SigningMethodHMAC); !ok {
			return nil, errors.New("unexpected signing method")
		}
		return []byte(secret), nil
	})
	if err != nil {
		return nil, err
	}

	return token, nil
}
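A short sketch of how a handler behind this middleware can recover the authenticated user. The `WhoAmI` handler is illustrative and not part of this diff; the real handlers live in the `handlers` package:

```go
package handlers

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/swiftyos/market/models"
)

// WhoAmI is a hypothetical handler showing how code behind middleware.Auth
// can read the user that Auth stores in the request context.
func WhoAmI() gin.HandlerFunc {
	return func(c *gin.Context) {
		u, ok := c.Get("user")
		if !ok {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "no user in context"})
			return
		}
		user := u.(models.User) // Auth always stores a models.User under "user"
		c.JSON(http.StatusOK, gin.H{"user_id": user.UserID, "role": user.Role})
	}
}
```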
rnd/rest-api-go/middleware/auth_test.go (new file, 213 lines)
@@ -0,0 +1,213 @@
package middleware

import (
	"net/http/httptest"
	"testing"

	"github.com/gin-gonic/gin"
	"github.com/golang-jwt/jwt/v4"
	"github.com/stretchr/testify/assert"
	"github.com/swiftyos/market/config"
	"github.com/swiftyos/market/models"
)

func TestVerifyUser(t *testing.T) {
	tests := []struct {
		name      string
		payload   jwt.MapClaims
		adminOnly bool
		wantUser  models.User
		wantErr   bool
	}{
		{
			name: "Valid user",
			payload: jwt.MapClaims{
				"sub":   "test-user",
				"email": "test@example.com",
				"role":  "user",
			},
			adminOnly: false,
			wantUser: models.User{
				UserID: "test-user",
				Email:  "test@example.com",
				Role:   "user",
			},
			wantErr: false,
		},
		{
			name: "Valid admin",
			payload: jwt.MapClaims{
				"sub":   "admin-user",
				"email": "admin@example.com",
				"role":  "admin",
			},
			adminOnly: true,
			wantUser: models.User{
				UserID: "admin-user",
				Email:  "admin@example.com",
				Role:   "admin",
			},
			wantErr: false,
		},
		{
			name: "Non-admin accessing admin-only route",
			payload: jwt.MapClaims{
				"sub":   "test-user",
				"email": "test@example.com",
				"role":  "user",
			},
			adminOnly: true,
			wantUser:  models.User{},
			wantErr:   true,
		},
		{
			name:      "Missing sub claim",
			payload:   jwt.MapClaims{},
			adminOnly: false,
			wantUser:  models.User{},
			wantErr:   true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotUser, err := verifyUser(tt.payload, tt.adminOnly)
			if tt.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
				assert.Equal(t, tt.wantUser, gotUser)
			}
		})
	}
}

func TestParseJWTToken(t *testing.T) {
	secret := "test-secret"

	tests := []struct {
		name        string
		tokenString string
		wantErr     bool
	}{
		{
			name:        "Valid token",
			tokenString: createValidToken(secret),
			wantErr:     false,
		},
		{
			name:        "Invalid token",
			tokenString: "invalid.token.string",
			wantErr:     true,
		},
		{
			name:        "Empty token",
			tokenString: "",
			wantErr:     true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			token, err := parseJWTToken(tt.tokenString, secret)
			if tt.wantErr {
				assert.Error(t, err)
				assert.Nil(t, token)
			} else {
				assert.NoError(t, err)
				assert.NotNil(t, token)
				assert.True(t, token.Valid)
			}
		})
	}
}

func createValidToken(secret string) string {
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"sub":   "test-user",
		"email": "test@example.com",
		"role":  "user",
	})
	tokenString, _ := token.SignedString([]byte(secret))
	return tokenString
}

func TestAuth(t *testing.T) {
	cfg := &config.Config{
		JWTSecret:   "test-secret",
		AuthEnabled: true,
	}

	tests := []struct {
		name          string
		authHeader    string
		expectedUser  models.User
		expectedError bool
	}{
		{
			name:       "Valid token",
			authHeader: "Bearer " + createValidToken(cfg.JWTSecret),
			expectedUser: models.User{
				UserID: "test-user",
				Email:  "test@example.com",
				Role:   "user",
			},
			expectedError: false,
		},
		{
			name:          "Invalid token",
			authHeader:    "Bearer invalid.token.string",
			expectedUser:  models.User{},
			expectedError: true,
		},
		{
			name:          "Missing auth header",
			authHeader:    "",
			expectedUser:  models.User{},
			expectedError: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Create a mock gin.Context
			c, _ := gin.CreateTestContext(httptest.NewRecorder())
			c.Request = httptest.NewRequest("GET", "/", nil)
			c.Request.Header.Set("Authorization", tt.authHeader)

			// Call the Auth middleware
			Auth(cfg)(c)

			// Check the results
			if tt.expectedError {
				assert.True(t, c.IsAborted())
			} else {
				assert.False(t, c.IsAborted())
				user, exists := c.Get("user")
				assert.True(t, exists)
				assert.Equal(t, tt.expectedUser, user.(models.User))
			}
		})
	}
}

func TestAuthDisabled(t *testing.T) {
	cfg := &config.Config{
		JWTSecret:   "test-secret",
		AuthEnabled: false,
	}

	// Create a mock gin.Context
	c, _ := gin.CreateTestContext(httptest.NewRecorder())
	c.Request = httptest.NewRequest("GET", "/", nil)

	Auth(cfg)(c)

	assert.False(t, c.IsAborted())
	user, exists := c.Get("user")
	assert.True(t, exists)
	assert.Equal(t, models.User{
		UserID: "3e53486c-cf57-477e-ba2a-cb02dc828e1a",
		Role:   "admin",
	}, user.(models.User))
}
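The `createValidToken` helper above doubles as a recipe for manual testing. A standalone sketch (not part of the diff) that mints a token the middleware will accept, for use against a locally running server; the secret and claim values are placeholders and must match the server's `cfg.JWTSecret`:

```go
package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
)

// Mint an HS256 token with the claims Auth expects ("sub" required,
// "email" and "role" optional) and print a ready-to-paste header.
func main() {
	token := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
		"sub":   "local-dev-user",
		"email": "dev@example.com",
		"role":  "admin",
	})
	signed, err := token.SignedString([]byte("test-secret")) // must equal cfg.JWTSecret
	if err != nil {
		panic(err)
	}
	fmt.Println("Authorization: Bearer " + signed)
}
```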
rnd/rest-api-go/middleware/gzip.go (new file, 10 lines)
@@ -0,0 +1,10 @@
package middleware

import (
	"github.com/gin-contrib/gzip"
	"github.com/gin-gonic/gin"
)

// Gzip returns middleware that compresses responses at the default level.
func Gzip() gin.HandlerFunc {
	return gzip.Gzip(gzip.DefaultCompression)
}
rnd/rest-api-go/models/agent.go (new file, 104 lines)
@@ -0,0 +1,104 @@
package models

import (
	"time"
)

// Agent represents the basic agent information
type Agent struct {
	ID          string   `json:"id"`
	Name        string   `json:"name"`
	Description string   `json:"description"`
	Author      string   `json:"author"`
	Keywords    []string `json:"keywords"`
	Categories  []string `json:"categories"`
	Graph       Graph    `json:"graph"`
}

// Graph represents the graph structure of an agent
type Graph struct {
	Name        string `json:"name"`
	Description string `json:"description"`
	// Add other fields as needed
}

// AddAgentRequest represents the request structure for adding a new agent
type AddAgentRequest struct {
	Graph      Graph    `json:"graph"`
	Author     string   `json:"author"`
	Keywords   []string `json:"keywords"`
	Categories []string `json:"categories"`
}

// SubmissionStatus represents the status of an agent submission
type SubmissionStatus string

const (
	SubmissionStatusPending  SubmissionStatus = "PENDING"
	SubmissionStatusApproved SubmissionStatus = "APPROVED"
	SubmissionStatusRejected SubmissionStatus = "REJECTED"
)

// AgentWithMetadata extends Agent with additional metadata
type AgentWithMetadata struct {
	Agent
	Version                  int              `json:"version"`
	CreatedAt                time.Time        `json:"createdAt"`
	UpdatedAt                time.Time        `json:"updatedAt"`
	SubmissionDate           time.Time        `json:"submissionDate"`
	SubmissionReviewDate     *time.Time       `json:"submissionReviewDate,omitempty"`
	SubmissionStatus         SubmissionStatus `json:"submissionStatus"`
	SubmissionReviewComments *string          `json:"submissionReviewComments,omitempty"`
}

// AgentWithRank extends AgentWithMetadata with a rank field for search results
type AgentWithRank struct {
	AgentWithMetadata
	Rank float64 `json:"rank"`
}

// AgentWithDownloads extends AgentWithMetadata with a download count
type AgentWithDownloads struct {
	AgentWithMetadata
	Downloads int `json:"downloads"`
}

// AnalyticsTracker represents analytics data for an agent
type AnalyticsTracker struct {
	ID        string `json:"id"`
	AgentID   string `json:"agentId"`
	Views     int    `json:"views"`
	Downloads int    `json:"downloads"`
}

// InstallationLocation represents the location where an agent is installed
type InstallationLocation string

const (
	InstallationLocationLocal InstallationLocation = "LOCAL"
	InstallationLocationCloud InstallationLocation = "CLOUD"
)

// InstallTracker represents installation data for an agent
type InstallTracker struct {
	ID                   string               `json:"id"`
	MarketplaceAgentID   string               `json:"marketplaceAgentId"`
	InstalledAgentID     string               `json:"installedAgentId"`
	InstallationLocation InstallationLocation `json:"installationLocation"`
	CreatedAt            time.Time            `json:"createdAt"`
}

// FeaturedAgent represents a featured agent in the marketplace
type FeaturedAgent struct {
	ID                 string    `json:"id"`
	AgentID            string    `json:"agentId"`
	IsActive           bool      `json:"isActive"`
	FeaturedCategories []string  `json:"featuredCategories"`
	CreatedAt          time.Time `json:"createdAt"`
	UpdatedAt          time.Time `json:"updatedAt"`
}

// AgentFile pairs an agent's identity with its raw graph payload
type AgentFile struct {
	ID    string      `json:"id"`
	Name  string      `json:"name"`
	Graph interface{} `json:"graph"`
}
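A brief sketch of the wire format these models imply, assuming the `/agents/submit` endpoint accepts an `AddAgentRequest` body (the handler signature in `main.go` alone does not confirm the body type); all field values here are placeholders:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/swiftyos/market/models"
)

// Build and print the JSON body a client might POST to
// /api/v1/market/agents/submit, using the models defined above.
func main() {
	req := models.AddAgentRequest{
		Graph:      models.Graph{Name: "My Agent", Description: "Does useful things"},
		Author:     "example-author",
		Keywords:   []string{"example"},
		Categories: []string{"productivity"},
	}
	body, err := json.MarshalIndent(req, "", "  ")
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}
```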
rnd/rest-api-go/models/agent_test.go (new file, 176 lines)
@@ -0,0 +1,176 @@
package models

import (
	"encoding/json"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

func TestAgentJSON(t *testing.T) {
	jsonStr := `{
		"id": "test-id",
		"name": "Test Agent",
		"description": "A test agent",
		"author": "Test Author",
		"keywords": ["test", "agent"],
		"categories": ["testing"],
		"graph": {
			"name": "Test Graph",
			"description": "A test graph"
		}
	}`

	var agent Agent
	err := json.Unmarshal([]byte(jsonStr), &agent)
	assert.NoError(t, err)

	assert.Equal(t, "test-id", agent.ID)
	assert.Equal(t, "Test Agent", agent.Name)
	assert.Equal(t, "A test agent", agent.Description)
	assert.Equal(t, "Test Author", agent.Author)
	assert.Equal(t, []string{"test", "agent"}, agent.Keywords)
	assert.Equal(t, []string{"testing"}, agent.Categories)
	assert.Equal(t, "Test Graph", agent.Graph.Name)
	assert.Equal(t, "A test graph", agent.Graph.Description)
}

func TestGraphJSON(t *testing.T) {
	jsonStr := `{
		"name": "Test Graph",
		"description": "A test graph"
	}`

	var graph Graph
	err := json.Unmarshal([]byte(jsonStr), &graph)
	assert.NoError(t, err)

	assert.Equal(t, "Test Graph", graph.Name)
	assert.Equal(t, "A test graph", graph.Description)
}

func TestAddAgentRequestJSON(t *testing.T) {
	jsonStr := `{
		"graph": {
			"name": "Test Graph",
			"description": "A test graph"
		},
		"author": "Test Author",
		"keywords": ["test", "agent"],
		"categories": ["testing"]
	}`

	var request AddAgentRequest
	err := json.Unmarshal([]byte(jsonStr), &request)
	assert.NoError(t, err)

	assert.Equal(t, "Test Graph", request.Graph.Name)
	assert.Equal(t, "A test graph", request.Graph.Description)
	assert.Equal(t, "Test Author", request.Author)
	assert.Equal(t, []string{"test", "agent"}, request.Keywords)
	assert.Equal(t, []string{"testing"}, request.Categories)
}

func TestAgentWithMetadataJSON(t *testing.T) {
	now := time.Now().UTC().Round(time.Second)
	jsonStr := `{
		"id": "test-id",
		"name": "Test Agent",
		"description": "A test agent",
		"author": "Test Author",
		"keywords": ["test", "agent"],
		"categories": ["testing"],
		"graph": {
			"name": "Test Graph",
			"description": "A test graph"
		},
		"version": 1,
		"createdAt": "` + now.Format(time.RFC3339) + `",
		"updatedAt": "` + now.Format(time.RFC3339) + `",
		"submissionDate": "` + now.Format(time.RFC3339) + `",
		"submissionStatus": "PENDING"
	}`

	var agent AgentWithMetadata
	err := json.Unmarshal([]byte(jsonStr), &agent)
	assert.NoError(t, err)

	assert.Equal(t, "test-id", agent.ID)
	assert.Equal(t, "Test Agent", agent.Name)
	assert.Equal(t, "A test agent", agent.Description)
	assert.Equal(t, "Test Author", agent.Author)
	assert.Equal(t, []string{"test", "agent"}, agent.Keywords)
	assert.Equal(t, []string{"testing"}, agent.Categories)
	assert.Equal(t, "Test Graph", agent.Graph.Name)
	assert.Equal(t, "A test graph", agent.Graph.Description)
	assert.Equal(t, 1, agent.Version)
	assert.Equal(t, now, agent.CreatedAt)
	assert.Equal(t, now, agent.UpdatedAt)
	assert.Equal(t, now, agent.SubmissionDate)
	assert.Equal(t, SubmissionStatusPending, agent.SubmissionStatus)
	assert.Nil(t, agent.SubmissionReviewDate)
	assert.Nil(t, agent.SubmissionReviewComments)
}

func TestAnalyticsTrackerJSON(t *testing.T) {
	jsonStr := `{
		"id": "tracker-id",
		"agentId": "agent-id",
		"views": 100,
		"downloads": 50
	}`

	var tracker AnalyticsTracker
	err := json.Unmarshal([]byte(jsonStr), &tracker)
	assert.NoError(t, err)

	assert.Equal(t, "tracker-id", tracker.ID)
	assert.Equal(t, "agent-id", tracker.AgentID)
	assert.Equal(t, 100, tracker.Views)
	assert.Equal(t, 50, tracker.Downloads)
}

func TestInstallTrackerJSON(t *testing.T) {
	now := time.Now().UTC().Round(time.Second)
	jsonStr := `{
		"id": "install-id",
		"marketplaceAgentId": "marketplace-agent-id",
		"installedAgentId": "installed-agent-id",
		"installationLocation": "LOCAL",
		"createdAt": "` + now.Format(time.RFC3339) + `"
	}`

	var tracker InstallTracker
	err := json.Unmarshal([]byte(jsonStr), &tracker)
	assert.NoError(t, err)

	assert.Equal(t, "install-id", tracker.ID)
	assert.Equal(t, "marketplace-agent-id", tracker.MarketplaceAgentID)
	assert.Equal(t, "installed-agent-id", tracker.InstalledAgentID)
	assert.Equal(t, InstallationLocationLocal, tracker.InstallationLocation)
	assert.Equal(t, now, tracker.CreatedAt)
}

func TestFeaturedAgentJSON(t *testing.T) {
	now := time.Now().UTC().Round(time.Second)
	jsonStr := `{
		"id": "featured-id",
		"agentId": "agent-id",
		"isActive": true,
		"featuredCategories": ["category1", "category2"],
		"createdAt": "` + now.Format(time.RFC3339) + `",
		"updatedAt": "` + now.Format(time.RFC3339) + `"
	}`

	var featured FeaturedAgent
	err := json.Unmarshal([]byte(jsonStr), &featured)
	assert.NoError(t, err)

	assert.Equal(t, "featured-id", featured.ID)
	assert.Equal(t, "agent-id", featured.AgentID)
	assert.True(t, featured.IsActive)
	assert.Equal(t, []string{"category1", "category2"}, featured.FeaturedCategories)
	assert.Equal(t, now, featured.CreatedAt)
	assert.Equal(t, now, featured.UpdatedAt)
}
rnd/rest-api-go/models/user.go (new file, 28 lines)
@@ -0,0 +1,28 @@
package models
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/golang-jwt/jwt/v4"
|
||||
)
|
||||
|
||||
type User struct {
|
||||
UserID string `json:"user_id"`
|
||||
Email string `json:"email"`
|
||||
Role string `json:"role"`
|
||||
}
|
||||
|
||||
func NewUserFromPayload(claims jwt.MapClaims) (User, error) {
|
||||
userID, ok := claims["sub"].(string)
|
||||
if !ok {
|
||||
return User{}, fmt.Errorf("invalid or missing 'sub' claim")
|
||||
}
|
||||
|
||||
email, _ := claims["email"].(string)
|
||||
role, _ := claims["role"].(string)
|
||||
|
||||
return User{
|
||||
UserID: userID,
|
||||
Email: email,
|
||||
Role: role,
|
||||
}, nil
|
||||
}
|
||||
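A quick sketch (not part of the diff) of the claim-handling behavior above: `NewUserFromPayload` tolerates missing `email` and `role` claims but rejects a missing `sub`:

```go
package main

import (
	"fmt"

	"github.com/golang-jwt/jwt/v4"
	"github.com/swiftyos/market/models"
)

func main() {
	// Only "sub" present: succeeds, email and role are empty strings.
	u, err := models.NewUserFromPayload(jwt.MapClaims{"sub": "abc123"})
	fmt.Println(u, err)

	// No "sub" claim: returns an error.
	_, err = models.NewUserFromPayload(jwt.MapClaims{"email": "x@example.com"})
	fmt.Println(err) // invalid or missing 'sub' claim
}
```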
Some files were not shown because too many files have changed in this diff.