Compare commits


21 Commits

Author SHA1 Message Date
Aarushi
0cdfdd5f32 Merge branch 'master' into aarushikansal/open-1646-ci-for-docker 2024-09-12 09:32:22 +01:00
Zamil Majdy
52c731abd6 fix(rnd): Fix decorator function type hint (#8043) 2024-09-12 05:35:33 +07:00
Aarushi
c8fbce643e fix(rnd): Add connection timeout (#8041)
add connection timeout
2024-09-11 18:32:07 +01:00
Swifty
6c001bd595 Create Input Node Custom UI Node (#8016) 2024-09-11 14:14:03 +02:00
Aarushi
f5b89672f8 feat(rnd): Add k8s default health check (#8037)
add k8s default health check
2024-09-11 12:30:34 +01:00
Aarushi
76480ffa03 fix(rnd): Update port in market (#8036)
update port
2024-09-11 12:19:11 +01:00
Aarushi
ab60a57379 tweak(rnd): Ignore .env in market (#8035)
ignore .env
2024-09-11 11:01:34 +01:00
Aarushi
1d9b01fc77 tweak(rnd): Use docker compose not docker-compose (#8034)
* use docker compose not docker-compose

* linting
2024-09-11 10:20:31 +01:00
Swifty
e81d9f9f0b docker nits (#8033) 2024-09-11 10:31:12 +02:00
Bentlybro
0d5d0270ea Merge branch 'master' of https://github.com/Significant-Gravitas/AutoGPT 2024-09-10 18:24:53 +01:00
SwiftyOS
bd25f9223c expose schedular port and fix marketplace port 2024-09-10 17:09:39 +02:00
SwiftyOS
07305b55ff fix(rnd) use migrate deploy 2024-09-10 16:18:42 +02:00
SwiftyOS
cdfe3e5fbc Updated rnd/README.md 2024-09-10 16:16:16 +02:00
Swifty
e992cdf8c2 Fixing docker setup for local testing (#8026)
* Fixing docker setup

* Updated docker compose setup

* update helm charts

* Corrected agent server host name
2024-09-10 15:46:22 +02:00
Aarushi
ebd2ecd84c docs(server): Update docs (#8031)
update docs
2024-09-10 10:24:54 +01:00
Aarushi
0b919522ae feat(rnd): Split Execution Manager (#8008)
* split execution manager and removed ns and use direct uri with k8s and docker specific dns

* formating

* split execution manager

* refactor(builder): Fix linting warning and errors (#8021)

* Fix lint errors

* Fix dependency loop

* address feedback

* docker compose

* remove ns entirely

* remove yarn lock changes

* update readme

* remove ref

* dockerfile and log

* update log

* debug

* rename to executor

* remove execution from rest

* exec.py

* linting

* udpate tests to use config

* fix test

---------

Co-authored-by: Krzysztof Czerwinski <34861343+kcze@users.noreply.github.com>
2024-09-10 10:05:31 +01:00
Nicholas Tindle
ef691359b7 feat: document the use of isolation better (#8028) 2024-09-09 20:55:05 +00:00
Aarushi
f8815c3053 fix(builder): Use escaped apostrophe (#8027)
* use escaped apostrophe

* linter
2024-09-09 18:08:32 +00:00
Reinier van der Leer
a60ed21404 feat(server): Add OAuth flow endpoints for integrations (#7872)
- feat(server): Initial draft of OAuth init and exchange endpoints
  - Add `supabase` dependency
  - Add Supabase credentials to `Secrets`
  - Add `get_supabase` utility to `.server.utils`
  - Add `.server.integrations` API segment with initial implementations for OAuth init and exchange endpoints
- Move integration OAuth handlers to `autogpt_server.integrations.oauth`
- Change constructor of `SupabaseIntegrationCredentialsStore` to take a Supabase client
- Fix type issues in `GoogleOAuthHandler`
2024-09-09 17:21:56 +02:00
Aarushi
f0beae46af Merge branch 'master' into aarushikansal/open-1646-ci-for-docker 2024-08-08 11:11:55 +01:00
Aarushi
6006a56277 Add docker steps for builder and server in CI 2024-08-08 10:11:40 +01:00
53 changed files with 6823 additions and 365 deletions

View File

@@ -37,3 +37,4 @@ rnd/autogpt_builder/.env.local
rnd/autogpt_server/.env
rnd/autogpt_server/.venv/
rnd/market/.env

View File

@@ -0,0 +1,41 @@
name: AutoGPT Builder Docker

on:
  push:
    branches: [ master ]
    paths:
      - 'rnd/autogpt_builder/**'

defaults:
  run:
    shell: bash
    working-directory: rnd/autogpt_builder

env:
  PROJECT_ID: agpt-dev
  IMAGE_NAME: agpt-builder-dev
  REGION: us-central1

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v0.2.1
        with:
          project_id: ${{ env.PROJECT_ID }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
          export_default_credentials: true
      - name: Configure Docker
        run: gcloud auth configure-docker ${{ env.REGION }}-docker.pkg.dev
      - name: Build Docker image
        run: docker build -t ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }} .
      - name: Push Docker image
        run: docker push ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }}

View File

@@ -0,0 +1,41 @@
name: AutoGPT Server Docker

on:
  push:
    branches: [ master ]
    paths:
      - 'rnd/autogpt_server/**'

defaults:
  run:
    shell: bash
    working-directory: rnd/autogpt_server

env:
  PROJECT_ID: agpt-dev
  IMAGE_NAME: agpt-server-dev
  REGION: us-central1

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Set up Cloud SDK
        uses: google-github-actions/setup-gcloud@v0.2.1
        with:
          project_id: ${{ env.PROJECT_ID }}
          service_account_key: ${{ secrets.GCP_SA_KEY }}
          export_default_credentials: true
      - name: Configure Docker
        run: gcloud auth configure-docker ${{ env.REGION }}-docker.pkg.dev
      - name: Build Docker image
        run: docker build -t ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }} .
      - name: Push Docker image
        run: docker push ${{ env.REGION }}-docker.pkg.dev/${{ env.PROJECT_ID }}/${{ env.IMAGE_NAME }}:${{ github.sha }}

View File

@@ -69,6 +69,8 @@ Lets the agent execute non-interactive Shell commands and Python code. Python ex
| `shell_denylist` | List of prohibited shell commands | `List[str]` | `[]` |
| `docker_container_name` | Name of the Docker container used for code execution | `str` | `"agent_sandbox"` |
All shell command configurations are expected to be for convenience only. This component is not secure and should not be used in production environments. It is recommended to use more appropriate sandboxing.
### CommandProvider
- `execute_shell` execute shell command

View File

@@ -73,6 +73,7 @@ Once you have installed Yarn and Poetry, you can run the following command to in
```bash
cd rnd/autogpt_server
cp .env.example .env
poetry install
```
@@ -90,7 +91,7 @@ Once you have installed the dependencies, you can proceed to the next step.
In order to set up the database, you need to run the following commands in the same terminal where you ran the `poetry install` command:
```sh
docker compose up postgres -d
docker compose up postgres redis -d
poetry run prisma migrate dev
```
After applying the migration, generate the Prisma database model so that the database schema is correctly mapped to your codebase and the application can interact with the database properly:
@@ -101,7 +102,15 @@ poetry run prisma generate
Without running this command, the necessary Python modules (prisma.models) won't be available, leading to a `ModuleNotFoundError`.
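To make that failure mode concrete, here is a tiny hypothetical check (not part of the diff) showing what happens before the generate step has run:
```python
# Before `poetry run prisma generate`, the generated client is absent:
try:
    import prisma.models  # generated into the environment by Prisma
except ModuleNotFoundError:
    print("Run `poetry run prisma generate` first")
```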
### Running the server
### Running the server without Docker
To run the server, you can run the following commands in the same terminal you ran the `poetry install` command:
```bash
poetry run app
```
### Running the server within Docker
To run the server within Docker, you can run the following commands in the same terminal where you ran the `poetry install` command:
@@ -110,7 +119,7 @@ docker compose build
docker compose up
```
In the other terminal, you can run the following command to start the frontend:
In another terminal, from the `autogpt_builder` directory, you can run the following command to start the frontend:
```bash
yarn dev
@@ -119,3 +128,10 @@ yarn dev
### Checking if the server is running
You can check if the server is running by visiting [http://localhost:3000](http://localhost:3000) in your browser.
### Notes:
By default the daemons for different services run on the following ports:
Execution Manager Daemon: 8002
Execution Scheduler Daemon: 8003
Rest Server Daemon: 8004

View File

@@ -1,36 +1,114 @@
This is a guide to setting up and running the AutoGPT Server and Builder. This tutorial will cover downloading the necessary files, setting up the server, and testing the system.
# AutoGPT Platform
https://github.com/user-attachments/assets/fd0d0f35-3155-4263-b575-ba3efb126cb4
Welcome to the AutoGPT Platform - a powerful system for creating and running AI agents to solve business problems. This platform enables you to harness the power of artificial intelligence to automate tasks, analyze data, and generate insights for your organization.
1. Navigate to the AutoGPT GitHub repository.
2. Click the "Code" button, then select "Download ZIP".
3. Once downloaded, extract the ZIP file to a folder of your choice.
## Getting Started
4. Open the extracted folder and navigate to the "rnd" directory.
5. Enter the "AutoGPT server" folder.
6. Open a terminal window in this directory.
7. Locate and open the README file in the AutoGPT server folder: [doc](./autogpt_server/README.md#setup).
8. Copy and paste each command from the setup section in the README into your terminal.
- Important: Wait for each command to finish before running the next one.
9. If all commands run without errors, enter the final command: `poetry run app`
10. You should now see the server running in your terminal.
### Prerequisites
- Docker
- Docker Compose V2 (comes with Docker Desktop, or can be installed separately)
### Running the System
To run the AutoGPT Platform, follow these steps:
1. Clone this repository to your local machine.
2. Navigate to the project directory.
3. Run the following command:
```
docker compose up -d
```
This command will start all the necessary services defined in the `docker-compose.yml` file in detached mode.
### Docker Compose Commands
Here are some useful Docker Compose commands for managing your AutoGPT Platform:
- `docker compose up -d`: Start the services in detached mode.
- `docker compose stop`: Stop the running services without removing them.
- `docker compose rm`: Remove stopped service containers.
- `docker compose build`: Build or rebuild services.
- `docker compose down`: Stop and remove containers, networks, and volumes.
- `docker compose watch`: Watch for changes in your services and automatically update them.
### Sample Scenarios
Here are some common scenarios where you might use multiple Docker Compose commands:
1. Updating and restarting a specific service:
```
docker compose build api_srv
docker compose up -d --no-deps api_srv
```
This rebuilds the `api_srv` service and restarts it without affecting other services.
2. Viewing logs for troubleshooting:
```
docker compose logs -f api_srv ws_srv
```
This shows and follows the logs for both `api_srv` and `ws_srv` services.
3. Scaling a service for increased load:
```
docker compose up -d --scale executor=3
```
This scales the `executor` service to 3 instances to handle increased load.
4. Stopping the entire system for maintenance:
```
docker compose stop
docker compose rm -f
docker compose pull
docker compose up -d
```
This stops all services, removes containers, pulls the latest images, and restarts the system.
5. Developing with live updates:
```
docker compose watch
```
This watches for changes in your code and automatically updates the relevant services.
6. Checking the status of services:
```
docker compose ps
```
This shows the current status of all services defined in your docker-compose.yml file.
These scenarios demonstrate how to use Docker Compose commands in combination to manage your AutoGPT Platform effectively.
### Persisting Data
To persist data for PostgreSQL and Redis, you can modify the `docker-compose.yml` file to add volumes. Here's how:
1. Open the `docker-compose.yml` file in a text editor.
2. Add volume configurations for PostgreSQL and Redis services:
```yaml
services:
postgres:
# ... other configurations ...
volumes:
- postgres_data:/var/lib/postgresql/data
redis:
# ... other configurations ...
volumes:
- redis_data:/data
volumes:
postgres_data:
redis_data:
```
3. Save the file and run `docker compose up -d` to apply the changes.
This configuration will create named volumes for PostgreSQL and Redis, ensuring that your data persists across container restarts.
11. Navigate back to the "rnd" folder.
12. Open the "AutoGPT builder" folder.
13. Open the README file in this folder: [doc](./autogpt_builder/README.md#getting-started).
14. In your terminal, run the following commands:
```
npm install
```
```
npm run dev
```
15. Once the front-end is running, click the link to navigate to `localhost:3000`.
16. Click on the "Build" option.
17. Add a few blocks to test the functionality.
18. Connect the blocks together.
19. Click "Run".
20. Check your terminal window - you should see that the server has received the request, is processing it, and has executed it.
And there you have it! You've successfully set up and tested AutoGPT.

View File

@@ -1,6 +1,6 @@
NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8001/api/v1/market
NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8005/api/v1/market
## Supabase credentials
## YOU ONLY NEED THEM IF YOU WANT TO USE SUPABASE USER AUTHENTICATION

View File

@@ -1,19 +1,19 @@
# Base stage for both dev and prod
FROM node:21-alpine AS base
WORKDIR /app
COPY autogpt_builder/package.json autogpt_builder/yarn.lock ./
COPY rnd/autogpt_builder/package.json rnd/autogpt_builder/yarn.lock ./
RUN yarn install --frozen-lockfile
# Dev stage
FROM base AS dev
ENV NODE_ENV=development
COPY autogpt_builder/ .
COPY rnd/autogpt_builder/ .
EXPOSE 3000
CMD ["npm", "run", "dev"]
CMD ["yarn", "run", "dev"]
# Build stage for prod
FROM base AS build
COPY autogpt_builder/ .
COPY rnd/autogpt_builder/ .
RUN npm run build
# Prod stage

View File

@@ -12,6 +12,7 @@ import InputModalComponent from "./InputModalComponent";
import OutputModalComponent from "./OutputModalComponent";
import {
BlockIORootSchema,
BlockIOStringSubSchema,
Category,
NodeExecutionResult,
BlockUIType,
@@ -22,7 +23,10 @@ import { Switch } from "@/components/ui/switch";
import { Copy, Trash2 } from "lucide-react";
import { history } from "./history";
import NodeHandle from "./NodeHandle";
import { NodeGenericInputField } from "./node-input-components";
import {
NodeGenericInputField,
NodeTextBoxInput,
} from "./node-input-components";
import SchemaTooltip from "./SchemaTooltip";
import { getPrimaryCategoryColor } from "@/lib/utils";
import { FlowContext } from "./Flow";
@@ -151,17 +155,77 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
let keys = Object.entries(schema.properties);
switch (nodeType) {
case BlockUIType.INPUT:
// For INPUT blocks, only show the 'value' property
// For INPUT blocks, don't include connection handles
return keys.map(([propKey, propSchema]) => {
const isRequired = data.inputSchema.required?.includes(propKey);
const isConnected = isHandleConnected(propKey);
const isAdvanced = propSchema.advanced;
return (
<div key={propKey}>
<span className="text-m green -mb-1 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
(isRequired || isAdvancedOpen || !isAdvanced) && (
<div key={propKey}>
<span className="text-m green -mb-1 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
<div key={propKey} onMouseOver={() => {}}>
{!isConnected && (
<NodeGenericInputField
className="mb-2 mt-1"
propKey={propKey}
propSchema={propSchema}
currentValue={getValue(propKey)}
connections={data.connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
errors={data.errors ?? {}}
displayName={propSchema.title || beautifyString(propKey)}
/>
)}
</div>
</div>
)
);
});
case BlockUIType.NOTE:
// For NOTE blocks, don't render any input handles
const [noteKey, noteSchema] = keys[0];
return (
<div key={noteKey}>
<NodeTextBoxInput
className=""
selfKey={noteKey}
schema={noteSchema as BlockIOStringSubSchema}
value={getValue(noteKey)}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
error={data.errors?.[noteKey] ?? ""}
displayName={noteSchema.title || beautifyString(noteKey)}
/>
</div>
);
case BlockUIType.OUTPUT:
// For OUTPUT blocks, only show the 'value' property
return keys.map(([propKey, propSchema]) => {
const isRequired = data.inputSchema.required?.includes(propKey);
const isConnected = isHandleConnected(propKey);
const isAdvanced = propSchema.advanced;
return (
(isRequired || isAdvancedOpen || !isAdvanced) && (
<div key={propKey} onMouseOver={() => {}}>
{propKey !== "value" ? (
<span className="text-m green -mb-1 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
) : (
<NodeHandle
keyName={propKey}
isConnected={isConnected}
isRequired={isRequired}
schema={propSchema}
side="left"
/>
)}
{!isConnected && (
<NodeGenericInputField
className="mb-2 mt-1"
@@ -176,66 +240,7 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
/>
)}
</div>
</div>
);
});
case BlockUIType.NOTE:
// For NOTE blocks, don't render any input handles
return keys.map(([propKey, propSchema]) => {
const isConnected = isHandleConnected(propKey);
return (
<div key={propKey}>
<NodeGenericInputField
className="mb-2 mt-1"
propKey={propKey}
propSchema={propSchema}
currentValue={getValue(propKey)}
connections={data.connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
errors={data.errors ?? {}}
displayName={propSchema.title || beautifyString(propKey)}
/>
</div>
);
});
case BlockUIType.OUTPUT:
// For OUTPUT blocks, only show the 'recorded_value' property
return keys.map(([propKey, propSchema]) => {
const isRequired = data.inputSchema.required?.includes(propKey);
const isConnected = isHandleConnected(propKey);
const isAdvanced = propSchema.advanced;
return (
<div key={propKey} onMouseOver={() => {}}>
{propKey !== "value" ? (
<span className="text-m green -mb-1 text-gray-900">
{propSchema.title || beautifyString(propKey)}
</span>
) : (
<NodeHandle
keyName={propKey}
isConnected={isConnected}
isRequired={isRequired}
schema={propSchema}
side="left"
/>
)}
{!isConnected && (
<NodeGenericInputField
className="mb-2 mt-1"
propKey={propKey}
propSchema={propSchema}
currentValue={getValue(propKey)}
connections={data.connections}
handleInputChange={handleInputChange}
handleInputClick={handleInputClick}
errors={data.errors ?? {}}
displayName={propSchema.title || beautifyString(propKey)}
/>
)}
</div>
)
);
});
@@ -518,13 +523,13 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
return (
<div
className={`${blockClasses} ${errorClass} ${statusClass}`}
className={`${data.uiType === BlockUIType.NOTE ? "w-[300px]" : "w-[500px]"} ${blockClasses} ${errorClass} ${statusClass} ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : "bg-white"}`}
onMouseEnter={handleHovered}
onMouseLeave={handleMouseLeave}
data-id={`custom-node-${id}`}
>
<div
className={`mb-2 p-3 ${getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
className={`mb-2 p-3 ${data.uiType === BlockUIType.NOTE ? "bg-yellow-100" : getPrimaryCategoryColor(data.categories)} rounded-t-xl`}
>
<div className="flex items-center justify-between">
<div className="font-roboto p-3 text-lg font-semibold">
@@ -557,17 +562,24 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
)}
</div>
</div>
<div className="flex items-start justify-between gap-2 p-3">
{data.uiType !== BlockUIType.NOTE ? (
<div className="flex items-start justify-between p-3">
<div>
{data.inputSchema &&
generateInputHandles(data.inputSchema, data.uiType)}
</div>
<div className="flex-none">
{data.outputSchema &&
generateOutputHandles(data.outputSchema, data.uiType)}
</div>
</div>
) : (
<div>
{data.inputSchema &&
generateInputHandles(data.inputSchema, data.uiType)}
</div>
<div className="flex-none">
{data.outputSchema &&
generateOutputHandles(data.outputSchema, data.uiType)}
</div>
</div>
{isOutputOpen && (
)}
{isOutputOpen && data.uiType !== BlockUIType.NOTE && (
<div
data-id="latest-output"
className="nodrag m-3 break-words rounded-md border-[1.5px] p-2"
@@ -594,7 +606,7 @@ export function CustomNode({ data, id, width, height }: NodeProps<CustomNode>) {
<div className="mt-2.5 flex items-center pb-4 pl-4">
<Switch checked={isOutputOpen} onCheckedChange={toggleOutput} />
<span className="m-1 mr-4">Output</span>
{hasAdvancedFields && data.uiType === BlockUIType.STANDARD && (
{hasAdvancedFields && (
<>
<Switch onCheckedChange={toggleAdvancedSettings} />
<span className="m-1">Advanced</span>

View File

@@ -1,6 +1,5 @@
.custom-node {
color: #000000;
width: 500px;
box-sizing: border-box;
transition: border-color 0.3s ease-in-out;
}

View File

@@ -10,7 +10,7 @@ import {
BlockIONumberSubSchema,
BlockIOBooleanSubSchema,
} from "@/lib/autogpt-server-api/types";
import { FC, useCallback, useEffect, useState } from "react";
import React, { FC, useCallback, useEffect, useState } from "react";
import { Button } from "./ui/button";
import { Switch } from "./ui/switch";
import {
@@ -587,6 +587,52 @@ const NodeStringInput: FC<{
);
};
export const NodeTextBoxInput: FC<{
selfKey: string;
schema: BlockIOStringSubSchema;
value?: string;
error?: string;
handleInputChange: NodeObjectInputTreeProps["handleInputChange"];
handleInputClick: NodeObjectInputTreeProps["handleInputClick"];
className?: string;
displayName: string;
}> = ({
selfKey,
schema,
value = "",
error,
handleInputChange,
handleInputClick,
className,
displayName,
}) => {
return (
<div className={className}>
<div
className="nodrag relative m-0 h-[200px] w-full bg-yellow-100 p-4"
onClick={schema.secret ? () => handleInputClick(selfKey) : undefined}
>
<textarea
id={selfKey}
value={schema.secret && value ? "********" : value}
readOnly={schema.secret}
placeholder={
schema?.placeholder || `Enter ${beautifyString(displayName)}`
}
onChange={(e) => handleInputChange(selfKey, e.target.value)}
onBlur={(e) => handleInputChange(selfKey, e.target.value)}
className="h-full w-full resize-none overflow-hidden border-none bg-transparent text-lg text-black outline-none"
style={{
fontSize: "min(1em, 16px)",
lineHeight: "1.2",
}}
/>
</div>
{error && <span className="error-message">{error}</span>}
</div>
);
};
const NodeNumberInput: FC<{
selfKey: string;
schema: BlockIONumberSubSchema;

File diff suppressed because it is too large

View File

@@ -1,13 +1,21 @@
import secrets
from datetime import datetime, timedelta, timezone
from typing import cast
from supabase import Client, create_client
from supabase import Client
from .types import Credentials, OAuth2Credentials, UserMetadata, UserMetadataRaw
from .types import (
Credentials,
OAuth2Credentials,
OAuthState,
UserMetadata,
UserMetadataRaw,
)
class SupabaseIntegrationCredentialsStore:
def __init__(self, url: str, key: str):
self.supabase: Client = create_client(url, key)
def __init__(self, supabase: Client):
self.supabase = supabase
def add_creds(self, user_id: str, credentials: Credentials) -> None:
if self.get_creds_by_id(user_id, credentials.id):
@@ -73,6 +81,52 @@ class SupabaseIntegrationCredentialsStore:
]
self._set_user_integration_creds(user_id, filtered_credentials)
async def store_state_token(self, user_id: str, provider: str) -> str:
token = secrets.token_urlsafe(32)
expires_at = datetime.now(timezone.utc) + timedelta(minutes=10)
state = OAuthState(
token=token, provider=provider, expires_at=int(expires_at.timestamp())
)
user_metadata = self._get_user_metadata(user_id)
oauth_states = user_metadata.get("integration_oauth_states", [])
oauth_states.append(state.model_dump())
user_metadata["integration_oauth_states"] = oauth_states
self.supabase.auth.admin.update_user_by_id(
user_id, {"user_metadata": user_metadata}
)
return token
async def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
user_metadata = self._get_user_metadata(user_id)
oauth_states = user_metadata.get("integration_oauth_states", [])
now = datetime.now(timezone.utc)
valid_state = next(
(
state
for state in oauth_states
if state["token"] == token
and state["provider"] == provider
and state["expires_at"] > now.timestamp()
),
None,
)
if valid_state:
# Remove the used state
oauth_states.remove(valid_state)
user_metadata["integration_oauth_states"] = oauth_states
self.supabase.auth.admin.update_user_by_id(
user_id, {"user_metadata": user_metadata}
)
return True
return False
def _set_user_integration_creds(
self, user_id: str, credentials: list[Credentials]
) -> None:
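For orientation, a minimal sketch of the new state-token round trip, assuming a configured Supabase client with admin access and placeholder credentials and user id:
```python
import asyncio

from supabase import create_client

from autogpt_libs.supabase_integration_credentials_store import (
    SupabaseIntegrationCredentialsStore,
)


async def main():
    # Placeholder URL, key, and user id, purely for illustration
    supabase = create_client("https://<project>.supabase.co", "<service-role-key>")
    store = SupabaseIntegrationCredentialsStore(supabase)

    token = await store.store_state_token("user-123", "github")  # valid for 10 minutes
    assert await store.verify_state_token("user-123", token, "github")  # consumed here
    assert not await store.verify_state_token("user-123", token, "github")  # single-use


asyncio.run(main())
```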

View File

@@ -19,9 +19,11 @@ class _BaseCredentials(BaseModel):
class OAuth2Credentials(_BaseCredentials):
type: Literal["oauth2"] = "oauth2"
access_token: SecretStr
access_token_expires_at: Optional[int] # seconds
access_token_expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the access token expires (if at all)"""
refresh_token: Optional[SecretStr]
refresh_token_expires_at: Optional[int] # seconds
refresh_token_expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the refresh token expires (if at all)"""
scopes: list[str]
metadata: dict[str, Any] = Field(default_factory=dict)
@@ -29,7 +31,8 @@ class OAuth2Credentials(_BaseCredentials):
class APIKeyCredentials(_BaseCredentials):
type: Literal["api_key"] = "api_key"
api_key: SecretStr
expires_at: Optional[int] # seconds
expires_at: Optional[int]
"""Unix timestamp (seconds) indicating when the API key expires (if at all)"""
Credentials = Annotated[
@@ -38,9 +41,18 @@ Credentials = Annotated[
]
class OAuthState(BaseModel):
token: str
provider: str
expires_at: int
"""Unix timestamp (seconds) indicating when this OAuth state expires"""
class UserMetadata(BaseModel):
integration_credentials: list[Credentials] = Field(default_factory=list)
integration_oauth_states: list[OAuthState] = Field(default_factory=list)
class UserMetadataRaw(TypedDict, total=False):
integration_credentials: list[dict]
integration_oauth_states: list[dict]
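A short hypothetical construction showing the timestamp convention in practice (all values made up):
```python
import time

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from pydantic import SecretStr

creds = OAuth2Credentials(
    provider="github",
    title="GitHub",
    access_token=SecretStr("gho_example"),
    access_token_expires_at=int(time.time()) + 3600,  # expires one hour from now
    refresh_token=SecretStr("ghr_example"),
    refresh_token_expires_at=None,  # non-expiring refresh token
    scopes=["repo"],
)

# The Unix-timestamp convention makes expiry checks a plain integer comparison:
is_expired = (
    creds.access_token_expires_at is not None
    and creds.access_token_expires_at < int(time.time())
)
print(is_expired)  # False
```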

View File

@@ -11,6 +11,7 @@ REDIS_PASSWORD=password
AUTH_ENABLED=false
APP_ENV="local"
PYRO_HOST=localhost
SENTRY_DSN=
## ===== OPTIONAL API KEYS ===== ##

View File

@@ -1,14 +1,14 @@
FROM python:3.11-slim-buster as server_base
FROM python:3.11-slim-buster AS server_base
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
WORKDIR /app
# postgresql-client is needed to check if the postgres service is ready for running migrations
RUN apt-get update \
&& apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
&& apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
@@ -17,7 +17,6 @@ RUN apt-get update \
&& make prefix=/usr all \
&& make prefix=/usr install
ENV POETRY_VERSION=1.8.3 \
POETRY_HOME="/opt/poetry" \
POETRY_NO_INTERACTION=1 \
@@ -25,22 +24,34 @@ ENV POETRY_VERSION=1.8.3 \
PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry
FROM server_base AS server_dependencies
RUN mkdir -p /app/autogpt
RUN mkdir -p /app/forge
RUN mkdir -p /app/rnd/autogpt_libs
RUN mkdir -p /app/rnd/autogpt_server
COPY autogpt /app/autogpt
COPY forge /app/forge
COPY rnd/autogpt_libs /app/rnd/autogpt_libs
COPY rnd/autogpt_server/poetry.lock rnd/autogpt_server/pyproject.toml /app/rnd/autogpt_server/
WORKDIR /app/rnd/autogpt_server
COPY rnd/autogpt_server/pyproject.toml rnd/autogpt_server/poetry.lock ./
RUN poetry install --no-interaction --no-ansi
FROM server_dependencies AS server_prisma
COPY rnd/autogpt_server/schema.prisma ./
RUN poetry run prisma generate
COPY rnd/autogpt_server /app/rnd/autogpt_server
FROM server_base as server
FROM server_prisma AS server
COPY rnd/autogpt_server /app/rnd/autogpt_server
ENV PORT=8000
ENV DATABASE_URL=""
ENV PORT=8000
CMD ["poetry", "run", "rest"]

View File

@@ -1,48 +0,0 @@
FROM python:3.11-slim-buster as server_base
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
WORKDIR /app
RUN apt-get update \
&& apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
&& tar -zxf git.tar.gz \
&& cd git-* \
&& make prefix=/usr all \
&& make prefix=/usr install
ENV POETRY_VERSION=1.8.3 \
POETRY_HOME="/opt/poetry" \
POETRY_NO_INTERACTION=1 \
POETRY_VIRTUALENVS_CREATE=false \
PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry
COPY autogpt /app/autogpt
COPY forge /app/forge
COPY rnd/autogpt_libs /app/rnd/autogpt_libs
WORKDIR /app/rnd/autogpt_server
COPY rnd/autogpt_server/pyproject.toml rnd/autogpt_server/poetry.lock ./
RUN poetry install --no-interaction --no-ansi
COPY rnd/autogpt_server/schema.prisma ./
RUN poetry run prisma generate
COPY rnd/autogpt_server /app/rnd/autogpt_server
FROM server_base as server
ENV PORT=8001
ENV DATABASE_URL=""
CMD ["poetry", "run", "ws"]

View File

@@ -101,7 +101,7 @@ docker compose down
If you run into issues with dangling orphans, try:
```sh
docker-compose down --volumes --remove-orphans && docker-compose up --force-recreate --renew-anon-volumes --remove-orphans
docker compose down --volumes --remove-orphans && docker-compose up --force-recreate --renew-anon-volumes --remove-orphans
```
## Testing
@@ -183,6 +183,13 @@ A communication layer (`service.py`) is created to decouple the communication li
Currently, the IPC is done using Pyro5 and abstracted in a way that allows a function decorated with `@expose` to be called from a different process.
By default the daemons run on the following ports:
Execution Manager Daemon: 8002
Execution Scheduler Daemon: 8003
Rest Server Daemon: 8004
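For illustration, a minimal sketch of how a client now reaches a service under the port-based addressing (no name server involved); the proxy exposes every method decorated with `@expose`:
```python
from autogpt_server.executor import ExecutionManager
from autogpt_server.util.service import get_service_client
from autogpt_server.util.settings import Config

# Connects directly to the Execution Manager daemon on its configured port;
# any @expose-decorated method can then be called on the returned proxy.
manager = get_service_client(ExecutionManager, Config().execution_manager_port)
```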
## Adding a New Agent Block
To add a new agent block, you need to create a new class that inherits from `Block` and provides the following information:

View File

@@ -26,10 +26,8 @@ def main(**kwargs):
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server import AgentServer, WebsocketServer
from autogpt_server.util.service import PyroNameServer
run_processes(
PyroNameServer(),
ExecutionManager(),
ExecutionScheduler(),
WebsocketServer(),

View File

@@ -55,15 +55,15 @@ for cls in all_subclasses(Block):
raise ValueError(f"Block ID {block.name} error: {block.id} is already in use")
# Prevent duplicate field name in input_schema and output_schema
duplicate_field_names = set(block.input_schema.__fields__.keys()) & set(
block.output_schema.__fields__.keys()
duplicate_field_names = set(block.input_schema.model_fields.keys()) & set(
block.output_schema.model_fields.keys()
)
if duplicate_field_names:
raise ValueError(
f"{block.name} has duplicate field names in input_schema and output_schema: {duplicate_field_names}"
)
for field in block.input_schema.__fields__.values():
for field in block.input_schema.model_fields.values():
if field.annotation is bool and field.default not in (True, False):
raise ValueError(f"{block.name} has a boolean field with no default value")

View File

@@ -1,6 +1,5 @@
from abc import ABC, abstractmethod
import re
from typing import Any, Generic, List, TypeVar
from typing import Any, List
from jinja2 import BaseLoader, Environment
from pydantic import Field
@@ -153,13 +152,20 @@ class AgentInputBlock(Block):
class Input(BlockSchema):
value: Any = SchemaField(description="The value to be passed as input.")
name: str = SchemaField(description="The name of the input.")
description: str = SchemaField(description="The description of the input.")
description: str = SchemaField(
description="The description of the input.",
default="",
advanced=True,
)
placeholder_values: List[Any] = SchemaField(
description="The placeholder values to be passed as input."
description="The placeholder values to be passed as input.",
default=[],
advanced=True,
)
limit_to_placeholder_values: bool = SchemaField(
description="Whether to limit the selection to placeholder values.",
default=False,
advanced=True,
)
class Output(BlockSchema):
@@ -222,9 +228,15 @@ class AgentOutputBlock(Block):
class Input(BlockSchema):
value: Any = SchemaField(description="The value to be recorded as output.")
name: str = SchemaField(description="The name of the output.")
description: str = SchemaField(description="The description of the output.")
description: str = SchemaField(
description="The description of the output.",
default="",
advanced=True,
)
format: str = SchemaField(
description="The format string to be used to format the recorded_value."
description="The format string to be used to format the recorded_value.",
default="",
advanced=True,
)
class Output(BlockSchema):
@@ -248,24 +260,24 @@ class AgentOutputBlock(Block):
"value": "Hello, World!",
"name": "output_1",
"description": "This is a test output.",
"format": "{value}!!",
"format": "{{ output_1 }}!!",
},
{
"value": 42,
"value": "42",
"name": "output_2",
"description": "This is another test output.",
"format": "{value}",
"format": "{{ output_2 }}",
},
{
"value": MockObject(value="!!", key="key"),
"name": "output_3",
"description": "This is a test output with a mock object.",
"format": "{value}",
"format": "{{ output_3 }}",
},
],
test_output=[
("output", "Hello, World!!!"),
("output", 42),
("output", "42"),
("output", MockObject(value="!!", key="key")),
],
categories={BlockCategory.OUTPUT, BlockCategory.BASIC},
@@ -281,9 +293,9 @@ class AgentOutputBlock(Block):
try:
fmt = re.sub(r"(?<!{){[ a-zA-Z0-9_]+}", r"{\g<0>}", input_data.format)
template = jinja.from_string(fmt)
yield "output", template.render(input_data.value)
except Exception:
yield "output", f"Error: {input_data.value}"
yield "output", template.render({input_data.name: input_data.value})
except Exception as e:
yield "output", f"Error: {e}, {input_data.value}"
else:
yield "output", input_data.value
@@ -427,7 +439,8 @@ class NoteBlock(Block):
class Input(BlockSchema):
text: str = SchemaField(description="The text to display in the sticky note.")
class Output(BlockSchema): ...
class Output(BlockSchema):
output: str = SchemaField(description="The text to display in the sticky note.")
def __init__(self):
super().__init__(
@@ -437,8 +450,11 @@ class NoteBlock(Block):
input_schema=NoteBlock.Input,
output_schema=NoteBlock.Output,
test_input={"text": "Hello, World!"},
test_output=None,
test_output=[
("output", "Hello, World!"),
],
ui_type=BlockUIType.NOTE,
)
def run(self, input_data: Input) -> BlockOutput: ...
def run(self, input_data: Input) -> BlockOutput:
yield "output", input_data.text

View File

@@ -438,7 +438,7 @@ class Message(BlockSchema):
class AIConversationBlock(Block):
class Input(BlockSchema):
messages: List[Message] = SchemaField(
description="List of messages in the conversation.", min_items=1
description="List of messages in the conversation.", min_length=1
)
model: LlmModel = SchemaField(
default=LlmModel.GPT4_TURBO,

View File

@@ -0,0 +1,15 @@
from autogpt_server.app import run_processes
from autogpt_server.executor import ExecutionManager


def main():
    """
    Run all the processes required for the AutoGPT-server REST API.
    """
    run_processes(
        ExecutionManager(),
    )


if __name__ == "__main__":
    main()

View File

@@ -364,7 +364,7 @@ def validate_exec(
def get_agent_server_client() -> "AgentServer":
from autogpt_server.server.rest_api import AgentServer
return get_service_client(AgentServer)
return get_service_client(AgentServer, Config().agent_server_port)
class Executor:
@@ -648,6 +648,7 @@ class Executor:
class ExecutionManager(AppService):
def __init__(self):
super().__init__(port=Config().execution_manager_port)
self.use_db = True
self.pool_size = Config().num_graph_workers
self.queue = ExecutionQueue[GraphExecution]()

View File

@@ -9,6 +9,7 @@ from autogpt_server.data import schedule as model
from autogpt_server.data.block import BlockInput
from autogpt_server.executor.manager import ExecutionManager
from autogpt_server.util.service import AppService, expose, get_service_client
from autogpt_server.util.settings import Config
logger = logging.getLogger(__name__)
@@ -19,13 +20,15 @@ def log(msg, **kwargs):
class ExecutionScheduler(AppService):
def __init__(self, refresh_interval=10):
super().__init__(port=Config().execution_scheduler_port)
self.use_db = True
self.last_check = datetime.min
self.refresh_interval = refresh_interval
self.use_redis = False
@property
def execution_manager_client(self) -> ExecutionManager:
return get_service_client(ExecutionManager)
return get_service_client(ExecutionManager, Config().execution_manager_port)
def run_service(self):
scheduler = BackgroundScheduler()

View File

@@ -0,0 +1,15 @@
from .base import BaseOAuthHandler
from .github import GitHubOAuthHandler
from .google import GoogleOAuthHandler
from .notion import NotionOAuthHandler

HANDLERS_BY_NAME: dict[str, type[BaseOAuthHandler]] = {
    handler.PROVIDER_NAME: handler
    for handler in [
        GitHubOAuthHandler,
        GoogleOAuthHandler,
        NotionOAuthHandler,
    ]
}

__all__ = ["HANDLERS_BY_NAME"]

View File

@@ -0,0 +1,48 @@
import time
from abc import ABC, abstractmethod
from typing import ClassVar

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials


class BaseOAuthHandler(ABC):
    PROVIDER_NAME: ClassVar[str]

    @abstractmethod
    def __init__(self, client_id: str, client_secret: str, redirect_uri: str): ...

    @abstractmethod
    def get_login_url(self, scopes: list[str], state: str) -> str:
        """Constructs a login URL that the user can be redirected to"""
        ...

    @abstractmethod
    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        """Exchanges the acquired authorization code from login for a set of tokens"""
        ...

    @abstractmethod
    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        """Implements the token refresh mechanism"""
        ...

    def refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if credentials.provider != self.PROVIDER_NAME:
            raise ValueError(
                f"{self.__class__.__name__} can not refresh tokens "
                f"for other provider '{credentials.provider}'"
            )
        return self._refresh_tokens(credentials)

    def get_access_token(self, credentials: OAuth2Credentials) -> str:
        """Returns a valid access token, refreshing it first if needed"""
        if self.needs_refresh(credentials):
            credentials = self.refresh_tokens(credentials)
        return credentials.access_token.get_secret_value()

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        """Indicates whether the given tokens need to be refreshed"""
        return (
            credentials.access_token_expires_at is not None
            and credentials.access_token_expires_at < int(time.time()) + 300
        )

View File

@@ -0,0 +1,99 @@
import time
from typing import Optional
from urllib.parse import urlencode

import requests

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from .base import BaseOAuthHandler


class GitHubOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at:
    - [Authorizing OAuth apps - GitHub Docs](https://docs.github.com/en/apps/oauth-apps/building-oauth-apps/authorizing-oauth-apps)
    - [Refreshing user access tokens - GitHub Docs](https://docs.github.com/en/apps/creating-github-apps/authenticating-with-a-github-app/refreshing-user-access-tokens)

    Notes:
    - By default, token expiration is disabled on GitHub Apps. This means the access
      token doesn't expire and no refresh token is returned by the authorization flow.
    - When token expiration gets enabled, any existing tokens will remain non-expiring.
    - When token expiration gets disabled, token refreshes will return a non-expiring
      access token *with no refresh token*.
    """  # noqa

    PROVIDER_NAME = "github"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://github.com/login/oauth/authorize"
        self.token_url = "https://github.com/login/oauth/access_token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "scope": " ".join(scopes),
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        return self._request_tokens({"code": code, "redirect_uri": self.redirect_uri})

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        if not credentials.refresh_token:
            return credentials

        return self._request_tokens(
            {
                "refresh_token": credentials.refresh_token.get_secret_value(),
                "grant_type": "refresh_token",
            }
        )

    def _request_tokens(
        self,
        params: dict[str, str],
        current_credentials: Optional[OAuth2Credentials] = None,
    ) -> OAuth2Credentials:
        request_body = {
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            **params,
        }
        headers = {"Accept": "application/json"}
        response = requests.post(self.token_url, data=request_body, headers=headers)
        response.raise_for_status()
        token_data: dict = response.json()

        now = int(time.time())
        new_credentials = OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=current_credentials.title if current_credentials else "GitHub",
            access_token=token_data["access_token"],
            # Token refresh responses have an empty `scope` property (see docs),
            # so we have to get the scope from the existing credentials object.
            scopes=(
                token_data.get("scope", "").split(",")
                or (current_credentials.scopes if current_credentials else [])
            ),
            # Refresh token and expiration intervals are only given if token expiration
            # is enabled in the GitHub App's settings.
            refresh_token=token_data.get("refresh_token"),
            access_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("expires_in", None))
                else None
            ),
            refresh_token_expires_at=(
                now + expires_in
                if (expires_in := token_data.get("refresh_token_expires_in", None))
                else None
            ),
        )
        if current_credentials:
            new_credentials.id = current_credentials.id
        return new_credentials

View File

@@ -0,0 +1,96 @@
from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import Flow
from pydantic import SecretStr

from .base import BaseOAuthHandler


class GoogleOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.google.com/identity/protocols/oauth2/web-server
    """  # noqa

    PROVIDER_NAME = "google"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.token_uri = "https://oauth2.googleapis.com/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        flow = self._setup_oauth_flow(scopes)
        flow.redirect_uri = self.redirect_uri
        authorization_url, _ = flow.authorization_url(
            access_type="offline",
            include_granted_scopes="true",
            state=state,
            prompt="consent",
        )
        return authorization_url

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        flow = self._setup_oauth_flow(None)
        flow.redirect_uri = self.redirect_uri
        flow.fetch_token(code=code)

        google_creds = flow.credentials
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.token
        assert google_creds.refresh_token
        assert google_creds.expiry
        assert google_creds.scopes
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title="Google",
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Google credentials should ALWAYS have a refresh token
        assert credentials.refresh_token

        google_creds = Credentials(
            token=credentials.access_token.get_secret_value(),
            refresh_token=credentials.refresh_token.get_secret_value(),
            token_uri=self.token_uri,
            client_id=self.client_id,
            client_secret=self.client_secret,
            scopes=credentials.scopes,
        )
        # Google's OAuth library is poorly typed so we need some of these:
        assert google_creds.refresh_token
        assert google_creds.scopes

        google_creds.refresh(Request())
        assert google_creds.expiry
        return OAuth2Credentials(
            id=credentials.id,
            provider=self.PROVIDER_NAME,
            title=credentials.title,
            access_token=SecretStr(google_creds.token),
            refresh_token=SecretStr(google_creds.refresh_token),
            access_token_expires_at=int(google_creds.expiry.timestamp()),
            refresh_token_expires_at=None,
            scopes=google_creds.scopes,
        )

    def _setup_oauth_flow(self, scopes: list[str] | None) -> Flow:
        return Flow.from_client_config(
            {
                "web": {
                    "client_id": self.client_id,
                    "client_secret": self.client_secret,
                    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
                    "token_uri": self.token_uri,
                }
            },
            scopes=scopes,
        )

View File

@@ -0,0 +1,76 @@
from base64 import b64encode
from urllib.parse import urlencode

import requests

from autogpt_libs.supabase_integration_credentials_store import OAuth2Credentials

from .base import BaseOAuthHandler


class NotionOAuthHandler(BaseOAuthHandler):
    """
    Based on the documentation at https://developers.notion.com/docs/authorization

    Notes:
    - Notion uses non-expiring access tokens and therefore doesn't have a refresh flow
    - Notion doesn't use scopes
    """

    PROVIDER_NAME = "notion"

    def __init__(self, client_id: str, client_secret: str, redirect_uri: str):
        self.client_id = client_id
        self.client_secret = client_secret
        self.redirect_uri = redirect_uri
        self.auth_base_url = "https://api.notion.com/v1/oauth/authorize"
        self.token_url = "https://api.notion.com/v1/oauth/token"

    def get_login_url(self, scopes: list[str], state: str) -> str:
        params = {
            "client_id": self.client_id,
            "redirect_uri": self.redirect_uri,
            "response_type": "code",
            "owner": "user",
            "state": state,
        }
        return f"{self.auth_base_url}?{urlencode(params)}"

    def exchange_code_for_tokens(self, code: str) -> OAuth2Credentials:
        request_body = {
            "grant_type": "authorization_code",
            "code": code,
            "redirect_uri": self.redirect_uri,
        }
        auth_str = b64encode(f"{self.client_id}:{self.client_secret}".encode()).decode()
        headers = {
            "Authorization": f"Basic {auth_str}",
            "Accept": "application/json",
        }
        response = requests.post(self.token_url, json=request_body, headers=headers)
        response.raise_for_status()
        token_data = response.json()
        return OAuth2Credentials(
            provider=self.PROVIDER_NAME,
            title=token_data.get("workspace_name", "Notion"),
            access_token=token_data["access_token"],
            refresh_token=None,
            access_token_expires_at=None,  # Notion tokens don't expire
            refresh_token_expires_at=None,
            scopes=[],
            metadata={
                "owner": token_data["owner"],
                "bot_id": token_data["bot_id"],
                "workspace_id": token_data["workspace_id"],
                "workspace_name": token_data.get("workspace_name"),
                "workspace_icon": token_data.get("workspace_icon"),
            },
        )

    def _refresh_tokens(self, credentials: OAuth2Credentials) -> OAuth2Credentials:
        # Notion doesn't support token refresh
        return credentials

    def needs_refresh(self, credentials: OAuth2Credentials) -> bool:
        # Notion access tokens don't expire
        return False

View File

@@ -1,7 +1,6 @@
from autogpt_server.app import run_processes
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.executor import ExecutionScheduler
from autogpt_server.server import AgentServer
from autogpt_server.util.service import PyroNameServer
def main():
@@ -9,8 +8,6 @@ def main():
Run all the processes required for the AutoGPT-server REST API.
"""
run_processes(
PyroNameServer(),
ExecutionManager(),
ExecutionScheduler(),
AgentServer(),
)

View File

@@ -0,0 +1,105 @@
import logging
from typing import Annotated, Literal

from autogpt_libs.supabase_integration_credentials_store import (
    SupabaseIntegrationCredentialsStore,
)
from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, Request
from pydantic import BaseModel
from supabase import Client

from autogpt_server.integrations.oauth import HANDLERS_BY_NAME, BaseOAuthHandler
from autogpt_server.util.settings import Settings

from .utils import get_supabase, get_user_id

logger = logging.getLogger(__name__)
settings = Settings()
integrations_api_router = APIRouter()


def get_store(supabase: Client = Depends(get_supabase)):
    return SupabaseIntegrationCredentialsStore(supabase)


class LoginResponse(BaseModel):
    login_url: str


@integrations_api_router.get("/{provider}/login")
async def login(
    provider: Annotated[str, Path(title="The provider to initiate an OAuth flow for")],
    user_id: Annotated[str, Depends(get_user_id)],
    request: Request,
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
    scopes: Annotated[
        str, Query(title="Comma-separated list of authorization scopes")
    ] = "",
) -> LoginResponse:
    handler = _get_provider_oauth_handler(request, provider)

    # Generate and store a secure random state token
    state = await store.store_state_token(user_id, provider)

    requested_scopes = scopes.split(",") if scopes else []
    login_url = handler.get_login_url(requested_scopes, state)

    return LoginResponse(login_url=login_url)


class CredentialsMetaResponse(BaseModel):
    credentials_id: str
    credentials_type: Literal["oauth2", "api_key"]


@integrations_api_router.post("/{provider}/callback")
async def callback(
    provider: Annotated[str, Path(title="The target provider for this OAuth exchange")],
    code: Annotated[str, Body(title="Authorization code acquired by user login")],
    state_token: Annotated[str, Body(title="Anti-CSRF nonce")],
    store: Annotated[SupabaseIntegrationCredentialsStore, Depends(get_store)],
    user_id: Annotated[str, Depends(get_user_id)],
    request: Request,
) -> CredentialsMetaResponse:
    handler = _get_provider_oauth_handler(request, provider)

    # Verify the state token
    if not await store.verify_state_token(user_id, state_token, provider):
        raise HTTPException(status_code=400, detail="Invalid or expired state token")

    try:
        credentials = handler.exchange_code_for_tokens(code)
    except Exception as e:
        logger.warning(f"Code->Token exchange failed for provider {provider}: {e}")
        raise HTTPException(status_code=400, detail=str(e))

    store.add_creds(user_id, credentials)
    return CredentialsMetaResponse(
        credentials_id=credentials.id,
        credentials_type=credentials.type,
    )


# -------- UTILITIES --------- #


def _get_provider_oauth_handler(req: Request, provider_name: str) -> BaseOAuthHandler:
    if provider_name not in HANDLERS_BY_NAME:
        raise HTTPException(
            status_code=404, detail=f"Unknown provider '{provider_name}'"
        )

    client_id = getattr(settings.secrets, f"{provider_name}_client_id")
    client_secret = getattr(settings.secrets, f"{provider_name}_client_secret")
    if not (client_id and client_secret):
        raise HTTPException(
            status_code=501,
            detail=f"Integration with provider '{provider_name}' is not configured",
        )

    handler_class = HANDLERS_BY_NAME[provider_name]
    return handler_class(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uri=str(req.url_for("callback", provider=provider_name)),
    )
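For orientation, a hypothetical client-side walk-through of the two endpoints above, assuming the server is listening on `localhost:8000` and auth is disabled locally (`AUTH_ENABLED=false`):
```python
import requests

BASE = "http://localhost:8000/api/integrations"

# Step 1: request a provider login URL; the anti-CSRF state token is
# generated and stored server-side.
resp = requests.get(f"{BASE}/github/login", params={"scopes": "repo,user"})
login_url = resp.json()["login_url"]  # redirect the user here

# Step 2: after the user authorizes, exchange the returned code and state token.
resp = requests.post(
    f"{BASE}/github/callback",
    json={"code": "<authorization-code>", "state_token": "<state-token>"},
)
print(resp.json())  # {"credentials_id": "...", "credentials_type": "oauth2"}
```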

View File

@@ -19,10 +19,11 @@ from autogpt_server.data.queue import AsyncEventQueue, AsyncRedisEventQueue
from autogpt_server.data.user import get_or_create_user
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server.model import CreateGraph, SetGraphActiveVersion
from autogpt_server.util.auth import get_user_id
from autogpt_server.util.lock import KeyedMutex
from autogpt_server.util.service import AppService, expose, get_service_client
from autogpt_server.util.settings import Settings
from autogpt_server.util.settings import Config, Settings
from .utils import get_user_id
settings = Settings()
@@ -33,6 +34,7 @@ class AgentServer(AppService):
_test_dependency_overrides = {}
def __init__(self, event_queue: AsyncEventQueue | None = None):
super().__init__(port=Config().agent_server_port)
self.event_queue = event_queue or AsyncRedisEventQueue()
@asynccontextmanager
@@ -70,127 +72,132 @@ class AgentServer(AppService):
)
# Define the API routes
router = APIRouter(prefix="/api")
router.dependencies.append(Depends(auth_middleware))
api_router = APIRouter(prefix="/api")
api_router.dependencies.append(Depends(auth_middleware))
router.add_api_route(
# Import & Attach sub-routers
from .integrations import integrations_api_router
api_router.include_router(integrations_api_router, prefix="/integrations")
api_router.add_api_route(
path="/auth/user",
endpoint=self.get_or_create_user_route,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/blocks",
endpoint=self.get_graph_blocks,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/blocks/{block_id}/execute",
endpoint=self.execute_graph_block,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs",
endpoint=self.get_graphs,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/templates",
endpoint=self.get_templates,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs",
endpoint=self.create_new_graph,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/templates",
endpoint=self.create_new_template,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}",
endpoint=self.get_graph,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/templates/{graph_id}",
endpoint=self.get_template,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}",
endpoint=self.update_graph,
methods=["PUT"],
)
router.add_api_route(
api_router.add_api_route(
path="/templates/{graph_id}",
endpoint=self.update_graph,
methods=["PUT"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/versions",
endpoint=self.get_graph_all_versions,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/templates/{graph_id}/versions",
endpoint=self.get_graph_all_versions,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/versions/{version}",
endpoint=self.get_graph,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/versions/active",
endpoint=self.set_graph_active_version,
methods=["PUT"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/input_schema",
endpoint=self.get_graph_input_schema,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/execute",
endpoint=self.execute_graph,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/executions",
endpoint=self.list_graph_runs,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/executions/{graph_exec_id}",
endpoint=self.get_graph_run_node_execution_results,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/executions/{graph_exec_id}/stop",
endpoint=self.stop_graph_run,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/schedules",
endpoint=self.create_schedule,
methods=["POST"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/{graph_id}/schedules",
endpoint=self.get_execution_schedules,
methods=["GET"],
)
router.add_api_route(
api_router.add_api_route(
path="/graphs/schedules/{schedule_id}",
endpoint=self.update_schedule,
methods=["PUT"],
)
router.add_api_route(
api_router.add_api_route(
path="/settings",
endpoint=self.update_configuration,
methods=["POST"],
@@ -198,7 +205,7 @@ class AgentServer(AppService):
app.add_exception_handler(500, self.handle_internal_http_error)
app.include_router(router)
app.include_router(api_router)
uvicorn.run(app, host="0.0.0.0", port=8000, log_config=None)
@@ -233,11 +240,11 @@ class AgentServer(AppService):
@property
def execution_manager_client(self) -> ExecutionManager:
return get_service_client(ExecutionManager)
return get_service_client(ExecutionManager, Config().execution_manager_port)
@property
def execution_scheduler_client(self) -> ExecutionScheduler:
return get_service_client(ExecutionScheduler)
return get_service_client(ExecutionScheduler, Config().execution_scheduler_port)
@classmethod
def handle_internal_http_error(cls, request: Request, exc: Exception):

View File

@@ -1,7 +1,11 @@
from autogpt_libs.auth import auth_middleware
from autogpt_libs.auth.middleware import auth_middleware
from fastapi import Depends, HTTPException
from supabase import Client, create_client
from autogpt_server.data.user import DEFAULT_USER_ID
from autogpt_server.util.settings import Settings
settings = Settings()
def get_user_id(payload: dict = Depends(auth_middleware)) -> str:
@@ -13,3 +17,7 @@ def get_user_id(payload: dict = Depends(auth_middleware)) -> str:
if not user_id:
raise HTTPException(status_code=401, detail="User ID not found in token")
return user_id
def get_supabase() -> Client:
return create_client(settings.secrets.supabase_url, settings.secrets.supabase_key)
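
A hedged usage sketch for the new get_supabase dependency; the endpoint path and table name are hypothetical, and the sketch assumes it runs in the same module as get_supabase:

from fastapi import Depends, FastAPI
from supabase import Client

app = FastAPI()

@app.get("/integrations/credentials")
def list_credentials(supabase: Client = Depends(get_supabase)):
    # supabase-py query; "credentials" is an illustrative table name
    return supabase.table("credentials").select("*").execute().data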

@@ -2,7 +2,7 @@ import functools
import logging
import os
import time
from typing import Callable, Tuple, TypeVar
from typing import Callable, ParamSpec, Tuple, TypeVar
from pydantic import BaseModel
@@ -24,18 +24,19 @@ def _end_measurement(
return end_wall_time - start_wall_time, end_cpu_time - start_cpu_time
P = ParamSpec("P")
T = TypeVar("T")
logger = logging.getLogger(__name__)
def time_measured(func: Callable[..., T]) -> Callable[..., Tuple[TimingInfo, T]]:
def time_measured(func: Callable[P, T]) -> Callable[P, Tuple[TimingInfo, T]]:
"""
Decorator to measure the time taken by a function to execute.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs) -> Tuple[TimingInfo, T]:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> Tuple[TimingInfo, T]:
start_wall_time, start_cpu_time = _start_measurement()
try:
result = func(*args, **kwargs)
@@ -49,13 +50,13 @@ def time_measured(func: Callable[..., T]) -> Callable[..., Tuple[TimingInfo, T]]
return wrapper
def error_logged(func: Callable[..., T]) -> Callable[..., T | None]:
def error_logged(func: Callable[P, T]) -> Callable[P, T | None]:
"""
Decorator to suppress and log any exceptions raised by a function.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs) -> T | None:
def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
try:
return func(*args, **kwargs)
except Exception as e:
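
The practical payoff of swapping Callable[..., T] for ParamSpec is that the wrapper keeps the wrapped function's exact signature, so type checkers can reject bad call sites. A minimal demonstration (Python 3.10+, which this package already requires):

import functools
from typing import Callable, ParamSpec, TypeVar

P = ParamSpec("P")
T = TypeVar("T")

def logged(func: Callable[P, T]) -> Callable[P, T]:
    @functools.wraps(func)
    def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
        print(f"calling {func.__name__}")
        return func(*args, **kwargs)
    return wrapper

@logged
def add(a: int, b: int) -> int:
    return a + b

add(1, 2)      # OK
# add("1", 2)  # passed silently under Callable[..., T]; mypy now rejects it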

@@ -1,12 +1,13 @@
import asyncio
import logging
import os
import threading
import time
from abc import abstractmethod
from typing import Any, Callable, Coroutine, Type, TypeVar, cast
import Pyro5.api
from Pyro5 import api as pyro
from Pyro5 import nameserver
from autogpt_server.data import db
from autogpt_server.data.queue import AsyncEventQueue, AsyncRedisEventQueue
@@ -42,25 +43,16 @@ def expose(func: C) -> C:
return pyro.expose(wrapper) # type: ignore
class PyroNameServer(AppProcess):
def run(self):
nameserver.start_ns_loop(host=pyro_host, port=9090)
@conn_retry
def _wait_for_ns(self):
pyro.locate_ns(host="localhost", port=9090)
def health_check(self):
self._wait_for_ns()
logger.info(f"{__class__.__name__} is ready")
class AppService(AppProcess):
shared_event_loop: asyncio.AbstractEventLoop
event_queue: AsyncEventQueue = AsyncRedisEventQueue()
use_db: bool = False
use_redis: bool = False
def __init__(self, port):
self.port = port
self.uri = None
@classmethod
@property
def service_name(cls) -> str:
@@ -108,11 +100,10 @@ class AppService(AppProcess):
@conn_retry
def __start_pyro(self):
daemon = pyro.Daemon(host=pyro_host)
ns = pyro.locate_ns(host=pyro_host, port=9090)
uri = daemon.register(self)
ns.register(self.service_name, uri)
logger.info(f"[{self.service_name}] Connected to Pyro; URI = {uri}")
host = Config().pyro_host
daemon = Pyro5.api.Daemon(host=host, port=self.port)
self.uri = daemon.register(self, objectId=self.service_name)
logger.info(f"[{self.service_name}] Connected to Pyro; URI = {self.uri}")
daemon.requestLoop()
def __start_async_loop(self):
@@ -122,16 +113,19 @@ class AppService(AppProcess):
AS = TypeVar("AS", bound=AppService)
def get_service_client(service_type: Type[AS]) -> AS:
def get_service_client(service_type: Type[AS], port: int) -> AS:
service_name = service_type.service_name
class DynamicClient:
@conn_retry
def __init__(self):
ns = pyro.locate_ns()
uri = ns.lookup(service_name)
self.proxy = pyro.Proxy(uri)
host = os.environ.get(f"{service_name.upper()}_HOST", "localhost")
uri = f"PYRO:{service_type.service_name}@{host}:{port}"
logger.debug(f"Connecting to service [{service_name}]. URI = {uri}")
self.proxy = Pyro5.api.Proxy(uri)
# Attempt to bind to ensure the connection is established
self.proxy._pyroBind()
logger.debug(f"Successfully connected to service [{service_name}]")
def __getattr__(self, name: str) -> Callable[..., Any]:
return getattr(self.proxy, name)
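
A self-contained sketch of the name-server-free wiring introduced here: the daemon binds a fixed host/port and registers under a stable object id, and the client builds the PYRO URI directly instead of looking it up. Class, host, and port are examples only:

import Pyro5.api

@Pyro5.api.expose
class Echo:
    def ping(self, msg: str) -> str:
        return msg

def serve(host: str = "0.0.0.0", port: int = 8002) -> None:
    daemon = Pyro5.api.Daemon(host=host, port=port)
    uri = daemon.register(Echo(), objectId="Echo")  # fixed id, so no NS lookup needed
    print(f"Serving at {uri}")  # PYRO:Echo@0.0.0.0:8002
    daemon.requestLoop()

def call(host: str = "localhost", port: int = 8002) -> str:
    proxy = Pyro5.api.Proxy(f"PYRO:Echo@{host}:{port}")
    proxy._pyroBind()  # fail fast if the daemon is not up yet
    return proxy.ping("hello")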

@@ -72,6 +72,21 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
extra="allow",
)
execution_manager_port: int = Field(
default=8002,
description="The port for execution manager daemon to run on",
)
execution_scheduler_port: int = Field(
default=8003,
description="The port for execution scheduler daemon to run on",
)
agent_server_port: int = Field(
default=8004,
description="The port for agent server daemon to run on",
)
@classmethod
def settings_customise_sources(
cls,
@@ -93,6 +108,23 @@ class Config(UpdateTrackingModel["Config"], BaseSettings):
class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
"""Secrets for the server."""
supabase_url: str = Field(default="", description="Supabase URL")
supabase_key: str = Field(default="", description="Supabase key")
# OAuth server credentials for integrations
github_client_id: str = Field(default="", description="GitHub OAuth client ID")
github_client_secret: str = Field(
default="", description="GitHub OAuth client secret"
)
google_client_id: str = Field(default="", description="Google OAuth client ID")
google_client_secret: str = Field(
default="", description="Google OAuth client secret"
)
notion_client_id: str = Field(default="", description="Notion OAuth client ID")
notion_client_secret: str = Field(
default="", description="Notion OAuth client secret"
)
openai_api_key: str = Field(default="", description="OpenAI API key")
anthropic_api_key: str = Field(default="", description="Anthropic API key")
groq_api_key: str = Field(default="", description="Groq API key")
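
Assuming these fields behave like standard pydantic-settings fields (the class may add further sources via settings_customise_sources), the new ports should be overridable from the environment; a minimal sketch:

import os
from pydantic import Field
from pydantic_settings import BaseSettings

class Config(BaseSettings):
    execution_manager_port: int = Field(default=8002)
    execution_scheduler_port: int = Field(default=8003)
    agent_server_port: int = Field(default=8004)

os.environ["EXECUTION_MANAGER_PORT"] = "9002"  # env names match field names, case-insensitively
assert Config().execution_manager_port == 9002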

@@ -8,7 +8,6 @@ from autogpt_server.data.queue import AsyncEventQueue
from autogpt_server.executor import ExecutionManager, ExecutionScheduler
from autogpt_server.server import AgentServer
from autogpt_server.server.rest_api import get_user_id
from autogpt_server.util.service import PyroNameServer
log = print
@@ -48,7 +47,6 @@ class InMemoryAsyncEventQueue(AsyncEventQueue):
class SpinTestServer:
def __init__(self):
self.name_server = PyroNameServer()
self.exec_manager = ExecutionManager()
self.in_memory_queue = InMemoryAsyncEventQueue()
self.agent_server = AgentServer(event_queue=self.in_memory_queue)
@@ -59,7 +57,6 @@ class SpinTestServer:
return "3e53486c-cf57-477e-ba2a-cb02dc828e1a"
async def __aenter__(self):
self.name_server.__enter__()
self.setup_dependency_overrides()
self.agent_server.__enter__()
self.exec_manager.__enter__()
@@ -76,7 +73,6 @@ class SpinTestServer:
self.scheduler.__exit__(exc_type, exc_val, exc_tb)
self.exec_manager.__exit__(exc_type, exc_val, exc_tb)
self.agent_server.__exit__(exc_type, exc_val, exc_tb)
self.name_server.__exit__(exc_type, exc_val, exc_tb)
def setup_dependency_overrides(self):
# Override get_user_id for testing
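
With the Pyro name server gone, the whole test stack comes up through one async context manager; a plausible pytest-asyncio fixture (fixture name and scope are assumptions chosen to match the tests below):

import pytest_asyncio

@pytest_asyncio.fixture(scope="session")
async def server():
    async with SpinTestServer() as test_server:
        yield test_server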

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "agpt"
@@ -25,7 +25,7 @@ requests = "*"
sentry-sdk = "^1.40.4"
[package.extras]
benchmark = ["agbenchmark @ file:///Users/majdyz/Code/AutoGPT/benchmark"]
benchmark = ["agbenchmark @ file:///home/reinier/code/agpt/AutoGPT/benchmark"]
[package.source]
type = "directory"
@@ -386,7 +386,7 @@ watchdog = "4.0.0"
webdriver-manager = "^4.0.1"
[package.extras]
benchmark = ["agbenchmark @ file:///Users/majdyz/Code/AutoGPT/benchmark"]
benchmark = ["agbenchmark @ file:///home/reinier/code/agpt/AutoGPT/benchmark"]
[package.source]
type = "directory"
@@ -3429,6 +3429,8 @@ files = [
{file = "orjson-3.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:960db0e31c4e52fa0fc3ecbaea5b2d3b58f379e32a95ae6b0ebeaa25b93dfd34"},
{file = "orjson-3.10.6-cp312-none-win32.whl", hash = "sha256:a6ea7afb5b30b2317e0bee03c8d34c8181bc5a36f2afd4d0952f378972c4efd5"},
{file = "orjson-3.10.6-cp312-none-win_amd64.whl", hash = "sha256:874ce88264b7e655dde4aeaacdc8fd772a7962faadfb41abe63e2a4861abc3dc"},
{file = "orjson-3.10.6-cp313-none-win32.whl", hash = "sha256:efdf2c5cde290ae6b83095f03119bdc00303d7a03b42b16c54517baa3c4ca3d0"},
{file = "orjson-3.10.6-cp313-none-win_amd64.whl", hash = "sha256:8e190fe7888e2e4392f52cafb9626113ba135ef53aacc65cd13109eb9746c43e"},
{file = "orjson-3.10.6-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:66680eae4c4e7fc193d91cfc1353ad6d01b4801ae9b5314f17e11ba55e934183"},
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caff75b425db5ef8e8f23af93c80f072f97b4fb3afd4af44482905c9f588da28"},
{file = "orjson-3.10.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3722fddb821b6036fd2a3c814f6bd9b57a89dc6337b9924ecd614ebce3271394"},
@@ -6452,4 +6454,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "126731188e8fdc7df0bd2dc92cd069fcd2b90edd5e1065cebd8f4adcedf982b5"
content-hash = "0ecd19c5cdf414368aa81b83ae76ba6db34b1bfc8f32a482d1222d6b839792da"

@@ -11,6 +11,7 @@ readme = "README.md"
[tool.poetry.dependencies]
python = "^3.10"
agpt = { path = "../../autogpt", develop = true }
aio-pika = "^9.4.3"
anthropic = "^0.25.1"
apscheduler = "^3.10.4"
autogpt-forge = { path = "../../forge", develop = true }
@@ -39,15 +40,14 @@ pyro5 = "^5.15"
pytest = "^8.2.1"
pytest-asyncio = "^0.23.7"
python-dotenv = "^1.0.1"
redis = "^5.0.8"
sentry-sdk = "1.45.0"
supabase = "^2.7.2"
tenacity = "^8.3.0"
uvicorn = { extras = ["standard"], version = "^0.30.1" }
websockets = "^12.0"
youtube-transcript-api = "^0.6.2"
aio-pika = "^9.4.3"
redis = "^5.0.8"
sentry-sdk = "1.45.0"
[tool.poetry.group.dev.dependencies]
poethepoet = "^0.26.1"
httpx = "^0.27.0"
@@ -66,11 +66,13 @@ build-backend = "poetry.core.masonry.api"
app = "autogpt_server.app:main"
rest = "autogpt_server.rest:main"
ws = "autogpt_server.ws:main"
executor = "autogpt_server.exec:main"
cli = "autogpt_server.cli:main"
format = "linter:format"
lint = "linter:lint"
test = "run_tests:test"
# https://poethepoet.natn.io/index.html
[tool.poe]
poetry_command = ""

@@ -8,7 +8,8 @@ def wait_for_postgres(max_retries=5, delay=5):
try:
result = subprocess.run(
[
"docker-compose",
"docker",
"compose",
"-f",
"docker-compose.test.yaml",
"exec",
@@ -45,7 +46,8 @@ def test():
# Start PostgreSQL with Docker Compose
run_command(
[
"docker-compose",
"docker",
"compose",
"-f",
"docker-compose.test.yaml",
"up",
@@ -55,7 +57,7 @@ def test():
)
if not wait_for_postgres():
run_command(["docker-compose", "-f", "docker-compose.test.yaml", "down"])
run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])
sys.exit(1)
# Run Prisma migrations
@@ -64,6 +66,6 @@ def test():
# Run the tests
result = subprocess.run(["pytest"] + sys.argv[1:], check=False)
run_command(["docker-compose", "-f", "docker-compose.test.yaml", "down"])
run_command(["docker", "compose", "-f", "docker-compose.test.yaml", "down"])
sys.exit(result.returncode)
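
Reassembled for reference, the readiness poll after the docker-compose → docker compose switch; the arguments after exec are an assumption reconstructed from the pg_isready call the migrate service uses further down:

import subprocess
import time

def wait_for_postgres(max_retries: int = 5, delay: int = 5) -> bool:
    for _ in range(max_retries):
        result = subprocess.run(
            ["docker", "compose", "-f", "docker-compose.test.yaml",
             "exec", "postgres",
             "pg_isready", "-U", "agpt_user", "-d", "agpt_local"],
            capture_output=True,
            text=True,
        )
        if result.returncode == 0:
            return True
        time.sleep(delay)
    return False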

@@ -36,23 +36,19 @@ async def assert_sample_graph_executions(
graph_exec_id: str,
):
executions = await agent_server.get_graph_run_node_execution_results(
test_graph.id, graph_exec_id, test_user.id
test_graph.id,
graph_exec_id,
test_user.id,
)
output_list = [{"result": ["Hello"]}, {"result": ["World"]}]
input_list = [
{
"name": "input_1",
"description": "First input value",
"placeholder_values": [],
"limit_to_placeholder_values": False,
"value": "Hello",
},
{
"name": "input_2",
"description": "Second input value",
"placeholder_values": [],
"limit_to_placeholder_values": False,
"value": "World",
},
]
@@ -61,16 +57,24 @@ async def assert_sample_graph_executions(
exec = executions[0]
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert exec.output_data in output_list
assert exec.input_data in input_list
assert (
exec.output_data in output_list
), f"Output data: {exec.output_data} and {output_list}"
assert (
exec.input_data in input_list
), f"Input data: {exec.input_data} and {input_list}"
assert exec.node_id in [test_graph.nodes[0].id, test_graph.nodes[1].id]
# Executing StoreValueBlock
exec = executions[1]
assert exec.status == execution.ExecutionStatus.COMPLETED
assert exec.graph_exec_id == graph_exec_id
assert exec.output_data in output_list
assert exec.input_data in input_list
assert (
exec.output_data in output_list
), f"Output data: {exec.output_data} and {output_list}"
assert (
exec.input_data in input_list
), f"Input data: {exec.input_data} and {input_list}"
assert exec.node_id in [test_graph.nodes[0].id, test_graph.nodes[1].id]
# Executing FillTextTemplateBlock

@@ -4,6 +4,7 @@ from autogpt_server.data import db, graph
from autogpt_server.executor import ExecutionScheduler
from autogpt_server.usecases.sample import create_test_graph, create_test_user
from autogpt_server.util.service import get_service_client
from autogpt_server.util.settings import Config
from autogpt_server.util.test import SpinTestServer
@@ -13,7 +14,9 @@ async def test_agent_schedule(server: SpinTestServer):
test_user = await create_test_user()
test_graph = await graph.create_graph(create_test_graph(), user_id=test_user.id)
scheduler = get_service_client(ExecutionScheduler)
scheduler = get_service_client(
ExecutionScheduler, Config().execution_scheduler_port
)
schedules = scheduler.get_execution_schedules(test_graph.id, test_user.id)
assert len(schedules) == 0

@@ -5,6 +5,7 @@ from autogpt_server.util.service import AppService, expose, get_service_client
class TestService(AppService):
def __init__(self):
super().__init__(port=8005)
self.use_redis = False
def run_service(self):
@@ -29,7 +30,7 @@ class TestService(AppService):
@pytest.mark.asyncio(scope="session")
async def test_service_creation(server):
with TestService():
client = get_service_client(TestService)
client = get_service_client(TestService, 8005)
assert client.add(5, 3) == 8
assert client.subtract(10, 4) == 6
assert client.fun_with_async(5, 3) == 8

@@ -1,4 +1,3 @@
version: "3"
services:
postgres:
image: ankane/pgvector:latest
@@ -16,6 +15,32 @@ services:
networks:
- app-network
server_base:
build:
context: ../
dockerfile: rnd/autogpt_server/Dockerfile
target: server
image: autogpt_server:latest
command: ["echo", "This is a base image and should not be run directly"]
migrate:
image: autogpt_server:latest
command: ["sh", "-c", "until pg_isready -h postgres -U agpt_user -d agpt_local; do echo 'Waiting for postgres...'; sleep 2; done; poetry run prisma migrate deploy"]
depends_on:
postgres:
condition: service_healthy
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
networks:
- app-network
restart: on-failure
healthcheck:
test: ["CMD", "poetry", "run", "prisma", "migrate", "status"]
interval: 10s
timeout: 5s
retries: 5
redis:
image: redis:latest
command: redis-server --requirepass password
@@ -25,9 +50,8 @@ services:
- app-network
rest_server:
build:
context: ../
dockerfile: rnd/autogpt_server/Dockerfile
image: autogpt_server:latest
command: ["poetry", "run", "rest"]
develop:
watch:
- path: ./
@@ -38,21 +62,53 @@ services:
condition: service_started
postgres:
condition: service_healthy
migrate:
condition: service_started
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- AUTH_ENABLED=false
- PYRO_HOST=0.0.0.0
- EXECUTIONMANAGER_HOST=executor
ports:
- "8000:8000"
- "8003:8003" # execution scheduler
networks:
- app-network
ws_server:
build:
context: ../
dockerfile: rnd/autogpt_server/Dockerfile.ws
executor:
image: autogpt_server:latest
command: ["poetry", "run", "executor"]
develop:
watch:
- path: ./
target: rnd/autogpt_server/
action: rebuild
depends_on:
redis:
condition: service_started
postgres:
condition: service_healthy
migrate:
condition: service_started
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- AUTH_ENABLED=false
- PYRO_HOST=0.0.0.0
- AGENTSERVER_HOST=rest_server
ports:
- "8002:8000"
networks:
- app-network
websocket_server:
image: autogpt_server:latest
command: ["poetry", "run", "ws"]
develop:
watch:
- path: ./
@@ -61,17 +117,52 @@ services:
depends_on:
- postgres
- redis
- migrate
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
- REDIS_HOST=redis
- REDIS_PORT=6379
- REDIS_PASSWORD=password
- AUTH_ENABLED=false
- PYRO_HOST=0.0.0.0
ports:
- "8001:8001"
- "8001:8000"
networks:
- app-network
market:
build:
context: ../
dockerfile: rnd/market/Dockerfile
depends_on:
- postgres
- migrate
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
ports:
- "8015:8015"
networks:
- app-network
frontend:
build:
context: ../
dockerfile: rnd/autogpt_builder/Dockerfile
target: dev
depends_on:
- postgres
- rest_server
- websocket_server
- migrate
environment:
- DATABASE_URL=postgresql://agpt_user:pass123@postgres:5432/agpt_local?connect_timeout=60
- NEXT_PUBLIC_AGPT_SERVER_URL=http://localhost:8000/api
- NEXT_PUBLIC_AGPT_WS_SERVER_URL=ws://localhost:8001/ws
- NEXT_PUBLIC_AGPT_MARKETPLACE_URL=http://localhost:8015/api/v1/market
ports:
- "3000:3000"
networks:
- app-network
networks:
app-network:
driver: bridge
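
How the EXECUTIONMANAGER_HOST and AGENTSERVER_HOST variables above are consumed: per the service.py change earlier in this diff, get_service_client derives the env var name from the service name and falls back to localhost. The pattern in isolation (assuming service_name is the service class name):

import os

def resolve_service_host(service_name: str) -> str:
    # "ExecutionManager".upper() -> "EXECUTIONMANAGER", so setting
    # EXECUTIONMANAGER_HOST=executor points rest_server's proxy at
    # PYRO:ExecutionManager@executor:8002
    return os.environ.get(f"{service_name.upper()}_HOST", "localhost")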

@@ -13,7 +13,7 @@ serviceAccount:
service:
type: ClusterIP
port: 8000
targetPort: 8000
targetPort: 8005
annotations:
cloud.google.com/neg: '{"ingress": true}'
beta.cloud.google.com/backend-config: '{"default": "autogpt-market"}'

@@ -39,6 +39,7 @@ spec:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
command: ["poetry", "run", "rest"]
ports:
- name: http
containerPort: {{ .Values.service.port }}

@@ -39,6 +39,7 @@ spec:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
command: ["poetry", "run", "ws"]
ports:
- name: ws
containerPort: {{ .Values.service.port }}

@@ -1,7 +1,7 @@
replicaCount: 1 # not scaling websocket server for now
image:
repository: us-east1-docker.pkg.dev/agpt-dev/agpt-ws-server-dev/agpt-ws-server-dev
repository: us-east1-docker.pkg.dev/agpt-dev/agpt-server-dev/agpt-server-dev
tag: latest
pullPolicy: Always

@@ -17,6 +17,10 @@ service_accounts = {
display_name = "AutoGPT Dev Server Account"
description = "Service account for agpt dev server"
},
"dev-agpt-gha-sa" = {
display_name = "GitHub Actions Service Account"
description = "Service account for GitHub Actions"
"dev-agpt-builder-sa" = {
display_name = "AutoGPT Dev Builder Account"
description = "Service account for agpt dev builder"
@@ -89,6 +93,13 @@ role_bindings = {
"serviceAccount:dev-agpt-market-sa@agpt-dev.iam.gserviceaccount.com"
],
"roles/container.hostServiceAgentUser" = [
"serviceAccount:dev-agpt-server-sa@agpt-dev.iam.gserviceaccount.com"
],
"roles/storage.admin" = [
"serviceAccount:dev-agpt-gha-sa@agpt-dev.iam.gserviceaccount.com"
],
"roles/iam.serviceAccountUser" = [
"serviceAccount:dev-agpt-gha-sa@agpt-dev.iam.gserviceaccount.com"
"serviceAccount:dev-agpt-server-sa@agpt-dev.iam.gserviceaccount.com",
"serviceAccount:dev-agpt-builder-sa@agpt-dev.iam.gserviceaccount.com",
"serviceAccount:dev-agpt-ws-server-sa@agpt-dev.iam.gserviceaccount.com",

@@ -1,14 +1,15 @@
FROM python:3.11-slim-buster as server_base
FROM python:3.11-slim-buster AS server_base
# Set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
WORKDIR /app
# postgresql-client is needed to check if the postgres service is ready for running migrations
# We need to check if the rest of the packages need to be installed
RUN apt-get update \
&& apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev \
&& apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& wget https://github.com/git/git/archive/v2.28.0.tar.gz -O git.tar.gz \
@@ -25,19 +26,31 @@ ENV POETRY_VERSION=1.8.3 \
PATH="$POETRY_HOME/bin:$PATH"
RUN pip3 install poetry
COPY rnd/market /app/rnd/market
FROM server_base AS server_dependencies
RUN mkdir -p /app/autogpt
RUN mkdir -p /app/forge
RUN mkdir -p /app/rnd/autogpt_libs
RUN mkdir -p /app/rnd/market
COPY rnd/autogpt_libs /app/rnd/autogpt_libs
COPY rnd/market/poetry.lock rnd/market/pyproject.toml /app/rnd/market/
WORKDIR /app/rnd/market
# Install dependencies
RUN poetry install --no-interaction --no-ansi
FROM server_dependencies AS server_prisma
# Need the market/utils/partial_types.py
COPY rnd/market /app/rnd/market
COPY rnd/market/schema.prisma ./
RUN poetry run prisma generate
FROM server_base as server
FROM server_prisma AS server
ENV PORT=8000
ENV PORT=8005
ENV DATABASE_URL=""
CMD ["poetry", "run", "app"]

@@ -87,5 +87,11 @@ def health():
content="<h1>Marketplace API</h1>", status_code=200
)
@app.get("/")
def default():
return fastapi.responses.HTMLResponse(
content="<h1>Marketplace API</h1>", status_code=200
)
prometheus_fastapi_instrumentator.Instrumentator().instrument(app).expose(app)
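
A runnable reduction of the marketplace pattern above: an HTML landing route plus Prometheus instrumentation, which prometheus-fastapi-instrumentator exposes at /metrics by default:

import fastapi
import prometheus_fastapi_instrumentator

app = fastapi.FastAPI()

@app.get("/")
def default():
    return fastapi.responses.HTMLResponse(
        content="<h1>Marketplace API</h1>", status_code=200
    )

prometheus_fastapi_instrumentator.Instrumentator().instrument(app).expose(app)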

@@ -58,7 +58,8 @@ def format():
def app():
run("uvicorn", "market.app:app", "--reload", "--port", "8001")
port = os.getenv("PORT", "8015")
run("uvicorn", "market.app:app", "--reload", "--port", port)
def setup():