Merge branch 'dev' into zamilmajdy/fix-iteration-and-timer-block

Zamil Majdy
2024-11-11 18:11:27 +07:00
committed by GitHub
11 changed files with 503 additions and 73 deletions


@@ -1,36 +1,38 @@
### Background
<!-- Clearly explain the need for these changes: -->
### Changes 🏗️
<!-- Concisely describe all of the changes made in this pull request: -->
### Checklist 📋
### Testing 🔍
> [!NOTE]
> Only for the new autogpt platform, currently in autogpt_platform/
#### For code changes:
- [ ] I have clearly listed my changes in the PR description
- [ ] I have made a test plan
- [ ] I have tested my changes according to the test plan:
<!-- Put your test plan here: -->
- [ ] ...
<!--
Please make sure your changes have been tested and are in good working condition.
Here is a list of our critical paths, if you need some inspiration on what and how to test:
-->
<details>
<summary>Example test plan</summary>
- [ ] Create from scratch and execute an agent with at least 3 blocks
- [ ] Import an agent from file upload, and confirm it executes correctly
- [ ] Upload agent to marketplace
- [ ] Import an agent from marketplace and confirm it executes correctly
- [ ] Edit an agent from monitor, and confirm it executes correctly
</details>
#### For configuration changes:
- [ ] `.env.example` is updated or already compatible with my changes
- [ ] `docker-compose.yml` is updated or already compatible with my changes
- [ ] I have included a list of my configuration changes in the PR description (under **Changes**)
### Configuration Changes 📝
> [!NOTE]
> Only for the new autogpt platform, currently in autogpt_platform/
<details>
<summary>Examples of configuration changes</summary>
If you're making configuration or infrastructure changes, please remember to check you've updated the related infrastructure code in the autogpt_platform/infra folder.
Examples of such changes might include:
- Changing ports
- Adding new services that need to communicate with each other
- Secrets or environment variable changes
- New or changed infrastructure, such as databases
</details>


@@ -9,7 +9,7 @@ repos:
- id: check-merge-conflict
- id: check-symlinks
- id: debug-statements
- repo: https://github.com/Yelp/detect-secrets
rev: v1.5.0
hooks:
@@ -19,26 +19,116 @@ repos:
files: ^autogpt_platform/
stages: [push]
- repo: local
# For proper type checking, all dependencies need to be up-to-date.
# It's also a good idea to check that poetry.lock is consistent with pyproject.toml.
hooks:
- id: poetry-install
name: Check & Install dependencies - AutoGPT Platform - Backend
alias: poetry-install-platform-backend
entry: poetry -C autogpt_platform/backend install
# include autogpt_libs source (since it's a path dependency)
files: ^autogpt_platform/(backend|autogpt_libs)/poetry\.lock$
types: [file]
language: system
pass_filenames: false
- id: poetry-install
name: Check & Install dependencies - AutoGPT Platform - Libs
alias: poetry-install-platform-libs
entry: poetry -C autogpt_platform/autogpt_libs install
files: ^autogpt_platform/autogpt_libs/poetry\.lock$
types: [file]
language: system
pass_filenames: false
- id: poetry-install
name: Check & Install dependencies - Classic - AutoGPT
alias: poetry-install-classic-autogpt
entry: poetry -C classic/original_autogpt install
# include forge source (since it's a path dependency)
files: ^classic/(original_autogpt|forge)/poetry\.lock$
types: [file]
language: system
pass_filenames: false
- id: poetry-install
name: Check & Install dependencies - Classic - Forge
alias: poetry-install-classic-forge
entry: poetry -C classic/forge install
files: ^classic/forge/poetry\.lock$
types: [file]
language: system
pass_filenames: false
- id: poetry-install
name: Check & Install dependencies - Classic - Benchmark
alias: poetry-install-classic-benchmark
entry: poetry -C classic/benchmark install
files: ^classic/benchmark/poetry\.lock$
types: [file]
language: system
pass_filenames: false
- repo: local
# For proper type checking, Prisma client must be up-to-date.
hooks:
- id: prisma-generate
name: Prisma Generate - AutoGPT Platform - Backend
alias: prisma-generate-platform-backend
entry: bash -c 'cd autogpt_platform/backend && poetry run prisma generate'
# include everything that triggers poetry install + the prisma schema
files: ^autogpt_platform/((backend|autogpt_libs)/poetry\.lock|backend/schema.prisma)$
types: [file]
language: system
pass_filenames: false
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.2
hooks:
- id: ruff
name: Lint (Ruff) - AutoGPT Platform - Backend
alias: ruff-lint-platform-backend
files: ^autogpt_platform/backend/
args: [--fix]
- id: ruff
name: Lint (Ruff) - AutoGPT Platform - Libs
alias: ruff-lint-platform-libs
files: ^autogpt_platform/autogpt_libs/
args: [--fix]
- repo: local
# isort needs the context of which packages are installed to function, so we
# can't use a vendored isort pre-commit hook (which runs in its own isolated venv).
hooks:
- id: isort-autogpt
name: Lint (isort) - AutoGPT
- id: isort
name: Lint (isort) - AutoGPT Platform - Backend
alias: isort-platform-backend
entry: poetry -C autogpt_platform/backend run isort
files: ^autogpt_platform/backend/
types: [file, python]
language: system
- id: isort
name: Lint (isort) - Classic - AutoGPT
alias: isort-classic-autogpt
entry: poetry -C classic/original_autogpt run isort
files: ^classic/original_autogpt/
types: [file, python]
language: system
- id: isort-forge
name: Lint (isort) - Forge
- id: isort
name: Lint (isort) - Classic - Forge
alias: isort-classic-forge
entry: poetry -C classic/forge run isort
files: ^classic/forge/
types: [file, python]
language: system
- id: isort-benchmark
name: Lint (isort) - Benchmark
- id: isort
name: Lint (isort) - Classic - Benchmark
alias: isort-classic-benchmark
entry: poetry -C classic/benchmark run isort
files: ^classic/benchmark/
types: [file, python]
@@ -51,7 +141,6 @@ repos:
hooks:
- id: black
name: Lint (Black)
language_version: python3.12
- repo: https://github.com/PyCQA/flake8
rev: 7.0.0
@@ -59,20 +148,20 @@ repos:
# them separately.
hooks:
- id: flake8
name: Lint (Flake8) - AutoGPT
alias: flake8-autogpt
name: Lint (Flake8) - Classic - AutoGPT
alias: flake8-classic-autogpt
files: ^classic/original_autogpt/(autogpt|scripts|tests)/
args: [--config=classic/original_autogpt/.flake8]
- id: flake8
name: Lint (Flake8) - Forge
alias: flake8-forge
name: Lint (Flake8) - Classic - Forge
alias: flake8-classic-forge
files: ^classic/forge/(forge|tests)/
args: [--config=classic/forge/.flake8]
- id: flake8
name: Lint (Flake8) - Benchmark
alias: flake8-benchmark
name: Lint (Flake8) - Classic - Benchmark
alias: flake8-classic-benchmark
files: ^classic/benchmark/(agbenchmark|tests)/((?!reports).)*[/.]
args: [--config=classic/benchmark/.flake8]
@@ -81,31 +170,52 @@ repos:
# project. To trigger on poetry.lock we also reset the file `types` filter.
hooks:
- id: pyright
name: Typecheck - AutoGPT
alias: pyright-autogpt
entry: poetry -C classic/original_autogpt run pyright
args: [-p, autogpt, autogpt]
name: Typecheck - AutoGPT Platform - Backend
alias: pyright-platform-backend
entry: poetry -C autogpt_platform/backend run pyright
args: [-p, autogpt_platform/backend, autogpt_platform/backend]
# include forge source (since it's a path dependency) but exclude *_test.py files:
files: ^(classic/original_autogpt/((autogpt|scripts|tests)/|poetry\.lock$)|classic/forge/(classic/forge/.*(?<!_test)\.py|poetry\.lock)$)
files: ^autogpt_platform/(backend/((backend|test)/|(\w+\.py|poetry\.lock)$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
types: [file]
language: system
pass_filenames: false
- id: pyright
name: Typecheck - Forge
alias: pyright-forge
name: Typecheck - AutoGPT Platform - Libs
alias: pyright-platform-libs
entry: poetry -C autogpt_platform/autogpt_libs run pyright
args: [-p, autogpt_platform/autogpt_libs, autogpt_platform/autogpt_libs]
files: ^autogpt_platform/autogpt_libs/(autogpt_libs/|poetry\.lock$)
types: [file]
language: system
pass_filenames: false
- id: pyright
name: Typecheck - Classic - AutoGPT
alias: pyright-classic-autogpt
entry: poetry -C classic/original_autogpt run pyright
args: [-p, classic/original_autogpt, classic/original_autogpt]
# include forge source (since it's a path dependency) but exclude *_test.py files:
files: ^(classic/original_autogpt/((autogpt|scripts|tests)/|poetry\.lock$)|classic/forge/(forge/.*(?<!_test)\.py|poetry\.lock)$)
types: [file]
language: system
pass_filenames: false
- id: pyright
name: Typecheck - Classic - Forge
alias: pyright-classic-forge
entry: poetry -C classic/forge run pyright
args: [-p, forge, forge]
files: ^classic/forge/(classic/forge/|poetry\.lock$)
args: [-p, classic/forge, classic/forge]
files: ^classic/forge/(forge/|poetry\.lock$)
types: [file]
language: system
pass_filenames: false
- id: pyright
name: Typecheck - Benchmark
alias: pyright-benchmark
name: Typecheck - Classic - Benchmark
alias: pyright-classic-benchmark
entry: poetry -C classic/benchmark run pyright
args: [-p, benchmark, benchmark]
args: [-p, classic/benchmark, classic/benchmark]
files: ^classic/benchmark/(agbenchmark/|tests/|poetry\.lock$)
types: [file]
language: system
@@ -113,23 +223,35 @@ repos:
- repo: local
hooks:
- id: pytest-autogpt
name: Run tests - AutoGPT (excl. slow tests)
- id: pytest
name: Run tests - AutoGPT Platform - Backend
alias: pytest-platform-backend
entry: bash -c 'cd autogpt_platform/backend && poetry run pytest'
# include autogpt_libs source (since it's a path dependency) but exclude *_test.py files:
files: ^autogpt_platform/(backend/((backend|test)/|poetry\.lock$)|autogpt_libs/(autogpt_libs/.*(?<!_test)\.py|poetry\.lock)$)
language: system
pass_filenames: false
- id: pytest
name: Run tests - Classic - AutoGPT (excl. slow tests)
alias: pytest-classic-autogpt
entry: bash -c 'cd classic/original_autogpt && poetry run pytest --cov=autogpt -m "not slow" tests/unit tests/integration'
# include forge source (since it's a path dependency) but exclude *_test.py files:
files: ^(classic/original_autogpt/((autogpt|tests)/|poetry\.lock$)|classic/forge/(classic/forge/.*(?<!_test)\.py|poetry\.lock)$)
files: ^(classic/original_autogpt/((autogpt|tests)/|poetry\.lock$)|classic/forge/(forge/.*(?<!_test)\.py|poetry\.lock)$)
language: system
pass_filenames: false
- id: pytest-forge
name: Run tests - Forge (excl. slow tests)
- id: pytest
name: Run tests - Classic - Forge (excl. slow tests)
alias: pytest-classic-forge
entry: bash -c 'cd classic/forge && poetry run pytest --cov=forge -m "not slow"'
files: ^classic/forge/(classic/forge/|tests/|poetry\.lock$)
files: ^classic/forge/(forge/|tests/|poetry\.lock$)
language: system
pass_filenames: false
- id: pytest-benchmark
name: Run tests - Benchmark
- id: pytest
name: Run tests - Classic - Benchmark
alias: pytest-classic-benchmark
entry: bash -c 'cd classic/benchmark && poetry run pytest --cov=benchmark'
files: ^classic/benchmark/(agbenchmark/|tests/|poetry\.lock$)
language: system
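
The `files:` patterns in these hooks are Python regular expressions matched against repository-relative paths, which is why the backend hooks also fire on `autogpt_libs` changes (it is a path dependency). A minimal sketch of how the prisma-generate trigger behaves, using the standard `re` module and hypothetical paths:

import re

# Trigger pattern copied verbatim from the prisma-generate hook above.
PRISMA_TRIGGER = re.compile(
    r"^autogpt_platform/((backend|autogpt_libs)/poetry\.lock|backend/schema.prisma)$"
)

for path in [
    "autogpt_platform/backend/poetry.lock",       # matches: backend lockfile
    "autogpt_platform/autogpt_libs/poetry.lock",  # matches: path-dependency lockfile
    "autogpt_platform/backend/schema.prisma",     # matches: the Prisma schema
    "autogpt_platform/backend/backend/app.py",    # no match: ordinary source file
]:
    print(path, "->", bool(PRISMA_TRIGGER.match(path)))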

.vscode/launch.json

@@ -0,0 +1,67 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Frontend: Server Side",
"type": "node-terminal",
"request": "launch",
"cwd": "${workspaceFolder}/autogpt_platform/frontend",
"command": "yarn dev"
},
{
"name": "Frontend: Client Side",
"type": "msedge",
"request": "launch",
"url": "http://localhost:3000"
},
{
"name": "Frontend: Full Stack",
"type": "node-terminal",
"request": "launch",
"command": "yarn dev",
"cwd": "${workspaceFolder}/autogpt_platform/frontend",
"serverReadyAction": {
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"action": "debugWithEdge"
}
},
{
"name": "Backend",
"type": "debugpy",
"request": "launch",
"module": "backend.app",
// "env": {
// "ENV": "dev"
// },
"envFile": "${workspaceFolder}/backend/.env",
"justMyCode": false,
"cwd": "${workspaceFolder}/autogpt_platform/backend"
},
{
"name": "Marketplace",
"type": "debugpy",
"request": "launch",
"module": "autogpt_platform.market.main",
"env": {
"ENV": "dev"
},
"envFile": "${workspaceFolder}/market/.env",
"justMyCode": false,
"cwd": "${workspaceFolder}/market"
}
],
"compounds": [
{
"name": "Everything",
"configurations": ["Backend", "Frontend: Full Stack"],
// "preLaunchTask": "${defaultBuildTask}",
"stopAll": true,
"presentation": {
"hidden": false,
"order": 0
}
}
]
}
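
The `serverReadyAction` in the Full Stack configuration watches the dev server's output and launches Edge once a URL appears. A quick check of that pattern against a typical Next.js startup line (sample output; the real line may differ slightly):

import re

# Pattern copied from the serverReadyAction above.
READY = re.compile(r"- Local:.+(https?://.+)")

line = "   - Local:        http://localhost:3000"  # sample `yarn dev` output
match = READY.search(line)
print(match.group(1) if match else "no match")  # -> http://localhost:3000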


@@ -72,6 +72,14 @@ did_credentials = APIKeyCredentials(
title="Use Credits for D-ID",
expires_at=None,
)
jina_credentials = APIKeyCredentials(
id="7f26de70-ba0d-494e-ba76-238e65e7b45f",
provider="jina",
api_key=SecretStr(settings.secrets.jina_api_key),
title="Use Credits for Jina",
expires_at=None,
)
DEFAULT_CREDENTIALS = [
revid_credentials,
@@ -81,6 +89,7 @@ DEFAULT_CREDENTIALS = [
anthropic_credentials,
groq_credentials,
did_credentials,
jina_credentials,
]
@@ -124,6 +133,8 @@ class SupabaseIntegrationCredentialsStore:
all_credentials.append(anthropic_credentials)
if settings.secrets.did_api_key:
all_credentials.append(did_credentials)
if settings.secrets.jina_api_key:
all_credentials.append(jina_credentials)
return all_credentials
def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:


@@ -1322,6 +1322,33 @@ files = [
[package.dependencies]
pyasn1 = ">=0.1.3"
[[package]]
name = "ruff"
version = "0.7.2"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
files = [
{file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"},
{file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"},
{file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"},
{file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"},
{file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"},
{file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"},
{file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"},
{file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"},
{file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"},
]
[[package]]
name = "six"
version = "1.16.0"
@@ -1724,4 +1751,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<4.0"
content-hash = "f80654aae542b1f2f3a44a01f197f87ffbaea52f474dd2cc2b72b8d56b155563"
content-hash = "55475acb18a4fd5dc74bc64d89a24fff1f41e8cd61304c15ec3df2503bbeba56"


@@ -19,7 +19,14 @@ supabase = "^2.9.1"
[tool.poetry.group.dev.dependencies]
redis = "^5.2.0"
ruff = "^0.7.2"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
[tool.ruff]
line-length = 88
[tool.ruff.lint]
extend-select = ["I"] # sort imports (isort rules)
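
`extend-select = ["I"]` enables Ruff's isort-compatible rules (I001 and friends), so the Ruff hooks configured in .pre-commit-config.yaml also enforce import ordering. Illustratively, `ruff check --fix` rewrites a module like this:

# Before: rule I001 flags the unsorted, ungrouped imports.
import uuid
from pinecone import Pinecone
import re

# After `ruff check --fix`: stdlib first, then third-party, each alphabetized.
import re
import uuid

from pinecone import Pinecone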


@@ -1,4 +1,5 @@
from typing import Literal
import uuid
from typing import Any, Literal
from autogpt_libs.supabase_integration_credentials_store import APIKeyCredentials
from pinecone import Pinecone, ServerlessSpec
@@ -98,10 +99,14 @@ class PineconeQueryBlock(Block):
include_metadata: bool = SchemaField(
description="Whether to include metadata in the response", default=True
)
host: str = SchemaField(description="Host for pinecone")
host: str = SchemaField(description="Host for pinecone", default="")
idx_name: str = SchemaField(description="Index name for pinecone")
class Output(BlockSchema):
results: dict = SchemaField(description="Query results from Pinecone")
results: Any = SchemaField(description="Query results from Pinecone")
combined_results: Any = SchemaField(
description="Combined results from Pinecone"
)
def __init__(self):
super().__init__(
@@ -119,13 +124,105 @@ class PineconeQueryBlock(Block):
credentials: APIKeyCredentials,
**kwargs,
) -> BlockOutput:
pc = Pinecone(api_key=credentials.api_key.get_secret_value())
idx = pc.Index(host=input_data.host)
results = idx.query(
namespace=input_data.namespace,
vector=input_data.query_vector,
top_k=input_data.top_k,
include_values=input_data.include_values,
include_metadata=input_data.include_metadata,
try:
# Create a new client instance
pc = Pinecone(api_key=credentials.api_key.get_secret_value())
# Get the index
idx = pc.Index(input_data.idx_name)
# Ensure query_vector is in correct format
query_vector = input_data.query_vector
if isinstance(query_vector, list) and len(query_vector) > 0:
if isinstance(query_vector[0], list):
query_vector = query_vector[0]
results = idx.query(
namespace=input_data.namespace,
vector=query_vector,
top_k=input_data.top_k,
include_values=input_data.include_values,
include_metadata=input_data.include_metadata,
).to_dict()
combined_text = ""
if results["matches"]:
texts = [
match["metadata"]["text"]
for match in results["matches"]
if match.get("metadata", {}).get("text")
]
combined_text = "\n\n".join(texts)
# Return both the raw matches and combined text
yield "results", {
"matches": results["matches"],
"combined_text": combined_text,
}
yield "combined_results", combined_text
except Exception as e:
error_msg = f"Error querying Pinecone: {str(e)}"
raise RuntimeError(error_msg) from e
class PineconeInsertBlock(Block):
class Input(BlockSchema):
credentials: PineconeCredentialsInput = PineconeCredentialsField()
index: str = SchemaField(description="Initialized Pinecone index")
chunks: list = SchemaField(description="List of text chunks to ingest")
embeddings: list = SchemaField(
description="List of embeddings corresponding to the chunks"
)
yield "results", results
namespace: str = SchemaField(
description="Namespace to use in Pinecone", default=""
)
metadata: dict = SchemaField(
description="Additional metadata to store with each vector", default={}
)
class Output(BlockSchema):
upsert_response: str = SchemaField(
description="Response from Pinecone upsert operation"
)
def __init__(self):
super().__init__(
id="477f2168-cd91-475a-8146-9499a5982434",
description="Upload data to a Pinecone index",
categories={BlockCategory.LOGIC},
input_schema=PineconeInsertBlock.Input,
output_schema=PineconeInsertBlock.Output,
)
def run(
self,
input_data: Input,
*,
credentials: APIKeyCredentials,
**kwargs,
) -> BlockOutput:
try:
# Create a new client instance
pc = Pinecone(api_key=credentials.api_key.get_secret_value())
# Get the index
idx = pc.Index(input_data.index)
vectors = []
for chunk, embedding in zip(input_data.chunks, input_data.embeddings):
vector_metadata = input_data.metadata.copy()
vector_metadata["text"] = chunk
vectors.append(
{
"id": str(uuid.uuid4()),
"values": embedding,
"metadata": vector_metadata,
}
)
idx.upsert(vectors=vectors, namespace=input_data.namespace)
yield "upsert_response", "successfully upserted"
except Exception as e:
error_msg = f"Error uploading to Pinecone: {str(e)}"
raise RuntimeError(error_msg) from e
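
The query-vector handling in `PineconeQueryBlock.run` exists because an upstream embedding step may emit a batch of one, i.e. a nested list. The same logic, distilled into a standalone helper (hypothetical name; the block inlines it):

def normalize_query_vector(query_vector: list) -> list:
    """Unwrap a singly nested vector, e.g. [[0.1, 0.2]] -> [0.1, 0.2]."""
    if query_vector and isinstance(query_vector[0], list):
        return query_vector[0]
    return query_vector

assert normalize_query_vector([[0.1, 0.2]]) == [0.1, 0.2]
assert normalize_query_vector([0.1, 0.2]) == [0.1, 0.2]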


@@ -9,6 +9,7 @@ from autogpt_libs.supabase_integration_credentials_store.store import (
did_credentials,
groq_credentials,
ideogram_credentials,
jina_credentials,
openai_credentials,
replicate_credentials,
revid_credentials,
@@ -144,7 +145,18 @@ BLOCK_COSTS: dict[Type[Block], list[BlockCost]] = {
},
)
],
SearchTheWebBlock: [BlockCost(cost_amount=1)],
SearchTheWebBlock: [
BlockCost(
cost_amount=1,
cost_filter={
"credentials": {
"id": jina_credentials.id,
"provider": jina_credentials.provider,
"type": jina_credentials.type,
}
},
)
],
ExtractWebsiteContentBlock: [
BlockCost(cost_amount=1, cost_filter={"raw_content": False})
],
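
Scoping `SearchTheWebBlock` with a `cost_filter` means the 1-credit charge applies only when the block runs on the platform's bundled Jina credentials; a user-supplied key falls outside the filter. A plausible reading of the matching rule, assuming the filter is compared as a recursive subset of the block's input (an assumption; the actual matching logic lives in the platform's cost code):

def filter_matches(cost_filter: dict, input_data: dict) -> bool:
    # Every filter key must appear in the input with an equal value;
    # nested dicts (like "credentials") are compared recursively.
    return all(
        filter_matches(v, input_data.get(k) or {})
        if isinstance(v, dict)
        else input_data.get(k) == v
        for k, v in cost_filter.items()
    )

# Charged: the default Jina credentials are selected (illustrative values).
print(filter_matches(
    {"credentials": {"provider": "jina"}},
    {"query": "news", "credentials": {"provider": "jina", "type": "api_key"}},
))  # -> True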


@@ -266,6 +266,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
replicate_api_key: str = Field(default="", description="Replicate API Key")
unreal_speech_api_key: str = Field(default="", description="Unreal Speech API Key")
ideogram_api_key: str = Field(default="", description="Ideogram API Key")
jina_api_key: str = Field(default="", description="Jina API Key")
# Add more secret fields as needed


@@ -0,0 +1,44 @@
-- CreateEnum
CREATE TYPE "APIKeyPermission" AS ENUM ('EXECUTE_GRAPH', 'READ_GRAPH', 'EXECUTE_BLOCK', 'READ_BLOCK');
-- CreateEnum
CREATE TYPE "APIKeyStatus" AS ENUM ('ACTIVE', 'REVOKED', 'SUSPENDED');
-- CreateTable
CREATE TABLE "APIKey" (
"id" TEXT NOT NULL,
"name" TEXT NOT NULL,
"prefix" TEXT NOT NULL,
"postfix" TEXT NOT NULL,
"key" TEXT NOT NULL,
"status" "APIKeyStatus" NOT NULL DEFAULT 'ACTIVE',
"permissions" "APIKeyPermission"[],
"createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
"lastUsedAt" TIMESTAMP(3),
"revokedAt" TIMESTAMP(3),
"description" TEXT,
"userId" TEXT NOT NULL,
CONSTRAINT "APIKey_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "APIKey_key_key" ON "APIKey"("key");
-- CreateIndex
CREATE INDEX "APIKey_key_idx" ON "APIKey"("key");
-- CreateIndex
CREATE INDEX "APIKey_prefix_idx" ON "APIKey"("prefix");
-- CreateIndex
CREATE INDEX "APIKey_userId_idx" ON "APIKey"("userId");
-- CreateIndex
CREATE INDEX "APIKey_status_idx" ON "APIKey"("status");
-- CreateIndex
CREATE INDEX "APIKey_userId_status_idx" ON "APIKey"("userId", "status");
-- AddForeignKey
ALTER TABLE "APIKey" ADD CONSTRAINT "APIKey_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE CASCADE ON UPDATE CASCADE;


@@ -27,6 +27,7 @@ model User {
AnalyticsDetails AnalyticsDetails[]
AnalyticsMetrics AnalyticsMetrics[]
UserBlockCredit UserBlockCredit[]
APIKeys APIKey[]
@@index([id])
@@index([email])
@@ -277,3 +278,42 @@ model UserBlockCredit {
@@id(name: "creditTransactionIdentifier", [transactionKey, userId])
}
enum APIKeyPermission {
EXECUTE_GRAPH // Can execute agent graphs
READ_GRAPH // Can get graph versions and details
EXECUTE_BLOCK // Can execute individual blocks
READ_BLOCK // Can get block information
}
model APIKey {
id String @id @default(uuid())
name String
prefix String // First 8 chars for identification
postfix String
key String @unique // Hashed key
status APIKeyStatus @default(ACTIVE)
permissions APIKeyPermission[]
createdAt DateTime @default(now())
lastUsedAt DateTime?
revokedAt DateTime?
description String?
// Relation to user
userId String
user User @relation(fields: [userId], references: [id], onDelete: Cascade)
@@index([key])
@@index([prefix])
@@index([userId])
@@index([status])
@@index([userId, status])
}
enum APIKeyStatus {
ACTIVE
REVOKED
SUSPENDED
}
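
The schema's split between a short `prefix`/`postfix` and a unique hashed `key` implies the raw key is shown to the user once at creation, while only a digest plus display fragments are persisted. A hypothetical issuance helper consistent with that layout (names and key format are illustrative, not the platform's actual code):

import hashlib
import secrets

def generate_api_key() -> tuple[str, dict]:
    raw = "agpt_" + secrets.token_urlsafe(32)  # returned to the caller exactly once
    record = {
        "prefix": raw[:8],    # first 8 chars, for identification in listings
        "postfix": raw[-8:],  # tail shown alongside the prefix
        "key": hashlib.sha256(raw.encode()).hexdigest(),  # only the hash is stored
    }
    return raw, record

raw_key, db_fields = generate_api_key()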