mirror of https://github.com/Significant-Gravitas/AutoGPT.git
synced 2026-02-11 23:35:25 -05:00

Compare commits: 1 commit (otto/open-... vs. dependabot)
Commit: 3e33558ea2

.github/dependabot.yml (vendored, 24 changes)
@@ -1,5 +1,29 @@
 version: 2
 updates:
+  # autogpt_libs (Poetry project)
+  - package-ecosystem: "pip"
+    directory: "autogpt_platform/autogpt_libs"
+    schedule:
+      interval: "weekly"
+    open-pull-requests-limit: 10
+    target-branch: "dev"
+    commit-message:
+      prefix: "chore(libs/deps)"
+      prefix-development: "chore(libs/deps-dev)"
+    ignore:
+      - dependency-name: "poetry"
+    groups:
+      production-dependencies:
+        dependency-type: "production"
+        update-types:
+          - "minor"
+          - "patch"
+      development-dependencies:
+        dependency-type: "development"
+        update-types:
+          - "minor"
+          - "patch"
+
   # backend (Poetry project)
   - package-ecosystem: "pip"
     directory: "autogpt_platform/backend"
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -62,7 +62,7 @@ jobs:

       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v4
         with:
           languages: ${{ matrix.language }}
           build-mode: ${{ matrix.build-mode }}

@@ -93,6 +93,6 @@ jobs:
           exit 1

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v4
         with:
           category: "/language:${{matrix.language}}"
.github/workflows/platform-backend-ci.yml (vendored, 2 changes)

@@ -6,11 +6,13 @@ on:
     paths:
       - ".github/workflows/platform-backend-ci.yml"
      - "autogpt_platform/backend/**"
+      - "autogpt_platform/autogpt_libs/**"
   pull_request:
     branches: [master, dev, release-*]
     paths:
       - ".github/workflows/platform-backend-ci.yml"
       - "autogpt_platform/backend/**"
+      - "autogpt_platform/autogpt_libs/**"
   merge_group:

 concurrency:
@@ -8,7 +8,7 @@ AutoGPT Platform is a monorepo containing:

 - **Backend** (`backend`): Python FastAPI server with async support
 - **Frontend** (`frontend`): Next.js React application
-- **Shared Libraries** (`backend/api/auth`, `backend/logging`): Auth, logging, and common utilities integrated into backend
+- **Shared Libraries** (`autogpt_libs`): Common Python utilities

 ## Component Documentation
autogpt_platform/autogpt_libs/README.md (new file, 3 lines)

@@ -0,0 +1,3 @@
+# AutoGPT Libs
+
+This is a new project to store shared functionality across different services in the AutoGPT Platform (e.g. authentication)
@@ -1,6 +1,6 @@
 import hashlib

-from backend.api.auth.api_key.keysmith import APIKeySmith
+from autogpt_libs.api_key.keysmith import APIKeySmith


 def test_generate_api_key():
@@ -9,7 +9,7 @@ import os
 import pytest
 from pytest_mock import MockerFixture

-from backend.api.auth.config import AuthConfigError, Settings
+from autogpt_libs.auth.config import AuthConfigError, Settings


 def test_environment_variable_precedence(mocker: MockerFixture):

@@ -228,7 +228,7 @@ def test_no_crypto_warning(mocker: MockerFixture, caplog: pytest.LogCaptureFixtu
     mocker.patch.dict(os.environ, {"JWT_VERIFY_KEY": secret}, clear=True)

     # Mock has_crypto to return False
-    mocker.patch("backend.api.auth.config.has_crypto", False)
+    mocker.patch("autogpt_libs.auth.config.has_crypto", False)

     with caplog.at_level(logging.WARNING):
         Settings()
@@ -43,7 +43,7 @@ def get_optional_user_id(

     try:
         # Parse JWT token to get user ID
-        from backend.api.auth.jwt_utils import parse_jwt_token
+        from autogpt_libs.auth.jwt_utils import parse_jwt_token

         payload = parse_jwt_token(credentials.credentials)
         return payload.get("sub")
@@ -11,12 +11,12 @@ from fastapi import FastAPI, HTTPException, Request, Security
 from fastapi.testclient import TestClient
 from pytest_mock import MockerFixture

-from backend.api.auth.dependencies import (
+from autogpt_libs.auth.dependencies import (
     get_user_id,
     requires_admin_user,
     requires_user,
 )
-from backend.api.auth.models import User
+from autogpt_libs.auth.models import User


 class TestAuthDependencies:

@@ -53,7 +53,7 @@ class TestAuthDependencies:

         # Mock get_jwt_payload to return our test payload
         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )
         user = await requires_user(jwt_payload)
         assert isinstance(user, User)

@@ -70,7 +70,7 @@ class TestAuthDependencies:
         }

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )
         user = await requires_user(jwt_payload)
         assert user.user_id == "admin-456"

@@ -105,7 +105,7 @@ class TestAuthDependencies:
         }

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )
         user = await requires_admin_user(jwt_payload)
         assert user.user_id == "admin-789"

@@ -137,7 +137,7 @@ class TestAuthDependencies:
         jwt_payload = {"sub": "user-id-xyz", "role": "user"}

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )
         user_id = await get_user_id(request, jwt_payload)
         assert user_id == "user-id-xyz"

@@ -344,7 +344,7 @@ class TestAuthDependenciesEdgeCases:
     ):
         """Test that errors propagate correctly through dependencies."""
         # Import verify_user to test it directly since dependencies use FastAPI Security
-        from backend.api.auth.jwt_utils import verify_user
+        from autogpt_libs.auth.jwt_utils import verify_user

         with pytest.raises(HTTPException) as exc_info:
             verify_user(payload, admin_only=admin_only)

@@ -354,7 +354,7 @@ class TestAuthDependenciesEdgeCases:
     async def test_dependency_valid_user(self):
         """Test valid user case for dependency."""
         # Import verify_user to test it directly since dependencies use FastAPI Security
-        from backend.api.auth.jwt_utils import verify_user
+        from autogpt_libs.auth.jwt_utils import verify_user

         # Valid case
         user = verify_user({"sub": "user", "role": "user"}, admin_only=False)

@@ -376,16 +376,16 @@ class TestAdminImpersonation:
         }

         # Mock verify_user to return admin user data
-        mock_verify_user = mocker.patch("backend.api.auth.dependencies.verify_user")
+        mock_verify_user = mocker.patch("autogpt_libs.auth.dependencies.verify_user")
         mock_verify_user.return_value = Mock(
             user_id="admin-456", email="admin@example.com", role="admin"
         )

         # Mock logger to verify audit logging
-        mock_logger = mocker.patch("backend.api.auth.dependencies.logger")
+        mock_logger = mocker.patch("autogpt_libs.auth.dependencies.logger")

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)

@@ -412,13 +412,13 @@ class TestAdminImpersonation:
         }

         # Mock verify_user to return regular user data
-        mock_verify_user = mocker.patch("backend.api.auth.dependencies.verify_user")
+        mock_verify_user = mocker.patch("autogpt_libs.auth.dependencies.verify_user")
         mock_verify_user.return_value = Mock(
             user_id="regular-user", email="user@example.com", role="user"
         )

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         with pytest.raises(HTTPException) as exc_info:

@@ -439,7 +439,7 @@ class TestAdminImpersonation:
         }

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)

@@ -459,7 +459,7 @@ class TestAdminImpersonation:
         }

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)

@@ -479,16 +479,16 @@ class TestAdminImpersonation:
         }

         # Mock verify_user to return admin user data
-        mock_verify_user = mocker.patch("backend.api.auth.dependencies.verify_user")
+        mock_verify_user = mocker.patch("autogpt_libs.auth.dependencies.verify_user")
         mock_verify_user.return_value = Mock(
             user_id="admin-999", email="superadmin@company.com", role="admin"
         )

         # Mock logger to capture audit trail
-        mock_logger = mocker.patch("backend.api.auth.dependencies.logger")
+        mock_logger = mocker.patch("autogpt_libs.auth.dependencies.logger")

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)

@@ -515,7 +515,7 @@ class TestAdminImpersonation:
         }

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)

@@ -535,16 +535,16 @@ class TestAdminImpersonation:
         }

         # Mock verify_user to return admin user data
-        mock_verify_user = mocker.patch("backend.api.auth.dependencies.verify_user")
+        mock_verify_user = mocker.patch("autogpt_libs.auth.dependencies.verify_user")
         mock_verify_user.return_value = Mock(
             user_id="admin-456", email="admin@example.com", role="admin"
         )

         # Mock logger
-        mock_logger = mocker.patch("backend.api.auth.dependencies.logger")
+        mock_logger = mocker.patch("autogpt_libs.auth.dependencies.logger")

         mocker.patch(
-            "backend.api.auth.dependencies.get_jwt_payload", return_value=jwt_payload
+            "autogpt_libs.auth.dependencies.get_jwt_payload", return_value=jwt_payload
         )

         user_id = await get_user_id(request, jwt_payload)
@@ -3,11 +3,13 @@ Comprehensive tests for auth helpers module to achieve 100% coverage.

 Tests OpenAPI schema generation and authentication response handling.
 """

+from unittest import mock
+
 from fastapi import FastAPI
 from fastapi.openapi.utils import get_openapi

-from backend.api.auth.helpers import add_auth_responses_to_openapi
-from backend.api.auth.jwt_utils import bearer_jwt_auth
+from autogpt_libs.auth.helpers import add_auth_responses_to_openapi
+from autogpt_libs.auth.jwt_utils import bearer_jwt_auth


 def test_add_auth_responses_to_openapi_basic():

@@ -17,7 +19,7 @@ def test_add_auth_responses_to_openapi_basic():

     # Add some test endpoints with authentication
     from fastapi import Depends

-    from backend.api.auth.dependencies import requires_user
+    from autogpt_libs.auth.dependencies import requires_user

     @app.get("/protected", dependencies=[Depends(requires_user)])
     def protected_endpoint():

@@ -62,7 +64,7 @@ def test_add_auth_responses_to_openapi_with_security():

     # Mock endpoint with security
     from fastapi import Security

-    from backend.api.auth.dependencies import get_user_id
+    from autogpt_libs.auth.dependencies import get_user_id

     @app.get("/secured")
     def secured_endpoint(user_id: str = Security(get_user_id)):

@@ -128,7 +130,7 @@ def test_add_auth_responses_to_openapi_existing_responses():

     from fastapi import Security

-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     @app.get(
         "/with-responses",

@@ -195,8 +197,8 @@ def test_add_auth_responses_to_openapi_multiple_security_schemes():

     from fastapi import Security

-    from backend.api.auth.dependencies import requires_admin_user, requires_user
-    from backend.api.auth.models import User
+    from autogpt_libs.auth.dependencies import requires_admin_user, requires_user
+    from autogpt_libs.auth.models import User

     @app.get("/multi-auth")
     def multi_auth(
@@ -225,29 +227,26 @@ def test_add_auth_responses_to_openapi_empty_components():
     """Test when OpenAPI schema has no components section initially."""
     app = FastAPI()

-    def mock_openapi():
-        schema = get_openapi(
-            title=app.title,
-            version=app.version,
-            routes=app.routes,
-        )
-        # Remove components if it exists to test component creation
+    # Mock get_openapi to return schema without components
+    original_get_openapi = get_openapi
+
+    def mock_get_openapi(*args, **kwargs):
+        schema = original_get_openapi(*args, **kwargs)
+        # Remove components if it exists
         if "components" in schema:
             del schema["components"]
         return schema

-    # Replace app's openapi method
-    app.openapi = mock_openapi
-
-    # Apply customization (this wraps our mock)
-    add_auth_responses_to_openapi(app)
-
-    schema = app.openapi()
-
-    # Components should be created
-    assert "components" in schema
-    assert "responses" in schema["components"]
-    assert "HTTP401NotAuthenticatedError" in schema["components"]["responses"]
+    with mock.patch("autogpt_libs.auth.helpers.get_openapi", mock_get_openapi):
+        # Apply customization
+        add_auth_responses_to_openapi(app)
+
+        schema = app.openapi()
+
+        # Components should be created
+        assert "components" in schema
+        assert "responses" in schema["components"]
+        assert "HTTP401NotAuthenticatedError" in schema["components"]["responses"]
@@ -256,7 +255,7 @@ def test_add_auth_responses_to_openapi_all_http_methods():

     from fastapi import Security

-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     @app.get("/resource")
     def get_resource(jwt: dict = Security(get_jwt_payload)):
@@ -334,59 +333,53 @@ def test_endpoint_without_responses_section():
     app = FastAPI()

     from fastapi import Security
+    from fastapi.openapi.utils import get_openapi as original_get_openapi

-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     # Create endpoint
     @app.get("/no-responses")
     def endpoint_without_responses(jwt: dict = Security(get_jwt_payload)):
         return {"data": "test"}

-    # Create a mock openapi method that removes responses from the endpoint
-    def mock_openapi():
-        schema = get_openapi(
-            title=app.title,
-            version=app.version,
-            routes=app.routes,
-        )
-        # Remove responses from our endpoint to test response creation
+    # Mock get_openapi to remove responses from the endpoint
+    def mock_get_openapi(*args, **kwargs):
+        schema = original_get_openapi(*args, **kwargs)
+        # Remove responses from our endpoint to trigger line 40
         if "/no-responses" in schema.get("paths", {}):
             if "get" in schema["paths"]["/no-responses"]:
                 # Delete responses to force the code to create it
                 if "responses" in schema["paths"]["/no-responses"]["get"]:
                     del schema["paths"]["/no-responses"]["get"]["responses"]
         return schema

-    # Replace app's openapi method
-    app.openapi = mock_openapi
-
-    # Apply customization (this wraps our mock)
-    add_auth_responses_to_openapi(app)
-
-    # Get schema and verify 401 was added
-    schema = app.openapi()
-
-    # The endpoint should now have 401 response
-    if "/no-responses" in schema["paths"]:
-        if "get" in schema["paths"]["/no-responses"]:
-            responses = schema["paths"]["/no-responses"]["get"].get("responses", {})
-            assert "401" in responses
-            assert (
-                responses["401"]["$ref"]
-                == "#/components/responses/HTTP401NotAuthenticatedError"
-            )
+    with mock.patch("autogpt_libs.auth.helpers.get_openapi", mock_get_openapi):
+        # Apply customization
+        add_auth_responses_to_openapi(app)
+
+        # Get schema and verify 401 was added
+        schema = app.openapi()
+
+        # The endpoint should now have 401 response
+        if "/no-responses" in schema["paths"]:
+            if "get" in schema["paths"]["/no-responses"]:
+                responses = schema["paths"]["/no-responses"]["get"].get("responses", {})
+                assert "401" in responses
+                assert (
+                    responses["401"]["$ref"]
+                    == "#/components/responses/HTTP401NotAuthenticatedError"
+                )
 def test_components_with_existing_responses():
     """Test when components already has a responses section."""
     app = FastAPI()

-    # Create a mock openapi method that adds existing components/responses
-    def mock_openapi():
-        schema = get_openapi(
-            title=app.title,
-            version=app.version,
-            routes=app.routes,
-        )
+    # Mock get_openapi to return schema with existing components/responses
+    from fastapi.openapi.utils import get_openapi as original_get_openapi
+
+    def mock_get_openapi(*args, **kwargs):
+        schema = original_get_openapi(*args, **kwargs)
         # Add existing components/responses
         if "components" not in schema:
             schema["components"] = {}

@@ -395,21 +388,21 @@ def test_components_with_existing_responses():
         }
         return schema

-    # Replace app's openapi method
-    app.openapi = mock_openapi
-
-    # Apply customization (this wraps our mock)
-    add_auth_responses_to_openapi(app)
-
-    schema = app.openapi()
-
-    # Both responses should exist
-    assert "ExistingResponse" in schema["components"]["responses"]
-    assert "HTTP401NotAuthenticatedError" in schema["components"]["responses"]
-
-    # Verify our 401 response structure
-    error_response = schema["components"]["responses"]["HTTP401NotAuthenticatedError"]
-    assert error_response["description"] == "Authentication required"
+    with mock.patch("autogpt_libs.auth.helpers.get_openapi", mock_get_openapi):
+        # Apply customization
+        add_auth_responses_to_openapi(app)
+
+        schema = app.openapi()
+
+        # Both responses should exist
+        assert "ExistingResponse" in schema["components"]["responses"]
+        assert "HTTP401NotAuthenticatedError" in schema["components"]["responses"]
+
+        # Verify our 401 response structure
+        error_response = schema["components"]["responses"][
+            "HTTP401NotAuthenticatedError"
+        ]
+        assert error_response["description"] == "Authentication required"
@@ -418,7 +411,7 @@ def test_openapi_schema_persistence():

     from fastapi import Security

-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     @app.get("/test")
     def test_endpoint(jwt: dict = Security(get_jwt_payload)):
@@ -12,9 +12,9 @@ from fastapi import HTTPException
 from fastapi.security import HTTPAuthorizationCredentials
 from pytest_mock import MockerFixture

-from backend.api.auth import config, jwt_utils
-from backend.api.auth.config import Settings
-from backend.api.auth.models import User
+from autogpt_libs.auth import config, jwt_utils
+from autogpt_libs.auth.config import Settings
+from autogpt_libs.auth.models import User

 MOCK_JWT_SECRET = "test-secret-key-with-at-least-32-characters"
 TEST_USER_PAYLOAD = {
@@ -0,0 +1,33 @@ (new file: rate limit settings)
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict


class RateLimitSettings(BaseSettings):
    redis_host: str = Field(
        default="redis://localhost:6379",
        description="Redis host",
        validation_alias="REDIS_HOST",
    )

    redis_port: str = Field(
        default="6379", description="Redis port", validation_alias="REDIS_PORT"
    )

    redis_password: Optional[str] = Field(
        default=None,
        description="Redis password",
        validation_alias="REDIS_PASSWORD",
    )

    requests_per_minute: int = Field(
        default=60,
        description="Maximum number of requests allowed per minute per API key",
        validation_alias="RATE_LIMIT_REQUESTS_PER_MINUTE",
    )

    model_config = SettingsConfigDict(case_sensitive=True, extra="ignore")


RATE_LIMIT_SETTINGS = RateLimitSettings()
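The settings above resolve from the environment through their validation_alias names. A minimal sketch of an override, assuming the RateLimitSettings class from this file is importable:

    import os

    # RATE_LIMIT_REQUESTS_PER_MINUTE is the alias declared in the Field above
    os.environ["RATE_LIMIT_REQUESTS_PER_MINUTE"] = "120"

    settings = RateLimitSettings()
    assert settings.requests_per_minute == 120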
@@ -0,0 +1,51 @@ (new file: rate limiter)
import time
from typing import Tuple

from redis import Redis

from .config import RATE_LIMIT_SETTINGS


class RateLimiter:
    def __init__(
        self,
        redis_host: str = RATE_LIMIT_SETTINGS.redis_host,
        redis_port: str = RATE_LIMIT_SETTINGS.redis_port,
        redis_password: str | None = RATE_LIMIT_SETTINGS.redis_password,
        requests_per_minute: int = RATE_LIMIT_SETTINGS.requests_per_minute,
    ):
        self.redis = Redis(
            host=redis_host,
            port=int(redis_port),
            password=redis_password,
            decode_responses=True,
        )
        self.window = 60
        self.max_requests = requests_per_minute

    async def check_rate_limit(self, api_key_id: str) -> Tuple[bool, int, int]:
        """
        Check if request is within rate limits.

        Args:
            api_key_id: The API key identifier to check

        Returns:
            Tuple of (is_allowed, remaining_requests, reset_time)
        """
        now = time.time()
        window_start = now - self.window
        key = f"ratelimit:{api_key_id}:1min"

        pipe = self.redis.pipeline()
        pipe.zremrangebyscore(key, 0, window_start)
        pipe.zadd(key, {str(now): now})
        pipe.zcount(key, window_start, now)
        pipe.expire(key, self.window)

        _, _, request_count, _ = pipe.execute()

        remaining = max(0, self.max_requests - request_count)
        reset_time = int(now + self.window)

        return request_count <= self.max_requests, remaining, reset_time
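A minimal usage sketch of the sliding-window limiter above, assuming a Redis server reachable with the default settings. Note that check_rate_limit records the current request (zadd) before counting, so the returned count already includes the request being checked:

    import asyncio

    async def demo():
        limiter = RateLimiter(requests_per_minute=3)
        for i in range(5):
            # "demo-key" is an illustrative key id; any stable per-client id works
            allowed, remaining, reset_time = await limiter.check_rate_limit("demo-key")
            print(f"request {i + 1}: allowed={allowed}, remaining={remaining}")

    asyncio.run(demo())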
@@ -0,0 +1,32 @@ (new file: rate limit middleware)
from fastapi import HTTPException, Request
from starlette.middleware.base import RequestResponseEndpoint

from .limiter import RateLimiter


async def rate_limit_middleware(request: Request, call_next: RequestResponseEndpoint):
    """FastAPI middleware for rate limiting API requests."""
    limiter = RateLimiter()

    if not request.url.path.startswith("/api"):
        return await call_next(request)

    api_key = request.headers.get("Authorization")
    if not api_key:
        return await call_next(request)

    api_key = api_key.replace("Bearer ", "")

    is_allowed, remaining, reset_time = await limiter.check_rate_limit(api_key)

    if not is_allowed:
        raise HTTPException(
            status_code=429, detail="Rate limit exceeded. Please try again later."
        )

    response = await call_next(request)
    response.headers["X-RateLimit-Limit"] = str(limiter.max_requests)
    response.headers["X-RateLimit-Remaining"] = str(remaining)
    response.headers["X-RateLimit-Reset"] = str(reset_time)

    return response
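The diff adds rate_limit_middleware but does not show where it is registered; a plausible wiring, using FastAPI's function-based HTTP middleware hook, would be:

    from fastapi import FastAPI

    app = FastAPI()
    # equivalent to decorating rate_limit_middleware with @app.middleware("http")
    app.middleware("http")(rate_limit_middleware)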
@@ -0,0 +1,76 @@ (new file: credential models)
from typing import Annotated, Any, Literal, Optional, TypedDict
from uuid import uuid4

from pydantic import BaseModel, Field, SecretStr, field_serializer


class _BaseCredentials(BaseModel):
    id: str = Field(default_factory=lambda: str(uuid4()))
    provider: str
    title: Optional[str]

    @field_serializer("*")
    def dump_secret_strings(value: Any, _info):
        if isinstance(value, SecretStr):
            return value.get_secret_value()
        return value


class OAuth2Credentials(_BaseCredentials):
    type: Literal["oauth2"] = "oauth2"
    username: Optional[str]
    """Username of the third-party service user that these credentials belong to"""
    access_token: SecretStr
    access_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the access token expires (if at all)"""
    refresh_token: Optional[SecretStr]
    refresh_token_expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the refresh token expires (if at all)"""
    scopes: list[str]
    metadata: dict[str, Any] = Field(default_factory=dict)

    def bearer(self) -> str:
        return f"Bearer {self.access_token.get_secret_value()}"


class APIKeyCredentials(_BaseCredentials):
    type: Literal["api_key"] = "api_key"
    api_key: SecretStr
    expires_at: Optional[int]
    """Unix timestamp (seconds) indicating when the API key expires (if at all)"""

    def bearer(self) -> str:
        return f"Bearer {self.api_key.get_secret_value()}"


Credentials = Annotated[
    OAuth2Credentials | APIKeyCredentials,
    Field(discriminator="type"),
]


CredentialsType = Literal["api_key", "oauth2"]


class OAuthState(BaseModel):
    token: str
    provider: str
    expires_at: int
    code_verifier: Optional[str] = None
    scopes: list[str]
    """Unix timestamp (seconds) indicating when this OAuth state expires"""


class UserMetadata(BaseModel):
    integration_credentials: list[Credentials] = Field(default_factory=list)
    integration_oauth_states: list[OAuthState] = Field(default_factory=list)


class UserMetadataRaw(TypedDict, total=False):
    integration_credentials: list[dict]
    integration_oauth_states: list[dict]


class UserIntegrations(BaseModel):
    credentials: list[Credentials] = Field(default_factory=list)
    oauth_states: list[OAuthState] = Field(default_factory=list)
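A short sketch of the Credentials discriminated union in use (pydantic v2 TypeAdapter); the provider and key values are illustrative only:

    from pydantic import SecretStr, TypeAdapter

    creds = APIKeyCredentials(
        provider="example-provider",  # illustrative value
        title=None,
        api_key=SecretStr("my-secret-key"),
        expires_at=None,
    )
    print(creds.bearer())  # -> "Bearer my-secret-key"

    # Round-trip through a plain dict: the "type" field selects the subclass,
    # and the field_serializer above dumps SecretStr values as plain strings.
    parsed = TypeAdapter(Credentials).validate_python(creds.model_dump())
    assert isinstance(parsed, APIKeyCredentials)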
autogpt_platform/autogpt_libs/poetry.lock (generated, new file, 2914 lines)
File diff suppressed because it is too large.

autogpt_platform/autogpt_libs/pyproject.toml (new file, 40 lines)
@@ -0,0 +1,40 @@
[tool.poetry]
name = "autogpt-libs"
version = "0.2.0"
description = "Shared libraries across AutoGPT Platform"
authors = ["AutoGPT team <info@agpt.co>"]
readme = "README.md"
packages = [{ include = "autogpt_libs" }]

[tool.poetry.dependencies]
python = ">=3.10,<4.0"
colorama = "^0.4.6"
cryptography = "^46.0"
expiringdict = "^1.2.2"
fastapi = "^0.128.0"
google-cloud-logging = "^3.13.0"
launchdarkly-server-sdk = "^9.14.1"
pydantic = "^2.12.5"
pydantic-settings = "^2.12.0"
pyjwt = { version = "^2.11.0", extras = ["crypto"] }
redis = "^6.2.0"
supabase = "^2.27.2"
uvicorn = "^0.40.0"

[tool.poetry.group.dev.dependencies]
pyright = "^1.1.408"
pytest = "^8.4.1"
pytest-asyncio = "^1.3.0"
pytest-mock = "^3.15.1"
pytest-cov = "^7.0.0"
ruff = "^0.15.0"

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

[tool.ruff]
line-length = 88

[tool.ruff.lint]
extend-select = ["I"] # sort dependencies
@@ -39,7 +39,8 @@ ENV PATH=/opt/poetry/bin:$PATH

 RUN pip3 install poetry --break-system-packages

-# Copy and install dependencies (autogpt_libs merged into backend - OPEN-2998)
+# Copy and install dependencies
+COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
 COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/
 WORKDIR /app/autogpt_platform/backend
 RUN poetry install --no-ansi --no-root

@@ -82,9 +83,11 @@ COPY --from=builder /root/.cache/prisma-python/binaries /root/.cache/prisma-pyth

 ENV PATH="/app/autogpt_platform/backend/.venv/bin:$PATH"

-# autogpt_libs merged into backend (OPEN-2998)
 RUN mkdir -p /app/autogpt_platform/autogpt_libs
 RUN mkdir -p /app/autogpt_platform/backend

+COPY autogpt_platform/autogpt_libs /app/autogpt_platform/autogpt_libs
+
 COPY autogpt_platform/backend/poetry.lock autogpt_platform/backend/pyproject.toml /app/autogpt_platform/backend/

 WORKDIR /app/autogpt_platform/backend
@@ -132,7 +132,7 @@ def test_endpoint_success(snapshot: Snapshot):

 ### Testing with Authentication

-For the main API routes that use JWT authentication, auth is provided by the `backend.api.auth` module. If the test actually uses the `user_id`, the recommended approach for testing is to mock the `get_jwt_payload` function, which underpins all higher-level auth functions used in the API (`requires_user`, `requires_admin_user`, `get_user_id`).
+For the main API routes that use JWT authentication, auth is provided by the `autogpt_libs.auth` module. If the test actually uses the `user_id`, the recommended approach for testing is to mock the `get_jwt_payload` function, which underpins all higher-level auth functions used in the API (`requires_user`, `requires_admin_user`, `get_user_id`).

 If the test doesn't need the `user_id` specifically, mocking is not necessary as during tests auth is disabled anyway (see `conftest.py`).

@@ -158,7 +158,7 @@ client = fastapi.testclient.TestClient(app)
 @pytest.fixture(autouse=True)
 def setup_app_auth(mock_jwt_user):
     """Setup auth overrides for all tests in this module"""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     app.dependency_overrides[get_jwt_payload] = mock_jwt_user['get_jwt_payload']
     yield

@@ -171,7 +171,7 @@ For admin-only endpoints, use `mock_jwt_admin` instead:
 @pytest.fixture(autouse=True)
 def setup_app_auth(mock_jwt_admin):
     """Setup auth overrides for admin tests"""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     app.dependency_overrides[get_jwt_payload] = mock_jwt_admin['get_jwt_payload']
     yield
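A minimal sketch of a test that leans on the autouse fixture above; the endpoint path is hypothetical:

    def test_protected_endpoint_returns_ok():
        # get_jwt_payload is already overridden, so the request is authenticated
        response = client.get("/api/some-protected-route")
        assert response.status_code == 200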
@@ -1,10 +1,10 @@
 import logging
 import typing

+from autogpt_libs.auth import get_user_id, requires_admin_user
 from fastapi import APIRouter, Body, Security
 from prisma.enums import CreditTransactionType

-from backend.api.auth import get_user_id, requires_admin_user
 from backend.data.credit import admin_get_user_history, get_user_credit_model
 from backend.util.json import SafeJson
@@ -6,9 +6,9 @@ import fastapi.testclient
 import prisma.enums
 import pytest
 import pytest_mock
+from autogpt_libs.auth.jwt_utils import get_jwt_payload
 from pytest_snapshot.plugin import Snapshot

-from backend.api.auth.jwt_utils import get_jwt_payload
 from backend.data.model import UserTransaction
 from backend.util.json import SafeJson
 from backend.util.models import Pagination
@@ -3,10 +3,10 @@ import logging
 from datetime import datetime
 from typing import Optional

+from autogpt_libs.auth import get_user_id, requires_admin_user
 from fastapi import APIRouter, HTTPException, Security
 from pydantic import BaseModel, Field

-from backend.api.auth import get_user_id, requires_admin_user
 from backend.blocks.llm import LlmModel
 from backend.data.analytics import (
     AccuracyTrendsResponse,
@@ -2,11 +2,11 @@ import logging
 import tempfile
 import typing

+import autogpt_libs.auth
 import fastapi
 import fastapi.responses
 import prisma.enums

-import backend.api.auth
 import backend.api.features.store.cache as store_cache
 import backend.api.features.store.db as store_db
 import backend.api.features.store.model as store_model

@@ -17,7 +17,7 @@ logger = logging.getLogger(__name__)
 router = fastapi.APIRouter(
     prefix="/admin",
     tags=["store", "admin"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_admin_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_admin_user)],
 )

@@ -73,7 +73,7 @@ async def get_admin_listings_with_versions(
 async def review_submission(
     store_listing_version_id: str,
     request: store_model.ReviewSubmissionRequest,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Review a store listing submission.

@@ -117,7 +117,7 @@ async def review_submission(
     tags=["store", "admin"],
 )
 async def admin_download_agent_file(
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
     store_listing_version_id: str = fastapi.Path(
         ..., description="The ID of the agent to download"
     ),
@@ -5,10 +5,10 @@ from typing import Annotated

 import fastapi
 import pydantic
+from autogpt_libs.auth import get_user_id
+from autogpt_libs.auth.dependencies import requires_user

 import backend.data.analytics
-from backend.api.auth import get_user_id
-from backend.api.auth.dependencies import requires_user

 router = fastapi.APIRouter(dependencies=[fastapi.Security(requires_user)])
 logger = logging.getLogger(__name__)
@@ -20,7 +20,7 @@ client = fastapi.testclient.TestClient(app)
 @pytest.fixture(autouse=True)
 def setup_app_auth(mock_jwt_user):
     """Setup auth overrides for all tests in this module."""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
     yield
@@ -2,8 +2,8 @@ import logging
 from typing import Annotated, Sequence

 import fastapi
+from autogpt_libs.auth.dependencies import get_user_id, requires_user

-from backend.api.auth.dependencies import get_user_id, requires_user
 from backend.integrations.providers import ProviderName
 from backend.util.models import Pagination
@@ -10,8 +10,6 @@ from typing import Any

 from pydantic import BaseModel, Field

-from backend.util.json import dumps as json_dumps
-

 class ResponseType(str, Enum):
     """Types of streaming responses following AI SDK protocol."""

@@ -195,18 +193,6 @@ class StreamError(StreamBaseResponse):
         default=None, description="Additional error details"
     )

-    def to_sse(self) -> str:
-        """Convert to SSE format, only emitting fields required by AI SDK protocol.
-
-        The AI SDK uses z.strictObject({type, errorText}) which rejects
-        any extra fields like `code` or `details`.
-        """
-        data = {
-            "type": self.type.value,
-            "errorText": self.errorText,
-        }
-        return f"data: {json_dumps(data)}\n\n"
-

 class StreamHeartbeat(StreamBaseResponse):
     """Heartbeat to keep SSE connection alive during long-running operations.
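For reference, the to_sse helper removed above emitted AI SDK-compatible SSE frames of this shape (sketch; the error text is illustrative):

    import json

    payload = {"type": "error", "errorText": "something went wrong"}
    # one SSE event: a single "data:" line terminated by a blank line
    frame = f"data: {json.dumps(payload)}\n\n"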
@@ -5,11 +5,11 @@ import uuid as uuid_module
 from collections.abc import AsyncGenerator
 from typing import Annotated

+from autogpt_libs import auth
 from fastapi import APIRouter, Depends, Header, HTTPException, Query, Response, Security
 from fastapi.responses import StreamingResponse
 from pydantic import BaseModel

-from backend.api import auth
 from backend.util.exceptions import NotFoundError

 from . import service as chat_service
@@ -303,7 +303,7 @@ async def stream_chat_post(

     session = await _validate_and_get_session(session_id, user_id)
     logger.info(
-        f"[TIMING] session validated in {(time.perf_counter() - stream_start_time) * 1000:.1f}ms",
+        f"[TIMING] session validated in {(time.perf_counter() - stream_start_time)*1000:.1f}ms",
         extra={
             "json_fields": {
                 **log_meta,

@@ -327,7 +327,7 @@ async def stream_chat_post(
         operation_id=operation_id,
     )
     logger.info(
-        f"[TIMING] create_task completed in {(time.perf_counter() - task_create_start) * 1000:.1f}ms",
+        f"[TIMING] create_task completed in {(time.perf_counter() - task_create_start)*1000:.1f}ms",
         extra={
             "json_fields": {
                 **log_meta,

@@ -377,7 +377,7 @@ async def stream_chat_post(
     gen_end_time = time_module.perf_counter()
     total_time = (gen_end_time - gen_start_time) * 1000
     logger.info(
-        f"[TIMING] run_ai_generation FINISHED in {total_time / 1000:.1f}s; "
+        f"[TIMING] run_ai_generation FINISHED in {total_time/1000:.1f}s; "
         f"task={task_id}, session={session_id}, "
         f"ttfc={ttfc or -1:.2f}s, n_chunks={chunk_count}",
         extra={

@@ -1233,7 +1233,7 @@ async def _stream_chat_chunks(

     total_time = (time_module.perf_counter() - stream_chunks_start) * 1000
     logger.info(
-        f"[TIMING] _stream_chat_chunks COMPLETED in {total_time / 1000:.1f}s; "
+        f"[TIMING] _stream_chat_chunks COMPLETED in {total_time/1000:.1f}s; "
         f"session={session.session_id}, user={session.user_id}",
         extra={"json_fields": {**log_meta, "total_time_ms": total_time}},
     )
@@ -569,7 +569,7 @@ async def _stream_listener(
     if isinstance(chunk, StreamFinish):
         total_time = (time.perf_counter() - start_time) * 1000
         logger.info(
-            f"[TIMING] StreamFinish received in {total_time / 1000:.1f}s; delivered={messages_delivered}",
+            f"[TIMING] StreamFinish received in {total_time/1000:.1f}s; delivered={messages_delivered}",
             extra={
                 "json_fields": {
                     **log_meta,

@@ -620,7 +620,7 @@ async def _stream_listener(
     # Clean up listener task mapping on exit
     total_time = (time.perf_counter() - start_time) * 1000
     logger.info(
-        f"[TIMING] _stream_listener FINISHED in {total_time / 1000:.1f}s; task={task_id}, "
+        f"[TIMING] _stream_listener FINISHED in {total_time/1000:.1f}s; task={task_id}, "
         f"delivered={messages_delivered}, xread_count={xread_count}",
         extra={
             "json_fields": {
@@ -151,10 +151,9 @@ class RunBlockTool(BaseTool):
         logger.info(f"Executing block {block.name} ({block_id}) for user {user_id}")

         creds_manager = IntegrationCredentialsManager()
-        (
-            matched_credentials,
-            missing_credentials,
-        ) = await self._resolve_block_credentials(user_id, block, input_data)
+        matched_credentials, missing_credentials = (
+            await self._resolve_block_credentials(user_id, block, input_data)
+        )

         if missing_credentials:
             # Return setup requirements response with missing credentials
@@ -25,7 +25,7 @@ FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0, tzinfo=datetime.timezone.utc)
 @pytest_asyncio.fixture(loop_scope="session")
 async def client(server, mock_jwt_user) -> AsyncGenerator[httpx.AsyncClient, None]:
     """Create async HTTP client with auth overrides"""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     # Override get_jwt_payload dependency to return our test user
     app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
@@ -2,10 +2,10 @@ import asyncio
 import logging
 from typing import Any, List

+import autogpt_libs.auth as autogpt_auth_lib
 from fastapi import APIRouter, HTTPException, Query, Security, status
 from prisma.enums import ReviewStatus

-import backend.api.auth as autogpt_auth_lib
 from backend.data.execution import (
     ExecutionContext,
     ExecutionStatus,
@@ -3,6 +3,7 @@ import logging
 from datetime import datetime, timedelta, timezone
 from typing import TYPE_CHECKING, Annotated, List, Literal

+from autogpt_libs.auth import get_user_id
 from fastapi import (
     APIRouter,
     Body,

@@ -16,7 +17,6 @@ from fastapi import (
 from pydantic import BaseModel, Field, SecretStr
 from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR, HTTP_502_BAD_GATEWAY

-from backend.api.auth import get_user_id
 from backend.api.features.library.db import set_preset_webhook, update_preset
 from backend.api.features.library.model import LibraryAgentPreset
 from backend.data.graph import NodeModel, get_graph, set_node_webhook
@@ -1,10 +1,10 @@
 from typing import Literal, Optional

+import autogpt_libs.auth as autogpt_auth_lib
 from fastapi import APIRouter, Body, HTTPException, Query, Security, status
 from fastapi.responses import Response
 from prisma.enums import OnboardingStep

-import backend.api.auth as autogpt_auth_lib
 from backend.data.onboarding import complete_onboarding_step

 from .. import db as library_db
@@ -1,9 +1,9 @@
 import logging
 from typing import Any, Optional

+import autogpt_libs.auth as autogpt_auth_lib
 from fastapi import APIRouter, Body, HTTPException, Query, Security, status

-import backend.api.auth as autogpt_auth_lib
 from backend.data.execution import GraphExecutionMeta
 from backend.data.graph import get_graph
 from backend.data.integrations import get_webhook
@@ -23,7 +23,7 @@ FIXED_NOW = datetime.datetime(2023, 1, 1, 0, 0, 0)
 @pytest.fixture(autouse=True)
 def setup_app_auth(mock_jwt_user):
     """Setup auth overrides for all tests in this module"""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
     yield
@@ -21,13 +21,13 @@ from datetime import datetime
 from typing import Literal, Optional
 from urllib.parse import urlencode

+from autogpt_libs.auth import get_user_id
 from fastapi import APIRouter, Body, HTTPException, Security, UploadFile, status
 from gcloud.aio import storage as async_storage
 from PIL import Image
 from prisma.enums import APIKeyPermission
 from pydantic import BaseModel, Field

-from backend.api.auth import get_user_id
 from backend.data.auth.oauth import (
     InvalidClientError,
     InvalidGrantError,
@@ -21,6 +21,7 @@ from typing import AsyncGenerator
 import httpx
 import pytest
 import pytest_asyncio
+from autogpt_libs.api_key.keysmith import APIKeySmith
 from prisma.enums import APIKeyPermission
 from prisma.models import OAuthAccessToken as PrismaOAuthAccessToken
 from prisma.models import OAuthApplication as PrismaOAuthApplication

@@ -28,7 +29,6 @@ from prisma.models import OAuthAuthorizationCode as PrismaOAuthAuthorizationCode
 from prisma.models import OAuthRefreshToken as PrismaOAuthRefreshToken
 from prisma.models import User as PrismaUser

-from backend.api.auth.api_key.keysmith import APIKeySmith
 from backend.api.rest_api import app

 keysmith = APIKeySmith()

@@ -134,7 +134,7 @@ async def client(server, test_user: str) -> AsyncGenerator[httpx.AsyncClient, No
     Depends on `server` to ensure the DB is connected and `test_user` to ensure
     the user exists in the database before running tests.
     """
-    from backend.api.auth import get_user_id
+    from autogpt_libs.auth import get_user_id

     # Override get_user_id dependency to return our test user
     def override_get_user_id():
@@ -1,9 +1,8 @@
 import logging

+from autogpt_libs.auth import get_user_id, requires_user
 from fastapi import APIRouter, HTTPException, Security

-from backend.api.auth import get_user_id, requires_user
-
 from .models import ApiResponse, ChatRequest
 from .service import OttoService
@@ -19,7 +19,7 @@ client = fastapi.testclient.TestClient(app)
 @pytest.fixture(autouse=True)
 def setup_app_auth(mock_jwt_user):
     """Setup auth overrides for all tests in this module"""
-    from backend.api.auth.jwt_utils import get_jwt_payload
+    from autogpt_libs.auth.jwt_utils import get_jwt_payload

     app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
     yield
@@ -57,7 +57,7 @@ async def postmark_webhook_handler(
     webhook: Annotated[
         PostmarkWebhook,
         Body(discriminator="RecordType"),
-    ],
+    ]
 ):
     logger.info(f"Received webhook from Postmark: {webhook}")
     match webhook:
@@ -164,7 +164,7 @@ class BlockHandler(ContentHandler):
         block_ids = list(all_blocks.keys())

         # Query for existing embeddings
-        placeholders = ",".join([f"${i + 1}" for i in range(len(block_ids))])
+        placeholders = ",".join([f"${i+1}" for i in range(len(block_ids))])
         existing_result = await query_raw_with_schema(
             f"""
             SELECT "contentId"

@@ -265,7 +265,7 @@ class BlockHandler(ContentHandler):
             return {"total": 0, "with_embeddings": 0, "without_embeddings": 0}

         block_ids = enabled_block_ids
-        placeholders = ",".join([f"${i + 1}" for i in range(len(block_ids))])
+        placeholders = ",".join([f"${i+1}" for i in range(len(block_ids))])

         embedded_result = await query_raw_with_schema(
             f"""

@@ -508,7 +508,7 @@ class DocumentationHandler(ContentHandler):
         ]

         # Check which ones have embeddings
-        placeholders = ",".join([f"${i + 1}" for i in range(len(section_content_ids))])
+        placeholders = ",".join([f"${i+1}" for i in range(len(section_content_ids))])
         existing_result = await query_raw_with_schema(
             f"""
             SELECT "contentId"
@@ -47,7 +47,7 @@ def mock_storage_client(mocker):

 async def test_upload_media_success(mock_settings, mock_storage_client):
     # Create test JPEG data with valid signature
-    test_data = b"\xff\xd8\xff" + b"test data"
+    test_data = b"\xFF\xD8\xFF" + b"test data"

     test_file = fastapi.UploadFile(
         filename="laptop.jpeg",

@@ -85,7 +85,7 @@ async def test_upload_media_missing_credentials(monkeypatch):

     test_file = fastapi.UploadFile(
         filename="laptop.jpeg",
-        file=io.BytesIO(b"\xff\xd8\xff" + b"test data"),  # Valid JPEG signature
+        file=io.BytesIO(b"\xFF\xD8\xFF" + b"test data"),  # Valid JPEG signature
         headers=starlette.datastructures.Headers({"content-type": "image/jpeg"}),
     )

@@ -110,7 +110,7 @@ async def test_upload_media_video_type(mock_settings, mock_storage_client):


 async def test_upload_media_file_too_large(mock_settings, mock_storage_client):
-    large_data = b"\xff\xd8\xff" + b"x" * (
+    large_data = b"\xFF\xD8\xFF" + b"x" * (
         50 * 1024 * 1024 + 1
     )  # 50MB + 1 byte with valid JPEG signature
     test_file = fastapi.UploadFile(
@@ -4,11 +4,11 @@ import typing
 import urllib.parse
 from typing import Literal

+import autogpt_libs.auth
 import fastapi
 import fastapi.responses
 import prisma.enums

-import backend.api.auth
 import backend.data.graph
 import backend.util.json
 from backend.util.models import Pagination

@@ -34,11 +34,11 @@ router = fastapi.APIRouter()
     "/profile",
     summary="Get user profile",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.ProfileDetails,
 )
 async def get_profile(
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Get the profile details for the authenticated user.

@@ -57,12 +57,12 @@ async def get_profile(
     "/profile",
     summary="Update user profile",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.CreatorDetails,
 )
 async def update_or_create_profile(
     profile: store_model.Profile,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Update the store profile for the authenticated user.

@@ -169,7 +169,7 @@ async def unified_search(
     page: int = 1,
     page_size: int = 20,
     user_id: str | None = fastapi.Security(
-        backend.api.auth.get_optional_user_id, use_cache=False
+        autogpt_libs.auth.get_optional_user_id, use_cache=False
     ),
 ):
     """

@@ -274,7 +274,7 @@ async def get_agent(
     "/graph/{store_listing_version_id}",
     summary="Get agent graph",
     tags=["store"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
 )
 async def get_graph_meta_by_store_listing_version_id(
     store_listing_version_id: str,

@@ -290,7 +290,7 @@ async def get_graph_meta_by_store_listing_version_id(
     "/agents/{store_listing_version_id}",
     summary="Get agent by version",
     tags=["store"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.StoreAgentDetails,
 )
 async def get_store_agent(store_listing_version_id: str):

@@ -306,14 +306,14 @@ async def get_store_agent(store_listing_version_id: str):
     "/agents/{username}/{agent_name}/review",
     summary="Create agent review",
     tags=["store"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.StoreReview,
 )
 async def create_review(
     username: str,
     agent_name: str,
     review: store_model.StoreReviewCreate,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Create a review for a store agent.

@@ -417,11 +417,11 @@ async def get_creator(
     "/myagents",
     summary="Get my agents",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.MyAgentsResponse,
 )
 async def get_my_agents(
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
     page: typing.Annotated[int, fastapi.Query(ge=1)] = 1,
     page_size: typing.Annotated[int, fastapi.Query(ge=1)] = 20,
 ):

@@ -436,12 +436,12 @@ async def get_my_agents(
     "/submissions/{submission_id}",
     summary="Delete store submission",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=bool,
 )
 async def delete_submission(
     submission_id: str,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Delete a store listing submission.

@@ -465,11 +465,11 @@ async def delete_submission(
     "/submissions",
     summary="List my submissions",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.StoreSubmissionsResponse,
 )
 async def get_submissions(
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
     page: int = 1,
     page_size: int = 20,
 ):

@@ -508,12 +508,12 @@ async def get_submissions(
     "/submissions",
     summary="Create store submission",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.StoreSubmission,
 )
 async def create_submission(
     submission_request: store_model.StoreSubmissionRequest,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Create a new store listing submission.

@@ -552,13 +552,13 @@ async def create_submission(
     "/submissions/{store_listing_version_id}",
     summary="Edit store submission",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
     response_model=store_model.StoreSubmission,
 )
 async def edit_submission(
     store_listing_version_id: str,
     submission_request: store_model.StoreSubmissionEditRequest,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Edit an existing store listing submission.

@@ -596,11 +596,11 @@ async def edit_submission(
     "/submissions/media",
     summary="Upload submission media",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
 )
 async def upload_submission_media(
     file: fastapi.UploadFile,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ):
     """
     Upload media (images/videos) for a store listing submission.

@@ -623,11 +623,11 @@ async def upload_submission_media(
     "/submissions/generate_image",
     summary="Generate submission image",
     tags=["store", "private"],
-    dependencies=[fastapi.Security(backend.api.auth.requires_user)],
+    dependencies=[fastapi.Security(autogpt_libs.auth.requires_user)],
 )
 async def generate_image(
     agent_id: str,
-    user_id: str = fastapi.Security(backend.api.auth.get_user_id),
+    user_id: str = fastapi.Security(autogpt_libs.auth.get_user_id),
 ) -> fastapi.responses.Response:
     """
     Generate an image for a store listing submission.
@@ -24,7 +24,7 @@ client = fastapi.testclient.TestClient(app)
@pytest.fixture(autouse=True)
def setup_app_auth(mock_jwt_user):
"""Setup auth overrides for all tests in this module"""
from backend.api.auth.jwt_utils import get_jwt_payload
from autogpt_libs.auth.jwt_utils import get_jwt_payload

app.dependency_overrides[get_jwt_payload] = mock_jwt_user["get_jwt_payload"]
yield
@@ -9,6 +9,8 @@ from typing import Annotated, Any, Sequence, get_args

import pydantic
import stripe
from autogpt_libs.auth import get_user_id, requires_user
from autogpt_libs.auth.jwt_utils import get_jwt_payload
from fastapi import (
APIRouter,
Body,
@@ -26,8 +28,6 @@ from pydantic import BaseModel
from starlette.status import HTTP_204_NO_CONTENT, HTTP_404_NOT_FOUND
from typing_extensions import Optional, TypedDict

from backend.api.auth import get_user_id, requires_user
from backend.api.auth.jwt_utils import get_jwt_payload
from backend.api.model import (
CreateAPIKeyRequest,
CreateAPIKeyResponse,
@@ -25,7 +25,7 @@ client = fastapi.testclient.TestClient(app)
@pytest.fixture(autouse=True)
def setup_app_auth(mock_jwt_user, setup_test_user):
"""Setup auth overrides for all tests in this module"""
from backend.api.auth.jwt_utils import get_jwt_payload
from autogpt_libs.auth.jwt_utils import get_jwt_payload

# setup_test_user fixture already executed and user is created in database
# It returns the user_id which we don't need to await
@@ -499,12 +499,10 @@ async def test_upload_file_success(test_user_id: str):
)

# Mock dependencies
with (
patch("backend.api.features.v1.scan_content_safe") as mock_scan,
patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter,
):
with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter:

mock_scan.return_value = None
mock_handler = AsyncMock()
mock_handler.store_file.return_value = "gcs://test-bucket/uploads/123/test.txt"
@@ -553,12 +551,10 @@ async def test_upload_file_no_filename(test_user_id: str):
),
)

with (
patch("backend.api.features.v1.scan_content_safe") as mock_scan,
patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter,
):
with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter:

mock_scan.return_value = None
mock_handler = AsyncMock()
mock_handler.store_file.return_value = (
@@ -636,12 +632,10 @@ async def test_upload_file_cloud_storage_failure(test_user_id: str):
headers=starlette.datastructures.Headers({"content-type": "text/plain"}),
)

with (
patch("backend.api.features.v1.scan_content_safe") as mock_scan,
patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter,
):
with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter:

mock_scan.return_value = None
mock_handler = AsyncMock()
mock_handler.store_file.side_effect = RuntimeError("Storage error!")
@@ -685,12 +679,10 @@ async def test_upload_file_gcs_not_configured_fallback(test_user_id: str):
headers=starlette.datastructures.Headers({"content-type": "text/plain"}),
)

with (
patch("backend.api.features.v1.scan_content_safe") as mock_scan,
patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter,
):
with patch("backend.api.features.v1.scan_content_safe") as mock_scan, patch(
"backend.api.features.v1.get_cloud_storage_handler"
) as mock_handler_getter:

mock_scan.return_value = None
mock_handler = AsyncMock()
mock_handler.config.gcs_bucket_name = ""  # Simulate no GCS bucket configured
@@ -8,9 +8,9 @@ from typing import Annotated
from urllib.parse import quote

import fastapi
from autogpt_libs.auth.dependencies import get_user_id, requires_user
from fastapi.responses import Response

from backend.api.auth.dependencies import get_user_id, requires_user
from backend.data.workspace import get_workspace, get_workspace_file
from backend.util.workspace_storage import get_workspace_storage
@@ -9,6 +9,8 @@ import fastapi.responses
import pydantic
import starlette.middleware.cors
import uvicorn
from autogpt_libs.auth import add_auth_responses_to_openapi
from autogpt_libs.auth import verify_settings as verify_auth_settings
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.gzip import GZipMiddleware
from fastapi.routing import APIRoute
@@ -38,8 +40,6 @@ import backend.data.user
import backend.integrations.webhooks.utils
import backend.util.service
import backend.util.settings
from backend.api.auth import add_auth_responses_to_openapi
from backend.api.auth import verify_settings as verify_auth_settings
from backend.api.features.chat.completion_consumer import (
start_completion_consumer,
stop_completion_consumer,
@@ -69,7 +69,7 @@ from .utils.openapi import sort_openapi
settings = backend.util.settings.Settings()
logger = logging.getLogger(__name__)

logging.getLogger("backend.api.auth").setLevel(logging.INFO)
logging.getLogger("autogpt_libs").setLevel(logging.INFO)

@contextlib.contextmanager
@@ -457,8 +457,7 @@ async def test_api_key_with_unicode_characters_normalization_attack(mock_request
"""Test that Unicode normalization doesn't bypass validation."""
# Create auth with composed Unicode character
auth = APIKeyAuthenticator(
header_name="X-API-Key",
expected_token="café",  # é is composed
header_name="X-API-Key", expected_token="café"  # é is composed
)

# Try with decomposed version (c + a + f + e + ´)
@@ -523,8 +522,8 @@ async def test_api_keys_with_newline_variations(mock_request):
"valid\r\ntoken",  # Windows newline
"valid\rtoken",  # Mac newline
"valid\x85token",  # NEL (Next Line)
"valid\x0btoken",  # Vertical Tab
"valid\x0ctoken",  # Form Feed
"valid\x0Btoken",  # Vertical Tab
"valid\x0Ctoken",  # Form Feed
]

for api_key in newline_variations:
@@ -5,10 +5,10 @@ from typing import Protocol

import pydantic
import uvicorn
from autogpt_libs.auth.jwt_utils import parse_jwt_token
from fastapi import Depends, FastAPI, WebSocket, WebSocketDisconnect
from starlette.middleware.cors import CORSMiddleware

from backend.api.auth.jwt_utils import parse_jwt_token
from backend.api.conn_manager import ConnectionManager
from backend.api.model import (
WSMessage,
@@ -44,12 +44,9 @@ def test_websocket_server_uses_cors_helper(mocker) -> None:
"backend.api.ws_api.build_cors_params", return_value=cors_params
)

with (
override_config(
settings, "backend_cors_allow_origins", cors_params["allow_origins"]
),
override_config(settings, "app_env", AppEnvironment.LOCAL),
):
with override_config(
settings, "backend_cors_allow_origins", cors_params["allow_origins"]
), override_config(settings, "app_env", AppEnvironment.LOCAL):
WebsocketServer().run()

build_cors.assert_called_once_with(
@@ -68,12 +65,9 @@ def test_websocket_server_uses_cors_helper(mocker) -> None:
def test_websocket_server_blocks_localhost_in_production(mocker) -> None:
mocker.patch("backend.api.ws_api.uvicorn.run")

with (
override_config(
settings, "backend_cors_allow_origins", ["http://localhost:3000"]
),
override_config(settings, "app_env", AppEnvironment.PRODUCTION),
):
with override_config(
settings, "backend_cors_allow_origins", ["http://localhost:3000"]
), override_config(settings, "app_env", AppEnvironment.PRODUCTION):
with pytest.raises(ValueError):
WebsocketServer().run()
@@ -174,9 +174,7 @@ class AIImageGeneratorBlock(Block):
],
test_mock={
# Return a data URI directly so store_media_file doesn't need to download
"_run_client": lambda *args, **kwargs: (
"data:image/webp;base64,UklGRiQAAABXRUJQVlA4IBgAAAAwAQCdASoBAAEAAQAcJYgCdAEO"
)
"_run_client": lambda *args, **kwargs: "data:image/webp;base64,UklGRiQAAABXRUJQVlA4IBgAAAAwAQCdASoBAAEAAQAcJYgCdAEO"
},
)

@@ -142,9 +142,7 @@ class AIMusicGeneratorBlock(Block):
),
],
test_mock={
"run_model": lambda api_key, music_gen_model_version, prompt, duration, temperature, top_k, top_p, classifier_free_guidance, output_format, normalization_strategy: (
"https://replicate.com/output/generated-audio-url.wav"
),
"run_model": lambda api_key, music_gen_model_version, prompt, duration, temperature, top_k, top_p, classifier_free_guidance, output_format, normalization_strategy: "https://replicate.com/output/generated-audio-url.wav",
},
test_credentials=TEST_CREDENTIALS,
)
@@ -69,18 +69,12 @@ class PostToBlueskyBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate character limit for Bluesky
if len(input_data.post) > 300:
yield (
"error",
f"Post text exceeds Bluesky's 300 character limit ({len(input_data.post)} characters)",
)
yield "error", f"Post text exceeds Bluesky's 300 character limit ({len(input_data.post)} characters)"
return

# Validate media constraints for Bluesky

@@ -131,10 +131,7 @@ class PostToFacebookBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Convert datetime to ISO format if provided

@@ -120,18 +120,12 @@ class PostToGMBBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate GMB constraints
if len(input_data.media_urls) > 1:
yield (
"error",
"Google My Business supports only one image or video per post",
)
yield "error", "Google My Business supports only one image or video per post"
return

# Validate offer coupon code length

@@ -123,25 +123,16 @@ class PostToInstagramBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate Instagram constraints
if len(input_data.post) > 2200:
yield (
"error",
f"Instagram post text exceeds 2,200 character limit ({len(input_data.post)} characters)",
)
yield "error", f"Instagram post text exceeds 2,200 character limit ({len(input_data.post)} characters)"
return

if len(input_data.media_urls) > 10:
yield (
"error",
"Instagram supports a maximum of 10 images/videos in a carousel",
)
yield "error", "Instagram supports a maximum of 10 images/videos in a carousel"
return

if len(input_data.collaborators) > 3:
@@ -156,10 +147,7 @@ class PostToInstagramBlock(Block):
]

if any(reel_options) and not all(reel_options):
yield (
"error",
"When posting a reel, all reel options must be set: share_reels_feed, audio_name, and either thumbnail or thumbnail_offset",
)
yield "error", "When posting a reel, all reel options must be set: share_reels_feed, audio_name, and either thumbnail or thumbnail_offset"
return

# Count hashtags and mentions
@@ -167,17 +155,11 @@ class PostToInstagramBlock(Block):
mention_count = input_data.post.count("@")

if hashtag_count > 30:
yield (
"error",
f"Instagram allows maximum 30 hashtags ({hashtag_count} found)",
)
yield "error", f"Instagram allows maximum 30 hashtags ({hashtag_count} found)"
return

if mention_count > 3:
yield (
"error",
f"Instagram allows maximum 3 @mentions ({mention_count} found)",
)
yield "error", f"Instagram allows maximum 3 @mentions ({mention_count} found)"
return

# Convert datetime to ISO format if provided
@@ -209,10 +191,7 @@ class PostToInstagramBlock(Block):
# Validate alt text length
for i, alt in enumerate(input_data.alt_text):
if len(alt) > 1000:
yield (
"error",
f"Alt text {i + 1} exceeds 1,000 character limit ({len(alt)} characters)",
)
yield "error", f"Alt text {i+1} exceeds 1,000 character limit ({len(alt)} characters)"
return
instagram_options["altText"] = input_data.alt_text

@@ -227,19 +206,13 @@ class PostToInstagramBlock(Block):
try:
tag_obj = InstagramUserTag(**tag)
except Exception as e:
yield (
"error",
f"Invalid user tag: {e}, tags need to be a dictionary with 3 items: username (str), x (float) and y (float)",
)
yield "error", f"Invalid user tag: {e}, tags need to be a dictionary with 3 items: username (str), x (float) and y (float)"
return
tag_dict: dict[str, float | str] = {"username": tag_obj.username}
if tag_obj.x is not None and tag_obj.y is not None:
# Validate coordinates
if not (0.0 <= tag_obj.x <= 1.0) or not (0.0 <= tag_obj.y <= 1.0):
yield (
"error",
f"User tag coordinates must be between 0.0 and 1.0 (user: {tag_obj.username})",
)
yield "error", f"User tag coordinates must be between 0.0 and 1.0 (user: {tag_obj.username})"
return
tag_dict["x"] = tag_obj.x
tag_dict["y"] = tag_obj.y
@@ -123,18 +123,12 @@ class PostToLinkedInBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate LinkedIn constraints
if len(input_data.post) > 3000:
yield (
"error",
f"LinkedIn post text exceeds 3,000 character limit ({len(input_data.post)} characters)",
)
yield "error", f"LinkedIn post text exceeds 3,000 character limit ({len(input_data.post)} characters)"
return

if len(input_data.media_urls) > 9:
@@ -142,19 +136,13 @@ class PostToLinkedInBlock(Block):
return

if input_data.document_title and len(input_data.document_title) > 400:
yield (
"error",
f"LinkedIn document title exceeds 400 character limit ({len(input_data.document_title)} characters)",
)
yield "error", f"LinkedIn document title exceeds 400 character limit ({len(input_data.document_title)} characters)"
return

# Validate visibility option
valid_visibility = ["public", "connections", "loggedin"]
if input_data.visibility not in valid_visibility:
yield (
"error",
f"LinkedIn visibility must be one of: {', '.join(valid_visibility)}",
)
yield "error", f"LinkedIn visibility must be one of: {', '.join(valid_visibility)}"
return

# Check for document extensions
@@ -103,32 +103,20 @@ class PostToPinterestBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate Pinterest constraints
if len(input_data.post) > 500:
yield (
"error",
f"Pinterest pin description exceeds 500 character limit ({len(input_data.post)} characters)",
)
yield "error", f"Pinterest pin description exceeds 500 character limit ({len(input_data.post)} characters)"
return

if len(input_data.pin_title) > 100:
yield (
"error",
f"Pinterest pin title exceeds 100 character limit ({len(input_data.pin_title)} characters)",
)
yield "error", f"Pinterest pin title exceeds 100 character limit ({len(input_data.pin_title)} characters)"
return

if len(input_data.link) > 2048:
yield (
"error",
f"Pinterest link URL exceeds 2048 character limit ({len(input_data.link)} characters)",
)
yield "error", f"Pinterest link URL exceeds 2048 character limit ({len(input_data.link)} characters)"
return

if len(input_data.media_urls) == 0:
@@ -153,10 +141,7 @@ class PostToPinterestBlock(Block):
# Validate alt text length
for i, alt in enumerate(input_data.alt_text):
if len(alt) > 500:
yield (
"error",
f"Pinterest alt text {i + 1} exceeds 500 character limit ({len(alt)} characters)",
)
yield "error", f"Pinterest alt text {i+1} exceeds 500 character limit ({len(alt)} characters)"
return

# Convert datetime to ISO format if provided
@@ -73,10 +73,7 @@ class PostToSnapchatBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate Snapchat constraints
@@ -91,10 +88,7 @@ class PostToSnapchatBlock(Block):
# Validate story type
valid_story_types = ["story", "saved_story", "spotlight"]
if input_data.story_type not in valid_story_types:
yield (
"error",
f"Snapchat story type must be one of: {', '.join(valid_story_types)}",
)
yield "error", f"Snapchat story type must be one of: {', '.join(valid_story_types)}"
return

# Convert datetime to ISO format if provided

@@ -68,10 +68,7 @@ class PostToTelegramBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate Telegram constraints
@@ -61,34 +61,22 @@ class PostToThreadsBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate Threads constraints
if len(input_data.post) > 500:
yield (
"error",
f"Threads post text exceeds 500 character limit ({len(input_data.post)} characters)",
)
yield "error", f"Threads post text exceeds 500 character limit ({len(input_data.post)} characters)"
return

if len(input_data.media_urls) > 20:
yield (
"error",
"Threads supports a maximum of 20 images/videos in a carousel",
)
yield "error", "Threads supports a maximum of 20 images/videos in a carousel"
return

# Count hashtags (only 1 allowed)
hashtag_count = input_data.post.count("#")
if hashtag_count > 1:
yield (
"error",
f"Threads allows only 1 hashtag per post ({hashtag_count} found)",
)
yield "error", f"Threads allows only 1 hashtag per post ({hashtag_count} found)"
return

# Convert datetime to ISO format if provided
@@ -123,25 +123,16 @@ class PostToTikTokBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate TikTok constraints
if len(input_data.post) > 2200:
yield (
"error",
f"TikTok post text exceeds 2,200 character limit ({len(input_data.post)} characters)",
)
yield "error", f"TikTok post text exceeds 2,200 character limit ({len(input_data.post)} characters)"
return

if not input_data.media_urls:
yield (
"error",
"TikTok requires at least one media URL (either 1 video or up to 35 images)",
)
yield "error", "TikTok requires at least one media URL (either 1 video or up to 35 images)"
return

# Check for video vs image constraints
@@ -159,10 +150,7 @@ class PostToTikTokBlock(Block):
)

if has_video and has_images:
yield (
"error",
"TikTok does not support mixing video and images in the same post",
)
yield "error", "TikTok does not support mixing video and images in the same post"
return

if has_video and len(input_data.media_urls) > 1:
@@ -175,19 +163,13 @@ class PostToTikTokBlock(Block):

# Validate image cover index
if has_images and input_data.image_cover_index >= len(input_data.media_urls):
yield (
"error",
f"Image cover index {input_data.image_cover_index} is out of range (max: {len(input_data.media_urls) - 1})",
)
yield "error", f"Image cover index {input_data.image_cover_index} is out of range (max: {len(input_data.media_urls) - 1})"
return

# Check for PNG files (not supported)
has_png = any(url.lower().endswith(".png") for url in input_data.media_urls)
if has_png:
yield (
"error",
"TikTok does not support PNG files. Please use JPG, JPEG, or WEBP for images.",
)
yield "error", "TikTok does not support PNG files. Please use JPG, JPEG, or WEBP for images."
return

# Convert datetime to ISO format if provided
@@ -126,25 +126,16 @@ class PostToXBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate X constraints
if not input_data.long_post and len(input_data.post) > 280:
yield (
"error",
f"X post text exceeds 280 character limit ({len(input_data.post)} characters). Enable 'long_post' for Premium accounts.",
)
yield "error", f"X post text exceeds 280 character limit ({len(input_data.post)} characters). Enable 'long_post' for Premium accounts."
return

if input_data.long_post and len(input_data.post) > 25000:
yield (
"error",
f"X long post text exceeds 25,000 character limit ({len(input_data.post)} characters)",
)
yield "error", f"X long post text exceeds 25,000 character limit ({len(input_data.post)} characters)"
return

if len(input_data.media_urls) > 4:
@@ -158,20 +149,14 @@ class PostToXBlock(Block):
return

if input_data.poll_duration < 1 or input_data.poll_duration > 10080:
yield (
"error",
"X poll duration must be between 1 and 10,080 minutes (7 days)",
)
yield "error", "X poll duration must be between 1 and 10,080 minutes (7 days)"
return

# Validate alt text
if input_data.alt_text:
for i, alt in enumerate(input_data.alt_text):
if len(alt) > 1000:
yield (
"error",
f"X alt text {i + 1} exceeds 1,000 character limit ({len(alt)} characters)",
)
yield "error", f"X alt text {i+1} exceeds 1,000 character limit ({len(alt)} characters)"
return

# Validate subtitle settings
@@ -183,10 +168,7 @@ class PostToXBlock(Block):
return

if len(input_data.subtitle_name) > 150:
yield (
"error",
f"Subtitle name exceeds 150 character limit ({len(input_data.subtitle_name)} characters)",
)
yield "error", f"Subtitle name exceeds 150 character limit ({len(input_data.subtitle_name)} characters)"
return

# Convert datetime to ISO format if provided
@@ -149,10 +149,7 @@ class PostToYouTubeBlock(Block):

client = create_ayrshare_client()
if not client:
yield (
"error",
"Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY.",
)
yield "error", "Ayrshare integration is not configured. Please set up the AYRSHARE_API_KEY."
return

# Validate YouTube constraints
@@ -161,17 +158,11 @@ class PostToYouTubeBlock(Block):
return

if len(input_data.title) > 100:
yield (
"error",
f"YouTube title exceeds 100 character limit ({len(input_data.title)} characters)",
)
yield "error", f"YouTube title exceeds 100 character limit ({len(input_data.title)} characters)"
return

if len(input_data.post) > 5000:
yield (
"error",
f"YouTube description exceeds 5,000 character limit ({len(input_data.post)} characters)",
)
yield "error", f"YouTube description exceeds 5,000 character limit ({len(input_data.post)} characters)"
return

# Check for forbidden characters
@@ -195,10 +186,7 @@ class PostToYouTubeBlock(Block):
# Validate visibility option
valid_visibility = ["private", "public", "unlisted"]
if input_data.visibility not in valid_visibility:
yield (
"error",
f"YouTube visibility must be one of: {', '.join(valid_visibility)}",
)
yield "error", f"YouTube visibility must be one of: {', '.join(valid_visibility)}"
return

# Validate thumbnail URL format
@@ -214,18 +202,12 @@ class PostToYouTubeBlock(Block):
if input_data.tags:
total_tag_length = sum(len(tag) for tag in input_data.tags)
if total_tag_length > 500:
yield (
"error",
f"YouTube tags total length exceeds 500 characters ({total_tag_length} characters)",
)
yield "error", f"YouTube tags total length exceeds 500 characters ({total_tag_length} characters)"
return

for tag in input_data.tags:
if len(tag) < 2:
yield (
"error",
f"YouTube tag '{tag}' is too short (minimum 2 characters)",
)
yield "error", f"YouTube tag '{tag}' is too short (minimum 2 characters)"
return

# Validate subtitle URL
@@ -243,18 +225,12 @@ class PostToYouTubeBlock(Block):
return

if input_data.subtitle_name and len(input_data.subtitle_name) > 150:
yield (
"error",
f"YouTube subtitle name exceeds 150 character limit ({len(input_data.subtitle_name)} characters)",
)
yield "error", f"YouTube subtitle name exceeds 150 character limit ({len(input_data.subtitle_name)} characters)"
return

# Validate publish_at format if provided
if input_data.publish_at and input_data.schedule_date:
yield (
"error",
"Cannot use both 'publish_at' and 'schedule_date'. Use 'publish_at' for YouTube-controlled publishing.",
)
yield "error", "Cannot use both 'publish_at' and 'schedule_date'. Use 'publish_at' for YouTube-controlled publishing."
return

# Convert datetime to ISO format if provided (only if not using publish_at)
@@ -59,13 +59,10 @@ class FileStoreBlock(Block):
# for_block_output: smart format - workspace:// in CoPilot, data URI in graphs
return_format = "for_external_api" if input_data.base_64 else "for_block_output"

yield (
"file_out",
await store_media_file(
file=input_data.file_in,
execution_context=execution_context,
return_format=return_format,
),
yield "file_out", await store_media_file(
file=input_data.file_in,
execution_context=execution_context,
return_format=return_format,
)
@@ -728,12 +728,9 @@ class ConcatenateListsBlock(Block):
# Type validation: each item must be a list
# Strings are iterable and would cause extend() to iterate character-by-character
# Non-iterable types would raise TypeError
yield (
"error",
(
f"Invalid input at index {idx}: expected a list, got {type(lst).__name__}. "
f"All items in 'lists' must be lists (e.g., [[1, 2], [3, 4]])."
),
yield "error", (
f"Invalid input at index {idx}: expected a list, got {type(lst).__name__}. "
f"All items in 'lists' must be lists (e.g., [[1, 2], [3, 4]])."
)
return
concatenated.extend(lst)
@@ -110,10 +110,8 @@ class DataForSeoKeywordSuggestionsBlock(Block):
test_output=[
(
"suggestion",
lambda x: (
hasattr(x, "keyword")
and x.keyword == "digital marketing strategy"
),
lambda x: hasattr(x, "keyword")
and x.keyword == "digital marketing strategy",
),
("suggestions", lambda x: isinstance(x, list) and len(x) == 1),
("total_count", 1),
@@ -137,71 +137,47 @@ class SendEmailBlock(Block):
)
yield "status", status
except socket.gaierror:
yield (
"error",
(
f"Cannot connect to SMTP server '{input_data.config.smtp_server}'. "
"Please verify the server address is correct."
),
yield "error", (
f"Cannot connect to SMTP server '{input_data.config.smtp_server}'. "
"Please verify the server address is correct."
)
except socket.timeout:
yield (
"error",
(
f"Connection timeout to '{input_data.config.smtp_server}' "
f"on port {input_data.config.smtp_port}. "
"The server may be down or unreachable."
),
yield "error", (
f"Connection timeout to '{input_data.config.smtp_server}' "
f"on port {input_data.config.smtp_port}. "
"The server may be down or unreachable."
)
except ConnectionRefusedError:
yield (
"error",
(
f"Connection refused to '{input_data.config.smtp_server}' "
f"on port {input_data.config.smtp_port}. "
"Common SMTP ports are: 587 (TLS), 465 (SSL), 25 (plain). "
"Please verify the port is correct."
),
yield "error", (
f"Connection refused to '{input_data.config.smtp_server}' "
f"on port {input_data.config.smtp_port}. "
"Common SMTP ports are: 587 (TLS), 465 (SSL), 25 (plain). "
"Please verify the port is correct."
)
except smtplib.SMTPNotSupportedError:
yield (
"error",
(
f"STARTTLS not supported by server '{input_data.config.smtp_server}'. "
"Try using port 465 for SSL or port 25 for unencrypted connection."
),
yield "error", (
f"STARTTLS not supported by server '{input_data.config.smtp_server}'. "
"Try using port 465 for SSL or port 25 for unencrypted connection."
)
except ssl.SSLError as e:
yield (
"error",
(
f"SSL/TLS error when connecting to '{input_data.config.smtp_server}': {str(e)}. "
"The server may require a different security protocol."
),
yield "error", (
f"SSL/TLS error when connecting to '{input_data.config.smtp_server}': {str(e)}. "
"The server may require a different security protocol."
)
except smtplib.SMTPAuthenticationError:
yield (
"error",
(
"Authentication failed. Please verify your username and password are correct."
),
yield "error", (
"Authentication failed. Please verify your username and password are correct."
)
except smtplib.SMTPRecipientsRefused:
yield (
"error",
(
f"Recipient email address '{input_data.to_email}' was rejected by the server. "
"Please verify the email address is valid."
),
yield "error", (
f"Recipient email address '{input_data.to_email}' was rejected by the server. "
"Please verify the email address is valid."
)
except smtplib.SMTPSenderRefused:
yield (
"error",
(
"Sender email address defined in the credentials that were used "
"was rejected by the server. "
"Please verify your account is authorized to send emails."
),
yield "error", (
"Sender email address defined in the credentials that were used "
"was rejected by the server. "
"Please verify your account is authorized to send emails."
)
except smtplib.SMTPDataError as e:
yield "error", f"Email data rejected by server: {str(e)}"
@@ -490,9 +490,7 @@ class GetLinkedinProfilePictureBlock(Block):
],
test_credentials=TEST_CREDENTIALS,
test_mock={
"_get_profile_picture": lambda *args, **kwargs: (
"https://media.licdn.com/dms/image/C4D03AQFj-xjuXrLFSQ/profile-displayphoto-shrink_800_800/0/1576881858598?e=1686787200&v=beta&t=zrQC76QwsfQQIWthfOnrKRBMZ5D-qIAvzLXLmWgYvTk"
),
"_get_profile_picture": lambda *args, **kwargs: "https://media.licdn.com/dms/image/C4D03AQFj-xjuXrLFSQ/profile-displayphoto-shrink_800_800/0/1576881858598?e=1686787200&v=beta&t=zrQC76QwsfQQIWthfOnrKRBMZ5D-qIAvzLXLmWgYvTk",
},
)
@@ -319,7 +319,7 @@ class CostDollars(BaseModel):

# Helper functions for payload processing
def process_text_field(
text: Union[bool, TextEnabled, TextDisabled, TextAdvanced, None],
text: Union[bool, TextEnabled, TextDisabled, TextAdvanced, None]
) -> Optional[Union[bool, Dict[str, Any]]]:
"""Process text field for API payload."""
if text is None:
@@ -400,7 +400,7 @@ def process_contents_settings(contents: Optional[ContentSettings]) -> Dict[str,

def process_context_field(
context: Union[bool, dict, ContextEnabled, ContextDisabled, ContextAdvanced, None],
context: Union[bool, dict, ContextEnabled, ContextDisabled, ContextAdvanced, None]
) -> Optional[Union[bool, Dict[str, int]]]:
"""Process context field for API payload."""
if context is None:
@@ -566,9 +566,8 @@ class ExaUpdateWebsetBlock(Block):
yield "status", status_str
yield "external_id", sdk_webset.external_id
yield "metadata", sdk_webset.metadata or {}
yield (
"updated_at",
(sdk_webset.updated_at.isoformat() if sdk_webset.updated_at else ""),
yield "updated_at", (
sdk_webset.updated_at.isoformat() if sdk_webset.updated_at else ""
)

@@ -707,13 +706,11 @@ class ExaGetWebsetBlock(Block):
yield "enrichments", enrichments_data
yield "monitors", monitors_data
yield "metadata", sdk_webset.metadata or {}
yield (
"created_at",
(sdk_webset.created_at.isoformat() if sdk_webset.created_at else ""),
yield "created_at", (
sdk_webset.created_at.isoformat() if sdk_webset.created_at else ""
)
yield (
"updated_at",
(sdk_webset.updated_at.isoformat() if sdk_webset.updated_at else ""),
yield "updated_at", (
sdk_webset.updated_at.isoformat() if sdk_webset.updated_at else ""
)
@@ -523,20 +523,16 @@ class ExaWaitForEnrichmentBlock(Block):
items_enriched = 0

if input_data.sample_results and status == "completed":
(
sample_data,
items_enriched,
) = await self._get_sample_enrichments(
input_data.webset_id, input_data.enrichment_id, aexa
sample_data, items_enriched = (
await self._get_sample_enrichments(
input_data.webset_id, input_data.enrichment_id, aexa
)
)

yield "enrichment_id", input_data.enrichment_id
yield "final_status", status
yield "items_enriched", items_enriched
yield (
"enrichment_title",
enrichment.title or enrichment.description or "",
)
yield "enrichment_title", enrichment.title or enrichment.description or ""
yield "elapsed_time", elapsed
if input_data.sample_results:
yield "sample_data", sample_data
@@ -127,9 +127,7 @@ class AIImageEditorBlock(Block):
],
test_mock={
# Use data URI to avoid HTTP requests during tests
"run_model": lambda *args, **kwargs: (
"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="
),
"run_model": lambda *args, **kwargs: "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg==",
},
test_credentials=TEST_CREDENTIALS,
)
@@ -798,9 +798,7 @@ class GithubUnassignIssueBlock(Block):
test_credentials=TEST_CREDENTIALS,
test_output=[("status", "Issue unassigned successfully")],
test_mock={
"unassign_issue": lambda *args, **kwargs: (
"Issue unassigned successfully"
)
"unassign_issue": lambda *args, **kwargs: "Issue unassigned successfully"
},
)

@@ -261,9 +261,7 @@ class GithubReadPullRequestBlock(Block):
"This is the body of the pull request.",
"username",
),
"read_pr_changes": lambda *args, **kwargs: (
"List of changes made in the pull request."
),
"read_pr_changes": lambda *args, **kwargs: "List of changes made in the pull request.",
},
)
@@ -367,9 +365,7 @@ class GithubAssignPRReviewerBlock(Block):
test_credentials=TEST_CREDENTIALS,
test_output=[("status", "Reviewer assigned successfully")],
test_mock={
"assign_reviewer": lambda *args, **kwargs: (
"Reviewer assigned successfully"
)
"assign_reviewer": lambda *args, **kwargs: "Reviewer assigned successfully"
},
)
@@ -436,9 +432,7 @@ class GithubUnassignPRReviewerBlock(Block):
test_credentials=TEST_CREDENTIALS,
test_output=[("status", "Reviewer unassigned successfully")],
test_mock={
"unassign_reviewer": lambda *args, **kwargs: (
"Reviewer unassigned successfully"
)
"unassign_reviewer": lambda *args, **kwargs: "Reviewer unassigned successfully"
},
)
@@ -341,17 +341,14 @@ class GoogleDocsCreateBlock(Block):
)
doc_id = result["document_id"]
doc_url = result["document_url"]
yield (
"document",
GoogleDriveFile(
id=doc_id,
name=input_data.title,
mimeType="application/vnd.google-apps.document",
url=doc_url,
iconUrl="https://www.gstatic.com/images/branding/product/1x/docs_48dp.png",
isFolder=False,
_credentials_id=input_data.credentials.id,
),
yield "document", GoogleDriveFile(
id=doc_id,
name=input_data.title,
mimeType="application/vnd.google-apps.document",
url=doc_url,
iconUrl="https://www.gstatic.com/images/branding/product/1x/docs_48dp.png",
isFolder=False,
_credentials_id=input_data.credentials.id,
)
yield "document_id", doc_id
yield "document_url", doc_url
@@ -818,10 +815,7 @@ class GoogleDocsGetMetadataBlock(Block):
yield "title", result["title"]
yield "document_id", input_data.document.id
yield "revision_id", result["revision_id"]
yield (
"document_url",
f"https://docs.google.com/document/d/{input_data.document.id}/edit",
)
yield "document_url", f"https://docs.google.com/document/d/{input_data.document.id}/edit"
yield "document", _make_document_output(input_data.document)
except Exception as e:
yield "error", f"Failed to get metadata: {str(e)}"
@@ -278,13 +278,11 @@ class GmailBase(Block, ABC):
"""Download attachment content when email body is stored as attachment."""
try:
attachment = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.attachments()
.get(userId="me", messageId=msg_id, id=attachment_id)
.execute()
)
lambda: service.users()
.messages()
.attachments()
.get(userId="me", messageId=msg_id, id=attachment_id)
.execute()
)
return attachment.get("data")
except Exception:
@@ -306,13 +304,11 @@ class GmailBase(Block, ABC):

async def download_attachment(self, service, message_id: str, attachment_id: str):
attachment = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.attachments()
.get(userId="me", messageId=message_id, id=attachment_id)
.execute()
)
lambda: service.users()
.messages()
.attachments()
.get(userId="me", messageId=message_id, id=attachment_id)
.execute()
)
file_data = base64.urlsafe_b64decode(attachment["data"].encode("UTF-8"))
return file_data
@@ -470,12 +466,10 @@ class GmailReadBlock(GmailBase):
else "full"
)
msg = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.get(userId="me", id=message["id"], format=format_type)
.execute()
)
lambda: service.users()
.messages()
.get(userId="me", id=message["id"], format=format_type)
.execute()
)

headers = {
@@ -608,12 +602,10 @@ class GmailSendBlock(GmailBase):
)
raw_message = await create_mime_message(input_data, execution_context)
sent_message = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.send(userId="me", body={"raw": raw_message})
.execute()
)
lambda: service.users()
.messages()
.send(userId="me", body={"raw": raw_message})
.execute()
)
return {"id": sent_message["id"], "status": "sent"}
@@ -707,13 +699,8 @@ class GmailCreateDraftBlock(GmailBase):
input_data,
execution_context,
)
yield (
"result",
GmailDraftResult(
id=result["id"],
message_id=result["message"]["id"],
status="draft_created",
),
yield "result", GmailDraftResult(
id=result["id"], message_id=result["message"]["id"], status="draft_created"
)

async def _create_draft(
@@ -726,12 +713,10 @@ class GmailCreateDraftBlock(GmailBase):

raw_message = await create_mime_message(input_data, execution_context)
draft = await asyncio.to_thread(
lambda: (
service.users()
.drafts()
.create(userId="me", body={"message": {"raw": raw_message}})
.execute()
)
lambda: service.users()
.drafts()
.create(userId="me", body={"message": {"raw": raw_message}})
.execute()
)

return draft
@@ -855,12 +840,10 @@ class GmailAddLabelBlock(GmailBase):
async def _add_label(self, service, message_id: str, label_name: str) -> dict:
label_id = await self._get_or_create_label(service, label_name)
result = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.modify(userId="me", id=message_id, body={"addLabelIds": [label_id]})
.execute()
)
lambda: service.users()
.messages()
.modify(userId="me", id=message_id, body={"addLabelIds": [label_id]})
.execute()
)
if not result.get("labelIds"):
return {
@@ -874,12 +857,10 @@ class GmailAddLabelBlock(GmailBase):
label_id = await self._get_label_id(service, label_name)
if not label_id:
label = await asyncio.to_thread(
lambda: (
service.users()
.labels()
.create(userId="me", body={"name": label_name})
.execute()
)
lambda: service.users()
.labels()
.create(userId="me", body={"name": label_name})
.execute()
)
label_id = label["id"]
return label_id
@@ -946,14 +927,10 @@ class GmailRemoveLabelBlock(GmailBase):
label_id = await self._get_label_id(service, label_name)
if label_id:
result = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.modify(
userId="me", id=message_id, body={"removeLabelIds": [label_id]}
)
.execute()
)
lambda: service.users()
.messages()
.modify(userId="me", id=message_id, body={"removeLabelIds": [label_id]})
.execute()
)
if not result.get("labelIds"):
return {
@@ -1071,12 +1048,10 @@ class GmailGetThreadBlock(GmailBase):
else "full"
)
thread = await asyncio.to_thread(
lambda: (
service.users()
.threads()
.get(userId="me", id=thread_id, format=format_type)
.execute()
)
lambda: service.users()
.threads()
.get(userId="me", id=thread_id, format=format_type)
.execute()
)

parsed_messages = []
@@ -1131,25 +1106,23 @@ async def _build_reply_message(
"""
# Get parent message for reply context
parent = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.get(
userId="me",
id=input_data.parentMessageId,
format="metadata",
metadataHeaders=[
"Subject",
"References",
"Message-ID",
"From",
"To",
"Cc",
"Reply-To",
],
)
.execute()
lambda: service.users()
.messages()
.get(
userId="me",
id=input_data.parentMessageId,
format="metadata",
metadataHeaders=[
"Subject",
"References",
"Message-ID",
"From",
"To",
"Cc",
"Reply-To",
],
)
.execute()
)

# Build headers dictionary, preserving all values for duplicate headers
@@ -1373,12 +1346,10 @@ class GmailReplyBlock(GmailBase):

# Send the message
return await asyncio.to_thread(
lambda: (
service.users()
.messages()
.send(userId="me", body={"threadId": thread_id, "raw": raw})
.execute()
)
lambda: service.users()
.messages()
.send(userId="me", body={"threadId": thread_id, "raw": raw})
.execute()
)

@@ -1488,20 +1459,18 @@ class GmailDraftReplyBlock(GmailBase):

# Create draft with proper thread association
draft = await asyncio.to_thread(
lambda: (
service.users()
.drafts()
.create(
userId="me",
body={
"message": {
"threadId": thread_id,
"raw": raw,
}
},
)
.execute()
lambda: service.users()
.drafts()
.create(
userId="me",
body={
"message": {
"threadId": thread_id,
"raw": raw,
}
},
)
.execute()
)

return draft
@@ -1673,12 +1642,10 @@ class GmailForwardBlock(GmailBase):

# Get the original message
original = await asyncio.to_thread(
lambda: (
service.users()
.messages()
.get(userId="me", id=input_data.messageId, format="full")
.execute()
)
lambda: service.users()
.messages()
.get(userId="me", id=input_data.messageId, format="full")
.execute()
)

headers = {
@@ -1768,10 +1735,8 @@ To: {original_to}
# Send the forwarded message
raw = base64.urlsafe_b64encode(msg.as_bytes()).decode("utf-8")
return await asyncio.to_thread(
lambda: (
service.users()
.messages()
.send(userId="me", body={"raw": raw})
.execute()
)
lambda: service.users()
.messages()
.send(userId="me", body={"raw": raw})
.execute()
)
@@ -345,17 +345,14 @@ class GoogleSheetsReadBlock(Block):
|
||||
)
|
||||
yield "result", data
|
||||
# Output the GoogleDriveFile for chaining (preserves credentials_id)
|
||||
yield (
|
||||
"spreadsheet",
|
||||
GoogleDriveFile(
|
||||
id=spreadsheet_id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{spreadsheet_id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
),
|
||||
yield "spreadsheet", GoogleDriveFile(
|
||||
id=spreadsheet_id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{spreadsheet_id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
)
|
||||
except Exception as e:
|
||||
yield "error", _handle_sheets_api_error(str(e), "read")
|
||||
@@ -469,12 +466,9 @@ class GoogleSheetsWriteBlock(Block):
|
||||
if validation_error:
|
||||
# Customize message for write operations on CSV files
|
||||
if "CSV file" in validation_error:
|
||||
yield (
|
||||
"error",
|
||||
validation_error.replace(
|
||||
"Please use a CSV reader block instead, or",
|
||||
"CSV files are read-only through Google Drive. Please",
|
||||
),
|
||||
yield "error", validation_error.replace(
|
||||
"Please use a CSV reader block instead, or",
|
||||
"CSV files are read-only through Google Drive. Please",
|
||||
)
|
||||
else:
|
||||
yield "error", validation_error
|
||||
@@ -491,17 +485,14 @@ class GoogleSheetsWriteBlock(Block):
|
||||
)
|
||||
yield "result", result
|
||||
# Output the GoogleDriveFile for chaining (preserves credentials_id)
|
||||
yield (
|
||||
"spreadsheet",
|
||||
GoogleDriveFile(
|
||||
id=input_data.spreadsheet.id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
),
|
||||
yield "spreadsheet", GoogleDriveFile(
|
||||
id=input_data.spreadsheet.id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
)
|
||||
except Exception as e:
|
||||
yield "error", _handle_sheets_api_error(str(e), "write")
|
||||
@@ -623,17 +614,14 @@ class GoogleSheetsAppendRowBlock(Block):
|
||||
input_data.value_input_option,
|
||||
)
|
||||
yield "result", result
|
||||
yield (
|
||||
"spreadsheet",
|
||||
GoogleDriveFile(
|
||||
id=input_data.spreadsheet.id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
),
|
||||
yield "spreadsheet", GoogleDriveFile(
|
||||
id=input_data.spreadsheet.id,
|
||||
name=input_data.spreadsheet.name,
|
||||
mimeType="application/vnd.google-apps.spreadsheet",
|
||||
url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
|
||||
iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
|
||||
isFolder=False,
|
||||
_credentials_id=input_data.spreadsheet.credentials_id,
|
||||
)
|
||||
except Exception as e:
|
||||
yield "error", f"Failed to append row: {str(e)}"
|
||||
@@ -756,17 +744,14 @@ class GoogleSheetsClearBlock(Block):
             )
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to clear Google Sheet range: {str(e)}"
@@ -869,17 +854,14 @@ class GoogleSheetsMetadataBlock(Block):
             )
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get spreadsheet metadata: {str(e)}"
@@ -1002,17 +984,14 @@ class GoogleSheetsManageSheetBlock(Block):
             )
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to manage sheet: {str(e)}"
@@ -1162,17 +1141,14 @@ class GoogleSheetsBatchOperationsBlock(Block):
             )
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to perform batch operations: {str(e)}"
@@ -1330,17 +1306,14 @@ class GoogleSheetsFindReplaceBlock(Block):
             )
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to find/replace in Google Sheet: {str(e)}"
@@ -1515,17 +1488,14 @@ class GoogleSheetsFindBlock(Block):
             yield "locations", result["locations"]
             yield "result", {"success": True}
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to find text in Google Sheet: {str(e)}"
@@ -1784,17 +1754,14 @@ class GoogleSheetsFormatBlock(Block):
             else:
                 yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to format Google Sheet cells: {str(e)}"
@@ -1961,17 +1928,14 @@ class GoogleSheetsCreateSpreadsheetBlock(Block):
             spreadsheet_id = result["spreadsheetId"]
             spreadsheet_url = result["spreadsheetUrl"]
             # Output the GoogleDriveFile for chaining (includes credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=spreadsheet_id,
-                    name=result.get("title", input_data.title),
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=spreadsheet_url,
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.credentials.id,  # Preserve credentials for chaining
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=spreadsheet_id,
+                name=result.get("title", input_data.title),
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=spreadsheet_url,
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.credentials.id,  # Preserve credentials for chaining
             )
             yield "spreadsheet_id", spreadsheet_id
             yield "spreadsheet_url", spreadsheet_url
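Every one of these hunks re-emits a `GoogleDriveFile` that carries `_credentials_id`, so the next Sheets block in a graph can reuse the same OAuth credentials without asking the user to pick the file again. A simplified stand-in for the model (field set inferred from this diff; the real class lives in the platform's Google blocks package):

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class GoogleDriveFile:
        # Minimal stand-in; the platform's real model also accepts the
        # private _credentials_id constructor argument shown in the diff.
        id: str
        name: str
        mimeType: str
        url: str
        iconUrl: str
        isFolder: bool
        credentials_id: Optional[str] = None

    def resolve_credentials(file: GoogleDriveFile) -> str:
        # A downstream block reuses the propagated credentials instead of
        # prompting the user to re-select the spreadsheet and account.
        assert file.credentials_id is not None
        return file.credentials_id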
@@ -2149,17 +2113,14 @@ class GoogleSheetsUpdateCellBlock(Block):
 
             yield "result", result
             # Output the GoogleDriveFile for chaining (preserves credentials_id)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", _handle_sheets_api_error(str(e), "update")
@@ -2418,17 +2379,14 @@ class GoogleSheetsFilterRowsBlock(Block):
             yield "rows", result["rows"]
             yield "row_indices", result["row_indices"]
             yield "count", result["count"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to filter rows: {str(e)}"
@@ -2638,17 +2596,14 @@ class GoogleSheetsLookupRowBlock(Block):
             yield "row_dict", result["row_dict"]
             yield "row_index", result["row_index"]
             yield "found", result["found"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to lookup row: {str(e)}"
@@ -2862,17 +2817,14 @@ class GoogleSheetsDeleteRowsBlock(Block):
             )
             yield "result", {"success": True}
             yield "deleted_count", result["deleted_count"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to delete rows: {str(e)}"
@@ -3043,17 +2995,14 @@ class GoogleSheetsGetColumnBlock(Block):
             yield "values", result["values"]
             yield "count", result["count"]
             yield "column_index", result["column_index"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get column: {str(e)}"
@@ -3227,17 +3176,14 @@ class GoogleSheetsSortBlock(Block):
                 input_data.has_header,
             )
             yield "result", result
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to sort sheet: {str(e)}"
@@ -3493,17 +3439,14 @@ class GoogleSheetsGetUniqueValuesBlock(Block):
             yield "values", result["values"]
             yield "counts", result["counts"]
             yield "total_unique", result["total_unique"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get unique values: {str(e)}"
@@ -3677,17 +3620,14 @@ class GoogleSheetsInsertRowBlock(Block):
                 input_data.value_input_option,
             )
             yield "result", result
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to insert row: {str(e)}"
@@ -3853,17 +3793,14 @@ class GoogleSheetsAddColumnBlock(Block):
             yield "result", {"success": True}
             yield "column_letter", result["column_letter"]
             yield "column_index", result["column_index"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to add column: {str(e)}"
@@ -4061,17 +3998,14 @@ class GoogleSheetsGetRowCountBlock(Block):
             yield "data_rows", result["data_rows"]
             yield "last_row", result["last_row"]
             yield "column_count", result["column_count"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get row count: {str(e)}"
@@ -4242,17 +4176,14 @@ class GoogleSheetsRemoveDuplicatesBlock(Block):
             yield "result", {"success": True}
             yield "removed_count", result["removed_count"]
             yield "remaining_rows", result["remaining_rows"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to remove duplicates: {str(e)}"
@@ -4495,17 +4426,14 @@ class GoogleSheetsUpdateRowBlock(Block):
                 input_data.dict_values,
             )
             yield "result", result
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to update row: {str(e)}"
@@ -4687,17 +4615,14 @@ class GoogleSheetsGetRowBlock(Block):
             )
             yield "row", result["row"]
             yield "row_dict", result["row_dict"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get row: {str(e)}"
@@ -4828,17 +4753,14 @@ class GoogleSheetsDeleteColumnBlock(Block):
                 input_data.column,
             )
             yield "result", result
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to delete column: {str(e)}"
@@ -5009,17 +4931,14 @@ class GoogleSheetsCreateNamedRangeBlock(Block):
             )
             yield "result", {"success": True}
             yield "named_range_id", result["named_range_id"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to create named range: {str(e)}"
@@ -5185,17 +5104,14 @@ class GoogleSheetsListNamedRangesBlock(Block):
             )
             yield "named_ranges", result["named_ranges"]
             yield "count", result["count"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to list named ranges: {str(e)}"
@@ -5348,17 +5264,14 @@ class GoogleSheetsAddDropdownBlock(Block):
                 input_data.show_dropdown,
             )
             yield "result", result
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to add dropdown: {str(e)}"
@@ -5523,17 +5436,14 @@ class GoogleSheetsCopyToSpreadsheetBlock(Block):
             yield "result", {"success": True}
             yield "new_sheet_id", result["new_sheet_id"]
             yield "new_sheet_name", result["new_sheet_name"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.source_spreadsheet.id,
-                    name=input_data.source_spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.source_spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.source_spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.source_spreadsheet.id,
+                name=input_data.source_spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.source_spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.source_spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to copy sheet: {str(e)}"
@@ -5678,17 +5588,14 @@ class GoogleSheetsProtectRangeBlock(Block):
             )
             yield "result", {"success": True}
             yield "protection_id", result["protection_id"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to protect range: {str(e)}"
@@ -5845,17 +5752,14 @@ class GoogleSheetsExportCsvBlock(Block):
             )
             yield "csv_data", result["csv_data"]
             yield "row_count", result["row_count"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to export CSV: {str(e)}"
@@ -5991,17 +5895,14 @@ class GoogleSheetsImportCsvBlock(Block):
             )
             yield "result", {"success": True}
             yield "rows_imported", result["rows_imported"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to import CSV: {str(e)}"
@@ -6131,17 +6032,14 @@ class GoogleSheetsAddNoteBlock(Block):
                 input_data.note,
             )
             yield "result", {"success": True}
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to add note: {str(e)}"
@@ -6287,17 +6185,14 @@ class GoogleSheetsGetNotesBlock(Block):
             notes = result["notes"]
             yield "notes", notes
             yield "count", len(notes)
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to get notes: {str(e)}"
@@ -6452,17 +6347,14 @@ class GoogleSheetsShareSpreadsheetBlock(Block):
             )
             yield "result", {"success": True}
             yield "share_link", result["share_link"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to share spreadsheet: {str(e)}"
@@ -6599,17 +6491,14 @@ class GoogleSheetsSetPublicAccessBlock(Block):
             )
             yield "result", {"success": True, "is_public": result["is_public"]}
             yield "share_link", result["share_link"]
-            yield (
-                "spreadsheet",
-                GoogleDriveFile(
-                    id=input_data.spreadsheet.id,
-                    name=input_data.spreadsheet.name,
-                    mimeType="application/vnd.google-apps.spreadsheet",
-                    url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
-                    iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
-                    isFolder=False,
-                    _credentials_id=input_data.spreadsheet.credentials_id,
-                ),
+            yield "spreadsheet", GoogleDriveFile(
+                id=input_data.spreadsheet.id,
+                name=input_data.spreadsheet.name,
+                mimeType="application/vnd.google-apps.spreadsheet",
+                url=f"https://docs.google.com/spreadsheets/d/{input_data.spreadsheet.id}/edit",
+                iconUrl="https://www.gstatic.com/images/branding/product/1x/sheets_48dp.png",
+                isFolder=False,
+                _credentials_id=input_data.spreadsheet.credentials_id,
             )
         except Exception as e:
             yield "error", f"Failed to set public access: {str(e)}"

@@ -21,71 +21,43 @@ logger = logging.getLogger(__name__)
 
 class HumanInTheLoopBlock(Block):
     """
-    Pauses execution and waits for human approval or rejection of the data.
+    This block pauses execution and waits for human approval or modification of the data.
 
-    When executed, this block creates a pending review entry and sets the node execution
-    status to REVIEW. The execution remains paused until a human user either approves
-    or rejects the data.
+    When executed, it creates a pending review entry and sets the node execution status
+    to REVIEW. The execution will remain paused until a human user either:
+    - Approves the data (with or without modifications)
+    - Rejects the data
 
-    **How it works:**
-    - The input data is presented to a human reviewer
-    - The reviewer can approve or reject (and optionally modify the data if editable)
-    - On approval: the data flows out through the `approved_data` output pin
-    - On rejection: the data flows out through the `rejected_data` output pin
-
-    **Important:** The output pins yield the actual data itself, NOT status strings.
-    The approval/rejection decision determines WHICH output pin fires, not the value.
-    You do NOT need to compare the output to "APPROVED" or "REJECTED" - simply connect
-    downstream blocks to the appropriate output pin for each case.
-
-    **Example usage:**
-    - Connect `approved_data` → next step in your workflow (data was approved)
-    - Connect `rejected_data` → error handling or notification (data was rejected)
+    This is useful for workflows that require human validation or intervention before
+    proceeding to the next steps.
     """
 
     class Input(BlockSchemaInput):
-        data: Any = SchemaField(
-            description="The data to be reviewed by a human user. "
-            "This exact data will be passed through to either approved_data or "
-            "rejected_data output based on the reviewer's decision."
-        )
+        data: Any = SchemaField(description="The data to be reviewed by a human user")
         name: str = SchemaField(
-            description="A descriptive name for what this data represents. "
-            "This helps the reviewer understand what they are reviewing.",
+            description="A descriptive name for what this data represents",
         )
        editable: bool = SchemaField(
-            description="Whether the human reviewer can edit the data before "
-            "approving or rejecting it",
+            description="Whether the human reviewer can edit the data",
            default=True,
            advanced=True,
        )
 
     class Output(BlockSchemaOutput):
         approved_data: Any = SchemaField(
-            description="Outputs the input data when the reviewer APPROVES it. "
-            "The value is the actual data itself (not a status string like 'APPROVED'). "
-            "If the reviewer edited the data, this contains the modified version. "
-            "Connect downstream blocks here for the 'approved' workflow path."
+            description="The data when approved (may be modified by reviewer)"
        )
        rejected_data: Any = SchemaField(
-            description="Outputs the input data when the reviewer REJECTS it. "
-            "The value is the actual data itself (not a status string like 'REJECTED'). "
-            "If the reviewer edited the data, this contains the modified version. "
-            "Connect downstream blocks here for the 'rejected' workflow path."
+            description="The data when rejected (may be modified by reviewer)"
        )
        review_message: str = SchemaField(
-            description="Optional message provided by the reviewer explaining their "
-            "decision. Only outputs when the reviewer provides a message; "
-            "this pin does not fire if no message was given.",
-            default="",
+            description="Any message provided by the reviewer", default=""
        )
 
     def __init__(self):
         super().__init__(
             id="8b2a7b3c-6e9d-4a5f-8c1b-2e3f4a5b6c7d",
-            description="Pause execution for human review. Data flows through "
-            "approved_data or rejected_data output based on the reviewer's decision. "
-            "Outputs contain the actual data, not status strings.",
+            description="Pause execution and wait for human approval or modification of data",
             categories={BlockCategory.BASIC},
             input_schema=HumanInTheLoopBlock.Input,
             output_schema=HumanInTheLoopBlock.Output,

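The removed docstring's key point survives in the schema: each output pin emits the reviewed data itself, and the approval decision only selects which pin fires. A hedged sketch of consuming those pins (the driver below is illustrative, not the platform's API; the real block pauses in the REVIEW state and resumes through the platform's review flow):

    async def handle_review_outputs(block, input_data):
        # Hypothetical driver; the platform normally routes pins via graph edges.
        async for pin, value in block.run(input_data):
            if pin == "approved_data":
                print("approved payload:", value)   # the data itself, possibly edited
            elif pin == "rejected_data":
                print("rejected payload:", value)   # the data itself, not "REJECTED"
            elif pin == "review_message":
                print("reviewer message:", value)   # fires only if a message was left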
@@ -195,12 +195,8 @@ class IdeogramModelBlock(Block):
                ),
            ],
            test_mock={
-                "run_model": lambda api_key, model_name, prompt, seed, aspect_ratio, magic_prompt_option, style_type, negative_prompt, color_palette_name, custom_colors: (
-                    "https://ideogram.ai/api/images/test-generated-image-url.png"
-                ),
-                "upscale_image": lambda api_key, image_url: (
-                    "https://ideogram.ai/api/images/test-upscaled-image-url.png"
-                ),
+                "run_model": lambda api_key, model_name, prompt, seed, aspect_ratio, magic_prompt_option, style_type, negative_prompt, color_palette_name, custom_colors: "https://ideogram.ai/api/images/test-generated-image-url.png",
+                "upscale_image": lambda api_key, image_url: "https://ideogram.ai/api/images/test-upscaled-image-url.png",
            },
            test_credentials=TEST_CREDENTIALS,
        )

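The `test_mock` edit above is also layout-only: a lambda whose body is wrapped in parentheses returns exactly the same value as the single-line form, as this illustrative check shows (the URL is a placeholder, not the repository's test data):

    f = lambda x: (
        "https://example.invalid/test.png"
    )
    g = lambda x: "https://example.invalid/test.png"
    assert f(0) == g(0)  # parenthesized and single-line bodies behave identically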
Some files were not shown because too many files have changed in this diff.