Mirror of https://github.com/Significant-Gravitas/AutoGPT.git
Synced 2026-02-03 11:24:57 -05:00

Compare commits: fix/file-i...otto/copil (18 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | d8909af967 |  |
|  | ff8ca11845 |  |
|  | 4d6471a7eb |  |
|  | 7dc53071e8 |  |
|  | dcdd886067 |  |
|  | 6098c5eed6 |  |
|  | 4878665c66 |  |
|  | f7350c797a |  |
|  | 2abbb7fbc8 |  |
|  | 05b60db554 |  |
|  | cc4839bedb |  |
|  | dbbff04616 |  |
|  | e6438b9a76 |  |
|  | e10ff8d37f |  |
|  | 9538992eaf |  |
|  | 27b72062f2 |  |
|  | 9a79a8d257 |  |
|  | a9bf08748b |  |
.gitignore (vendored, +1)

@@ -180,3 +180,4 @@ autogpt_platform/backend/settings.py
.claude/settings.local.json
CLAUDE.local.md
/autogpt_platform/backend/logs
.next
@@ -17,6 +17,13 @@ from .model import ChatSession, create_chat_session, get_chat_session, get_user_

config = ChatConfig()

SSE_RESPONSE_HEADERS = {
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "X-Accel-Buffering": "no",
    "x-vercel-ai-ui-message-stream": "v1",
}


logger = logging.getLogger(__name__)

@@ -32,6 +39,48 @@ async def _validate_and_get_session(
    return session


async def _create_stream_generator(
    session_id: str,
    message: str,
    user_id: str | None,
    session: ChatSession,
    is_user_message: bool = True,
    context: dict[str, str] | None = None,
) -> AsyncGenerator[str, None]:
    """Create SSE event generator for chat streaming."""
    chunk_count = 0
    first_chunk_type: str | None = None
    async for chunk in chat_service.stream_chat_completion(
        session_id,
        message,
        is_user_message=is_user_message,
        user_id=user_id,
        session=session,
        context=context,
    ):
        if chunk_count < 3:
            logger.info(
                "Chat stream chunk",
                extra={
                    "session_id": session_id,
                    "chunk_type": str(chunk.type),
                },
            )
            if not first_chunk_type:
                first_chunk_type = str(chunk.type)
        chunk_count += 1
        yield chunk.to_sse()
    logger.info(
        "Chat stream completed",
        extra={
            "session_id": session_id,
            "chunk_count": chunk_count,
            "first_chunk_type": first_chunk_type,
        },
    )
    yield "data: [DONE]\n\n"


router = APIRouter(
    tags=["chat"],
)

@@ -221,49 +270,17 @@ async def stream_chat_post(
    """
    session = await _validate_and_get_session(session_id, user_id)

    async def event_generator() -> AsyncGenerator[str, None]:
        chunk_count = 0
        first_chunk_type: str | None = None
        async for chunk in chat_service.stream_chat_completion(
            session_id,
            request.message,
            is_user_message=request.is_user_message,
            user_id=user_id,
            session=session,  # Pass pre-fetched session to avoid double-fetch
            context=request.context,
        ):
            if chunk_count < 3:
                logger.info(
                    "Chat stream chunk",
                    extra={
                        "session_id": session_id,
                        "chunk_type": str(chunk.type),
                    },
                )
                if not first_chunk_type:
                    first_chunk_type = str(chunk.type)
            chunk_count += 1
            yield chunk.to_sse()
        logger.info(
            "Chat stream completed",
            extra={
                "session_id": session_id,
                "chunk_count": chunk_count,
                "first_chunk_type": first_chunk_type,
            },
        )
        # AI SDK protocol termination
        yield "data: [DONE]\n\n"

    return StreamingResponse(
        event_generator(),
        _create_stream_generator(
            session_id=session_id,
            message=request.message,
            user_id=user_id,
            session=session,
            is_user_message=request.is_user_message,
            context=request.context,
        ),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # Disable nginx buffering
            "x-vercel-ai-ui-message-stream": "v1",  # AI SDK protocol header
        },
        headers=SSE_RESPONSE_HEADERS,
    )

@@ -295,48 +312,16 @@ async def stream_chat_get(
    """
    session = await _validate_and_get_session(session_id, user_id)

    async def event_generator() -> AsyncGenerator[str, None]:
        chunk_count = 0
        first_chunk_type: str | None = None
        async for chunk in chat_service.stream_chat_completion(
            session_id,
            message,
            is_user_message=is_user_message,
            user_id=user_id,
            session=session,  # Pass pre-fetched session to avoid double-fetch
        ):
            if chunk_count < 3:
                logger.info(
                    "Chat stream chunk",
                    extra={
                        "session_id": session_id,
                        "chunk_type": str(chunk.type),
                    },
                )
                if not first_chunk_type:
                    first_chunk_type = str(chunk.type)
            chunk_count += 1
            yield chunk.to_sse()
        logger.info(
            "Chat stream completed",
            extra={
                "session_id": session_id,
                "chunk_count": chunk_count,
                "first_chunk_type": first_chunk_type,
            },
        )
        # AI SDK protocol termination
        yield "data: [DONE]\n\n"

    return StreamingResponse(
        event_generator(),
        _create_stream_generator(
            session_id=session_id,
            message=message,
            user_id=user_id,
            session=session,
            is_user_message=is_user_message,
        ),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",  # Disable nginx buffering
            "x-vercel-ai-ui-message-stream": "v1",  # AI SDK protocol header
        },
        headers=SSE_RESPONSE_HEADERS,
    )
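Both streaming routes above now delegate to the shared `_create_stream_generator` and reuse the module-level `SSE_RESPONSE_HEADERS`. As a rough illustration of how a consumer reads such a stream, here is a minimal Python client sketch; the endpoint path and request payload are assumptions for illustration, not taken from this diff.

```python
import asyncio

import httpx


async def read_chat_stream(base_url: str, session_id: str, message: str) -> None:
    # Hypothetical path/payload; the real route and request model are not shown here.
    url = f"{base_url}/chat/{session_id}/stream"
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("POST", url, json={"message": message}) as response:
            async for line in response.aiter_lines():
                if not line.startswith("data: "):
                    continue  # skip blank keep-alive lines between SSE events
                payload = line[len("data: "):]
                if payload == "[DONE]":  # terminator emitted by the generator above
                    break
                print(payload)  # each event body is whatever chunk.to_sse() serialized


# asyncio.run(read_chat_stream("http://localhost:8000", "some-session-id", "hello"))
```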
@@ -0,0 +1,81 @@
"""Shared helpers for chat tools."""

from typing import Any

from .models import ErrorResponse


def error_response(
    message: str, session_id: str | None, **kwargs: Any
) -> ErrorResponse:
    """Create standardized error response.

    Args:
        message: Error message to display
        session_id: Current session ID
        **kwargs: Additional fields to pass to ErrorResponse

    Returns:
        ErrorResponse with the given message and session_id
    """
    return ErrorResponse(message=message, session_id=session_id, **kwargs)


def get_inputs_from_schema(
    input_schema: dict[str, Any],
    exclude_fields: set[str] | None = None,
) -> list[dict[str, Any]]:
    """Extract input field info from JSON schema.

    Args:
        input_schema: JSON schema dict with 'properties' and 'required'
        exclude_fields: Set of field names to exclude (e.g., credential fields)

    Returns:
        List of dicts with field info (name, title, type, description, required, default)
    """
    # Safety check: original code returned [] if input_schema wasn't a dict
    if not isinstance(input_schema, dict):
        return []

    exclude = exclude_fields or set()
    properties = input_schema.get("properties", {})
    required = set(input_schema.get("required", []))

    return [
        {
            "name": name,
            "title": schema.get("title", name),
            "type": schema.get("type", "string"),
            "description": schema.get("description", ""),
            "required": name in required,
            "default": schema.get("default"),
        }
        for name, schema in properties.items()
        if name not in exclude
    ]


def format_inputs_as_markdown(inputs: list[dict[str, Any]]) -> str:
    """Format input fields as a readable markdown list.

    Args:
        inputs: List of input dicts from get_inputs_from_schema

    Returns:
        Markdown-formatted string listing the inputs
    """
    if not inputs:
        return "No inputs required."

    lines = []
    for inp in inputs:
        required_marker = " (required)" if inp.get("required") else ""
        default = inp.get("default")
        default_info = f" [default: {default}]" if default is not None else ""
        description = inp.get("description", "")
        desc_info = f" - {description}" if description else ""

        lines.append(f"- **{inp['name']}**{required_marker}{default_info}{desc_info}")

    return "\n".join(lines)
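A quick sketch of how the two schema helpers above compose; the schema below is made up for illustration, and the import path is an assumption (the diff only shows `from .helpers import get_inputs_from_schema`).

```python
from typing import Any

# Hypothetical import path for the helpers module shown above.
from backend.api.features.chat.tools.helpers import (
    format_inputs_as_markdown,
    get_inputs_from_schema,
)

schema: dict[str, Any] = {
    "properties": {
        "query": {"title": "Query", "type": "string", "description": "Search text"},
        "limit": {"title": "Limit", "type": "integer", "default": 10},
        "credentials": {"title": "Credentials", "type": "object"},
    },
    "required": ["query"],
}

inputs = get_inputs_from_schema(schema, exclude_fields={"credentials"})
# [{'name': 'query', ..., 'required': True, 'default': None},
#  {'name': 'limit', ..., 'required': False, 'default': 10}]

print(format_inputs_as_markdown(inputs))
# - **query** (required) - Search text
# - **limit** [default: 10]
```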
@@ -38,6 +38,8 @@ class ResponseType(str, Enum):
    OPERATION_STARTED = "operation_started"
    OPERATION_PENDING = "operation_pending"
    OPERATION_IN_PROGRESS = "operation_in_progress"
    # Input validation
    INPUT_VALIDATION_ERROR = "input_validation_error"


# Base response model

@@ -68,6 +70,10 @@ class AgentInfo(BaseModel):
    has_external_trigger: bool | None = None
    new_output: bool | None = None
    graph_id: str | None = None
    inputs: dict[str, Any] | None = Field(
        default=None,
        description="Input schema for the agent, including field names, types, and defaults",
    )


class AgentsFoundResponse(ToolResponseBase):

@@ -194,6 +200,20 @@ class ErrorResponse(ToolResponseBase):
    details: dict[str, Any] | None = None


class InputValidationErrorResponse(ToolResponseBase):
    """Response when run_agent receives unknown input fields."""

    type: ResponseType = ResponseType.INPUT_VALIDATION_ERROR
    unrecognized_fields: list[str] = Field(
        description="List of input field names that were not recognized"
    )
    inputs: dict[str, Any] = Field(
        description="The agent's valid input schema for reference"
    )
    graph_id: str | None = None
    graph_version: int | None = None


# Agent output models
class ExecutionOutputInfo(BaseModel):
    """Summary of a single execution's outputs."""
@@ -24,12 +24,14 @@ from backend.util.timezone_utils import (
)

from .base import BaseTool
from .helpers import get_inputs_from_schema
from .models import (
    AgentDetails,
    AgentDetailsResponse,
    ErrorResponse,
    ExecutionOptions,
    ExecutionStartedResponse,
    InputValidationErrorResponse,
    SetupInfo,
    SetupRequirementsResponse,
    ToolResponseBase,

@@ -273,6 +275,22 @@ class RunAgentTool(BaseTool):
        input_properties = graph.input_schema.get("properties", {})
        required_fields = set(graph.input_schema.get("required", []))
        provided_inputs = set(params.inputs.keys())
        valid_fields = set(input_properties.keys())

        # Check for unknown input fields
        unrecognized_fields = provided_inputs - valid_fields
        if unrecognized_fields:
            return InputValidationErrorResponse(
                message=(
                    f"Unknown input field(s) provided: {', '.join(sorted(unrecognized_fields))}. "
                    f"Agent was not executed. Please use the correct field names from the schema."
                ),
                session_id=session_id,
                unrecognized_fields=sorted(unrecognized_fields),
                inputs=graph.input_schema,
                graph_id=graph.id,
                graph_version=graph.version,
            )

        # If agent has inputs but none were provided AND use_defaults is not set,
        # always show what's available first so user can decide

@@ -354,19 +372,7 @@ class RunAgentTool(BaseTool):

    def _get_inputs_list(self, input_schema: dict[str, Any]) -> list[dict[str, Any]]:
        """Extract inputs list from schema."""
        inputs_list = []
        if isinstance(input_schema, dict) and "properties" in input_schema:
            for field_name, field_schema in input_schema["properties"].items():
                inputs_list.append(
                    {
                        "name": field_name,
                        "title": field_schema.get("title", field_name),
                        "type": field_schema.get("type", "string"),
                        "description": field_schema.get("description", ""),
                        "required": field_name in input_schema.get("required", []),
                    }
                )
        return inputs_list
        return get_inputs_from_schema(input_schema)

    def _get_execution_modes(self, graph: GraphModel) -> list[str]:
        """Get available execution modes for the graph."""
@@ -402,3 +402,42 @@ async def test_run_agent_schedule_without_name(setup_test_data):
    # Should return error about missing schedule_name
    assert result_data.get("type") == "error"
    assert "schedule_name" in result_data["message"].lower()


@pytest.mark.asyncio(loop_scope="session")
async def test_run_agent_rejects_unknown_input_fields(setup_test_data):
    """Test that run_agent returns input_validation_error for unknown input fields."""
    user = setup_test_data["user"]
    store_submission = setup_test_data["store_submission"]

    tool = RunAgentTool()
    agent_marketplace_id = f"{user.email.split('@')[0]}/{store_submission.slug}"
    session = make_session(user_id=user.id)

    # Execute with unknown input field names
    response = await tool.execute(
        user_id=user.id,
        session_id=str(uuid.uuid4()),
        tool_call_id=str(uuid.uuid4()),
        username_agent_slug=agent_marketplace_id,
        inputs={
            "unknown_field": "some value",
            "another_unknown": "another value",
        },
        session=session,
    )

    assert response is not None
    assert hasattr(response, "output")
    assert isinstance(response.output, str)
    result_data = orjson.loads(response.output)

    # Should return input_validation_error type with unrecognized fields
    assert result_data.get("type") == "input_validation_error"
    assert "unrecognized_fields" in result_data
    assert set(result_data["unrecognized_fields"]) == {
        "another_unknown",
        "unknown_field",
    }
    assert "inputs" in result_data  # Contains the valid schema
    assert "Agent was not executed" in result_data["message"]
@@ -5,15 +5,18 @@ import uuid
from collections import defaultdict
from typing import Any

from pydantic_core import PydanticUndefined

from backend.api.features.chat.model import ChatSession
from backend.data.block import get_block
from backend.data.execution import ExecutionContext
from backend.data.model import CredentialsMetaInput
from backend.data.model import CredentialsFieldInfo, CredentialsMetaInput
from backend.data.workspace import get_or_create_workspace
from backend.integrations.creds_manager import IntegrationCredentialsManager
from backend.util.exceptions import BlockError

from .base import BaseTool
from .helpers import get_inputs_from_schema
from .models import (
    BlockOutputResponse,
    ErrorResponse,

@@ -22,7 +25,10 @@ from .models import (
    ToolResponseBase,
    UserReadiness,
)
from .utils import build_missing_credentials_from_field_info
from .utils import (
    build_missing_credentials_from_field_info,
    match_credentials_to_requirements,
)

logger = logging.getLogger(__name__)

@@ -71,64 +77,68 @@ class RunBlockTool(BaseTool):
    def requires_auth(self) -> bool:
        return True

    def _resolve_discriminated_credentials(
        self,
        block: Any,
        input_data: dict[str, Any],
    ) -> dict[str, CredentialsFieldInfo]:
        """Resolve credential requirements, applying discriminator logic where needed."""
        credentials_fields_info = block.input_schema.get_credentials_fields_info()
        if not credentials_fields_info:
            return {}

        resolved: dict[str, CredentialsFieldInfo] = {}

        for field_name, field_info in credentials_fields_info.items():
            effective_field_info = field_info

            if field_info.discriminator and field_info.discriminator_mapping:
                discriminator_value = input_data.get(field_info.discriminator)
                if discriminator_value is None:
                    field = block.input_schema.model_fields.get(
                        field_info.discriminator
                    )
                    if field and field.default is not PydanticUndefined:
                        discriminator_value = field.default

                if (
                    discriminator_value
                    and discriminator_value in field_info.discriminator_mapping
                ):
                    effective_field_info = field_info.discriminate(discriminator_value)
                    logger.debug(
                        f"Discriminated provider for {field_name}: "
                        f"{discriminator_value} -> {effective_field_info.provider}"
                    )

            resolved[field_name] = effective_field_info

        return resolved

    async def _check_block_credentials(
        self,
        user_id: str,
        block: Any,
        input_data: dict[str, Any] | None = None,
    ) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
        """
        Check if user has required credentials for a block.

        Args:
            user_id: User ID
            block: Block to check credentials for
            input_data: Input data for the block (used to determine provider via discriminator)

        Returns:
            tuple[matched_credentials, missing_credentials]
        """
        matched_credentials: dict[str, CredentialsMetaInput] = {}
        missing_credentials: list[CredentialsMetaInput] = []
        input_data = input_data or {}
        requirements = self._resolve_discriminated_credentials(block, input_data)

        # Get credential field info from block's input schema
        credentials_fields_info = block.input_schema.get_credentials_fields_info()
        if not requirements:
            return {}, []

        if not credentials_fields_info:
            return matched_credentials, missing_credentials

        # Get user's available credentials
        creds_manager = IntegrationCredentialsManager()
        available_creds = await creds_manager.store.get_all_creds(user_id)

        for field_name, field_info in credentials_fields_info.items():
            # field_info.provider is a frozenset of acceptable providers
            # field_info.supported_types is a frozenset of acceptable types
            matching_cred = next(
                (
                    cred
                    for cred in available_creds
                    if cred.provider in field_info.provider
                    and cred.type in field_info.supported_types
                ),
                None,
            )

            if matching_cred:
                matched_credentials[field_name] = CredentialsMetaInput(
                    id=matching_cred.id,
                    provider=matching_cred.provider,  # type: ignore
                    type=matching_cred.type,
                    title=matching_cred.title,
                )
            else:
                # Create a placeholder for the missing credential
                provider = next(iter(field_info.provider), "unknown")
                cred_type = next(iter(field_info.supported_types), "api_key")
                missing_credentials.append(
                    CredentialsMetaInput(
                        id=field_name,
                        provider=provider,  # type: ignore
                        type=cred_type,  # type: ignore
                        title=field_name.replace("_", " ").title(),
                    )
                )

        return matched_credentials, missing_credentials
        return await match_credentials_to_requirements(user_id, requirements)

    async def _execute(
        self,

@@ -186,10 +196,9 @@ class RunBlockTool(BaseTool):

        logger.info(f"Executing block {block.name} ({block_id}) for user {user_id}")

        # Check credentials
        creds_manager = IntegrationCredentialsManager()
        matched_credentials, missing_credentials = await self._check_block_credentials(
            user_id, block
            user_id, block, input_data
        )

        if missing_credentials:

@@ -320,27 +329,6 @@ class RunBlockTool(BaseTool):

    def _get_inputs_list(self, block: Any) -> list[dict[str, Any]]:
        """Extract non-credential inputs from block schema."""
        inputs_list = []
        schema = block.input_schema.jsonschema()
        properties = schema.get("properties", {})
        required_fields = set(schema.get("required", []))

        # Get credential field names to exclude
        credentials_fields = set(block.input_schema.get_credentials_fields().keys())

        for field_name, field_schema in properties.items():
            # Skip credential fields
            if field_name in credentials_fields:
                continue

            inputs_list.append(
                {
                    "name": field_name,
                    "title": field_schema.get("title", field_name),
                    "type": field_schema.get("type", "string"),
                    "description": field_schema.get("description", ""),
                    "required": field_name in required_fields,
                }
            )

        return inputs_list
        return get_inputs_from_schema(schema, exclude_fields=credentials_fields)
@@ -225,6 +225,103 @@ async def get_or_create_library_agent(
    return library_agents[0]


async def get_user_credentials(user_id: str) -> list:
    """Get all available credentials for a user."""
    creds_manager = IntegrationCredentialsManager()
    return await creds_manager.store.get_all_creds(user_id)


def find_matching_credential(
    available_creds: list,
    field_info: CredentialsFieldInfo,
    check_scopes: bool = True,
):
    """Find a credential that matches the required provider, type, and optionally scopes."""
    for cred in available_creds:
        if cred.provider not in field_info.provider:
            continue
        if cred.type not in field_info.supported_types:
            continue
        if check_scopes and not _credential_has_required_scopes(cred, field_info):
            continue
        return cred
    return None


def create_credential_meta_from_match(matching_cred) -> CredentialsMetaInput:
    """Create a CredentialsMetaInput from a matched credential."""
    return CredentialsMetaInput(
        id=matching_cred.id,
        provider=matching_cred.provider,  # type: ignore
        type=matching_cred.type,
        title=matching_cred.title,
    )


async def match_credentials_to_requirements(
    user_id: str,
    requirements: dict[str, CredentialsFieldInfo],
    check_scopes: bool = True,
) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
    """
    Match user's credentials against a dictionary of credential requirements.

    This is the core matching logic shared by both graph and block credential matching.

    Args:
        user_id: User ID to fetch credentials for
        requirements: Dict mapping field names to CredentialsFieldInfo
        check_scopes: Whether to verify OAuth2 scopes match requirements (default True).
            Set to False to preserve original run_block behavior which didn't check scopes.
    """
    matched: dict[str, CredentialsMetaInput] = {}
    missing: list[CredentialsMetaInput] = []

    if not requirements:
        return matched, missing

    available_creds = await get_user_credentials(user_id)

    for field_name, field_info in requirements.items():
        matching_cred = find_matching_credential(
            available_creds, field_info, check_scopes=check_scopes
        )

        if matching_cred:
            try:
                matched[field_name] = create_credential_meta_from_match(matching_cred)
            except Exception as e:
                logger.error(
                    f"Failed to create CredentialsMetaInput for field '{field_name}': "
                    f"provider={matching_cred.provider}, type={matching_cred.type}, "
                    f"credential_id={matching_cred.id}",
                    exc_info=True,
                )
                provider = next(iter(field_info.provider), "unknown")
                cred_type = next(iter(field_info.supported_types), "api_key")
                missing.append(
                    CredentialsMetaInput(
                        id=field_name,
                        provider=provider,  # type: ignore
                        type=cred_type,  # type: ignore
                        title=f"{field_name} (validation failed: {e})",
                    )
                )
        else:
            provider = next(iter(field_info.provider), "unknown")
            cred_type = next(iter(field_info.supported_types), "api_key")
            missing.append(
                CredentialsMetaInput(
                    id=field_name,
                    provider=provider,  # type: ignore
                    type=cred_type,  # type: ignore
                    title=field_name.replace("_", " ").title(),
                )
            )

    return matched, missing


async def match_user_credentials_to_graph(
    user_id: str,
    graph: GraphModel,
@@ -873,14 +873,13 @@ def is_block_auth_configured(


async def initialize_blocks() -> None:
    # First, sync all provider costs to blocks
    # Imported here to avoid circular import
    from backend.sdk.cost_integration import sync_all_provider_costs
    from backend.util.retry import func_retry

    sync_all_provider_costs()

    for cls in get_blocks().values():
        block = cls()
    @func_retry
    async def sync_block_to_db(block: Block) -> None:
        existing_block = await AgentBlock.prisma().find_first(
            where={"OR": [{"id": block.id}, {"name": block.name}]}
        )

@@ -893,7 +892,7 @@ async def initialize_blocks() -> None:
                outputSchema=json.dumps(block.output_schema.jsonschema()),
            )
        )
        continue
        return

    input_schema = json.dumps(block.input_schema.jsonschema())
    output_schema = json.dumps(block.output_schema.jsonschema())

@@ -913,6 +912,25 @@ async def initialize_blocks() -> None:
            },
        )

    failed_blocks: list[str] = []
    for cls in get_blocks().values():
        block = cls()
        try:
            await sync_block_to_db(block)
        except Exception as e:
            logger.warning(
                f"Failed to sync block {block.name} to database: {e}. "
                "Block is still available in memory.",
                exc_info=True,
            )
            failed_blocks.append(block.name)

    if failed_blocks:
        logger.error(
            f"Failed to sync {len(failed_blocks)} block(s) to database: "
            f"{', '.join(failed_blocks)}. These blocks are still available in memory."
        )


# Note on the return type annotation: https://github.com/microsoft/pyright/issues/10281
def get_block(block_id: str) -> AnyBlockSchema | None:
autogpt_platform/backend/backend/util/validation.py (new file, +16)

@@ -0,0 +1,16 @@
"""Validation utilities."""

import re

_UUID_V4_PATTERN = re.compile(
    r"[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}",
    re.IGNORECASE,
)


def is_uuid_v4(text: str) -> bool:
    return bool(_UUID_V4_PATTERN.fullmatch(text.strip()))


def extract_uuids(text: str) -> list[str]:
    return sorted({m.lower() for m in _UUID_V4_PATTERN.findall(text)})
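A minimal usage sketch for the new validation helpers; the sample identifiers are made up, but the behaviour (strip-and-fullmatch, case-insensitive search, lowercased and deduplicated results) follows directly from the code above.

```python
from backend.util.validation import extract_uuids, is_uuid_v4

assert is_uuid_v4(" 0f8a2b9c-5d4e-4f3a-8b2c-1d2e3f4a5b6c ")  # whitespace is stripped first
assert not is_uuid_v4("not-a-uuid")

text = (
    "run 0F8A2B9C-5D4E-4F3A-8B2C-1D2E3F4A5B6C failed, "
    "retry 0f8a2b9c-5d4e-4f3a-8b2c-1d2e3f4a5b6c"
)
assert extract_uuids(text) == ["0f8a2b9c-5d4e-4f3a-8b2c-1d2e3f4a5b6c"]  # lowercased, deduplicated, sorted
```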
@@ -1,10 +1,9 @@
"use client";
import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
import { getOnboardingStatus, resolveResponse } from "@/app/api/helpers";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { resolveResponse, getOnboardingStatus } from "@/app/api/helpers";
import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
import { getHomepageRoute } from "@/lib/constants";

export default function OnboardingPage() {
  const router = useRouter();

@@ -13,12 +12,10 @@ export default function OnboardingPage() {
    async function redirectToStep() {
      try {
        // Check if onboarding is enabled (also gets chat flag for redirect)
        const { shouldShowOnboarding, isChatEnabled } =
          await getOnboardingStatus();
        const homepageRoute = getHomepageRoute(isChatEnabled);
        const { shouldShowOnboarding } = await getOnboardingStatus();

        if (!shouldShowOnboarding) {
          router.replace(homepageRoute);
          router.replace("/");
          return;
        }

@@ -26,7 +23,7 @@ export default function OnboardingPage() {

        // Handle completed onboarding
        if (onboarding.completedSteps.includes("GET_RESULTS")) {
          router.replace(homepageRoute);
          router.replace("/");
          return;
        }
@@ -1,9 +1,8 @@
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { getHomepageRoute } from "@/lib/constants";
import BackendAPI from "@/lib/autogpt-server-api";
import { NextResponse } from "next/server";
import { revalidatePath } from "next/cache";
import { getOnboardingStatus } from "@/app/api/helpers";
import BackendAPI from "@/lib/autogpt-server-api";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { revalidatePath } from "next/cache";
import { NextResponse } from "next/server";

// Handle the callback to complete the user session login
export async function GET(request: Request) {

@@ -27,13 +26,12 @@ export async function GET(request: Request) {
      await api.createUser();

      // Get onboarding status from backend (includes chat flag evaluated for this user)
      const { shouldShowOnboarding, isChatEnabled } =
        await getOnboardingStatus();
      const { shouldShowOnboarding } = await getOnboardingStatus();
      if (shouldShowOnboarding) {
        next = "/onboarding";
        revalidatePath("/onboarding", "layout");
      } else {
        next = getHomepageRoute(isChatEnabled);
        next = "/";
        revalidatePath(next, "layout");
      }
    } catch (createUserError) {
@@ -1,6 +1,13 @@
import type { ReactNode } from "react";
"use client";

import { FeatureFlagPage } from "@/services/feature-flags/FeatureFlagPage";
import { Flag } from "@/services/feature-flags/use-get-flag";
import { type ReactNode } from "react";

import { CopilotShell } from "./components/CopilotShell/CopilotShell";

export default function CopilotLayout({ children }: { children: ReactNode }) {
  return <CopilotShell>{children}</CopilotShell>;
  return (
    <FeatureFlagPage flag={Flag.CHAT} whenDisabled="/library">
      <CopilotShell>{children}</CopilotShell>
    </FeatureFlagPage>
  );
}
@@ -14,14 +14,8 @@ export default function CopilotPage() {
  const isInterruptModalOpen = useCopilotStore((s) => s.isInterruptModalOpen);
  const confirmInterrupt = useCopilotStore((s) => s.confirmInterrupt);
  const cancelInterrupt = useCopilotStore((s) => s.cancelInterrupt);
  const {
    greetingName,
    quickActions,
    isLoading,
    hasSession,
    initialPrompt,
    isReady,
  } = state;
  const { greetingName, quickActions, isLoading, hasSession, initialPrompt } =
    state;
  const {
    handleQuickAction,
    startChatWithPrompt,

@@ -29,8 +23,6 @@ export default function CopilotPage() {
    handleStreamingChange,
  } = handlers;

  if (!isReady) return null;

  if (hasSession) {
    return (
      <div className="flex h-full flex-col">
@@ -3,18 +3,11 @@ import {
  postV2CreateSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
import {
  Flag,
  type FlagValues,
  useGetFlag,
} from "@/services/feature-flags/use-get-flag";
import { SessionKey, sessionStorage } from "@/services/storage/session-storage";
import * as Sentry from "@sentry/nextjs";
import { useQueryClient } from "@tanstack/react-query";
import { useFlags } from "launchdarkly-react-client-sdk";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { useCopilotStore } from "./copilot-page-store";

@@ -33,22 +26,6 @@ export function useCopilotPage() {
  const isCreating = useCopilotStore((s) => s.isCreatingSession);
  const setIsCreating = useCopilotStore((s) => s.setIsCreatingSession);

  // Complete VISIT_COPILOT onboarding step to grant $5 welcome bonus
  useEffect(() => {
    if (isLoggedIn) {
      completeStep("VISIT_COPILOT");
    }
  }, [completeStep, isLoggedIn]);

  const isChatEnabled = useGetFlag(Flag.CHAT);
  const flags = useFlags<FlagValues>();
  const homepageRoute = getHomepageRoute(isChatEnabled);
  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
  const isFlagReady =
    !isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;

  const greetingName = getGreetingName(user);
  const quickActions = getQuickActions();

@@ -58,11 +35,8 @@ export function useCopilotPage() {
    : undefined;

  useEffect(() => {
    if (!isFlagReady) return;
    if (isChatEnabled === false) {
      router.replace(homepageRoute);
    }
  }, [homepageRoute, isChatEnabled, isFlagReady, router]);
    if (isLoggedIn) completeStep("VISIT_COPILOT");
  }, [completeStep, isLoggedIn]);

  async function startChatWithPrompt(prompt: string) {
    if (!prompt?.trim()) return;

@@ -116,7 +90,6 @@ export function useCopilotPage() {
      isLoading: isUserLoading,
      hasSession,
      initialPrompt,
      isReady: isFlagReady && isChatEnabled !== false && isLoggedIn,
    },
    handlers: {
      handleQuickAction,
@@ -1,8 +1,6 @@
"use client";

import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
import { getHomepageRoute } from "@/lib/constants";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { useSearchParams } from "next/navigation";
import { Suspense } from "react";
import { getErrorDetails } from "./helpers";

@@ -11,8 +9,6 @@ function ErrorPageContent() {
  const searchParams = useSearchParams();
  const errorMessage = searchParams.get("message");
  const errorDetails = getErrorDetails(errorMessage);
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const homepageRoute = getHomepageRoute(isChatEnabled);

  function handleRetry() {
    // Auth-related errors should redirect to login

@@ -30,7 +26,7 @@ function ErrorPageContent() {
      }, 2000);
    } else {
      // For server/network errors, go to home
      window.location.href = homepageRoute;
      window.location.href = "/";
    }
  }
@@ -1,6 +1,5 @@
"use server";

import { getHomepageRoute } from "@/lib/constants";
import BackendAPI from "@/lib/autogpt-server-api";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { loginFormSchema } from "@/types/auth";

@@ -38,10 +37,8 @@ export async function login(email: string, password: string) {
    await api.createUser();

    // Get onboarding status from backend (includes chat flag evaluated for this user)
    const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
    const next = shouldShowOnboarding
      ? "/onboarding"
      : getHomepageRoute(isChatEnabled);
    const { shouldShowOnboarding } = await getOnboardingStatus();
    const next = shouldShowOnboarding ? "/onboarding" : "/";

    return {
      success: true,
@@ -1,8 +1,6 @@
import { useToast } from "@/components/molecules/Toast/use-toast";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { environment } from "@/services/environment";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { loginFormSchema, LoginProvider } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter, useSearchParams } from "next/navigation";

@@ -22,17 +20,15 @@ export function useLoginPage() {
  const [isGoogleLoading, setIsGoogleLoading] = useState(false);
  const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
  const isCloudEnv = environment.isCloud();
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const homepageRoute = getHomepageRoute(isChatEnabled);

  // Get redirect destination from 'next' query parameter
  const nextUrl = searchParams.get("next");

  useEffect(() => {
    if (isLoggedIn && !isLoggingIn) {
      router.push(nextUrl || homepageRoute);
      router.push(nextUrl || "/");
    }
  }, [homepageRoute, isLoggedIn, isLoggingIn, nextUrl, router]);
  }, [isLoggedIn, isLoggingIn, nextUrl, router]);

  const form = useForm<z.infer<typeof loginFormSchema>>({
    resolver: zodResolver(loginFormSchema),

@@ -98,7 +94,7 @@ export function useLoginPage() {
      }

      // Prefer URL's next parameter, then use backend-determined route
      router.replace(nextUrl || result.next || homepageRoute);
      router.replace(nextUrl || result.next || "/");
    } catch (error) {
      toast({
        title:
@@ -1,6 +1,5 @@
"use server";

import { getHomepageRoute } from "@/lib/constants";
import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
import { signupFormSchema } from "@/types/auth";
import * as Sentry from "@sentry/nextjs";

@@ -59,10 +58,8 @@ export async function signup(
    }

    // Get onboarding status from backend (includes chat flag evaluated for this user)
    const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
    const next = shouldShowOnboarding
      ? "/onboarding"
      : getHomepageRoute(isChatEnabled);
    const { shouldShowOnboarding } = await getOnboardingStatus();
    const next = shouldShowOnboarding ? "/onboarding" : "/";

    return { success: true, next };
  } catch (err) {
@@ -1,8 +1,6 @@
import { useToast } from "@/components/molecules/Toast/use-toast";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { environment } from "@/services/environment";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { LoginProvider, signupFormSchema } from "@/types/auth";
import { zodResolver } from "@hookform/resolvers/zod";
import { useRouter, useSearchParams } from "next/navigation";

@@ -22,17 +20,15 @@ export function useSignupPage() {
  const [isGoogleLoading, setIsGoogleLoading] = useState(false);
  const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
  const isCloudEnv = environment.isCloud();
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const homepageRoute = getHomepageRoute(isChatEnabled);

  // Get redirect destination from 'next' query parameter
  const nextUrl = searchParams.get("next");

  useEffect(() => {
    if (isLoggedIn && !isSigningUp) {
      router.push(nextUrl || homepageRoute);
      router.push(nextUrl || "/");
    }
  }, [homepageRoute, isLoggedIn, isSigningUp, nextUrl, router]);
  }, [isLoggedIn, isSigningUp, nextUrl, router]);

  const form = useForm<z.infer<typeof signupFormSchema>>({
    resolver: zodResolver(signupFormSchema),

@@ -133,7 +129,7 @@ export function useSignupPage() {
      }

      // Prefer the URL's next parameter, then result.next (for onboarding), then default
      const redirectTo = nextUrl || result.next || homepageRoute;
      const redirectTo = nextUrl || result.next || "/";
      router.replace(redirectTo);
    } catch (error) {
      setIsLoading(false);
@@ -181,6 +181,5 @@ export async function getOnboardingStatus() {
  const isCompleted = onboarding.completedSteps.includes("CONGRATS");
  return {
    shouldShowOnboarding: status.is_onboarding_enabled && !isCompleted,
    isChatEnabled: status.is_chat_enabled,
  };
}
@@ -1,27 +1,15 @@
"use client";

import { getHomepageRoute } from "@/lib/constants";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useRouter } from "next/navigation";
import { useEffect } from "react";

export default function Page() {
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const router = useRouter();
  const homepageRoute = getHomepageRoute(isChatEnabled);
  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
  const isFlagReady =
    !isLaunchDarklyConfigured || typeof isChatEnabled === "boolean";

  useEffect(
    function redirectToHomepage() {
      if (!isFlagReady) return;
      router.replace(homepageRoute);
    },
    [homepageRoute, isFlagReady, router],
  );
  useEffect(() => {
    router.replace("/copilot");
  }, [router]);

  return null;
  return <LoadingSpinner size="large" cover />;
}
@@ -104,31 +104,7 @@ export function FileInput(props: Props) {
    return false;
  }

  const getFileLabelFromValue = (val: unknown): string => {
    // Handle object format from external API: { name, type, size, data }
    if (val && typeof val === "object") {
      const obj = val as Record<string, unknown>;
      if (typeof obj.name === "string") {
        return getFileLabel(
          obj.name,
          typeof obj.type === "string" ? obj.type : "",
        );
      }
      if (typeof obj.type === "string") {
        const mimeParts = obj.type.split("/");
        if (mimeParts.length > 1) {
          return `${mimeParts[1].toUpperCase()} file`;
        }
        return `${obj.type} file`;
      }
      return "File";
    }

    // Handle string values (data URIs or file paths)
    if (typeof val !== "string") {
      return "File";
    }

  const getFileLabelFromValue = (val: string) => {
    if (val.startsWith("data:")) {
      const matches = val.match(/^data:([^;]+);/);
      if (matches?.[1]) {
@@ -1,7 +1,6 @@
"use client";

import { IconLaptop } from "@/components/__legacy__/ui/icons";
import { getHomepageRoute } from "@/lib/constants";
import { cn } from "@/lib/utils";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { ListChecksIcon } from "@phosphor-icons/react/dist/ssr";

@@ -24,11 +23,11 @@ interface Props {
export function NavbarLink({ name, href }: Props) {
  const pathname = usePathname();
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const homepageRoute = getHomepageRoute(isChatEnabled);
  const expectedHomeRoute = isChatEnabled ? "/copilot" : "/library";

  const isActive =
    href === homepageRoute
      ? pathname === "/" || pathname.startsWith(homepageRoute)
    href === expectedHomeRoute
      ? pathname === "/" || pathname.startsWith(expectedHomeRoute)
      : pathname.includes(href);

  return (
@@ -66,7 +66,7 @@ export default function useAgentGraph(
  >(null);
  const [xyNodes, setXYNodes] = useState<CustomNode[]>([]);
  const [xyEdges, setXYEdges] = useState<CustomEdge[]>([]);
  const betaBlocks = useGetFlag(Flag.BETA_BLOCKS);
  const betaBlocks = useGetFlag(Flag.BETA_BLOCKS) as string[];

  // Filter blocks based on beta flags
  const availableBlocks = useMemo(() => {
@@ -11,10 +11,3 @@ export const API_KEY_HEADER_NAME = "X-API-Key";

// Layout
export const NAVBAR_HEIGHT_PX = 60;

// Routes
export function getHomepageRoute(isChatEnabled?: boolean | null): string {
  if (isChatEnabled === true) return "/copilot";
  if (isChatEnabled === false) return "/library";
  return "/";
}
@@ -1,4 +1,3 @@
import { getHomepageRoute } from "@/lib/constants";
import { environment } from "@/services/environment";
import { Key, storage } from "@/services/storage/local-storage";
import { type CookieOptions } from "@supabase/ssr";

@@ -71,7 +70,7 @@ export function getRedirectPath(
  }

  if (isAdminPage(path) && userRole !== "admin") {
    return getHomepageRoute();
    return "/";
  }

  return null;
@@ -1,4 +1,3 @@
import { getHomepageRoute } from "@/lib/constants";
import { environment } from "@/services/environment";
import { createServerClient } from "@supabase/ssr";
import { NextResponse, type NextRequest } from "next/server";

@@ -67,7 +66,7 @@ export async function updateSession(request: NextRequest) {

  // 2. Check if user is authenticated but lacks admin role when accessing admin pages
  if (user && userRole !== "admin" && isAdminPage(pathname)) {
    url.pathname = getHomepageRoute();
    url.pathname = "/";
    return NextResponse.redirect(url);
  }
@@ -23,9 +23,7 @@ import {
  WebSocketNotification,
} from "@/lib/autogpt-server-api";
import { useBackendAPI } from "@/lib/autogpt-server-api/context";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import Link from "next/link";
import { usePathname, useRouter } from "next/navigation";
import {

@@ -104,8 +102,6 @@ export default function OnboardingProvider({
  const pathname = usePathname();
  const router = useRouter();
  const { isLoggedIn } = useSupabase();
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const homepageRoute = getHomepageRoute(isChatEnabled);

  useOnboardingTimezoneDetection();

@@ -150,7 +146,7 @@ export default function OnboardingProvider({
      if (isOnOnboardingRoute) {
        const enabled = await resolveResponse(getV1IsOnboardingEnabled());
        if (!enabled) {
          router.push(homepageRoute);
          router.push("/");
          return;
        }
      }

@@ -162,7 +158,7 @@ export default function OnboardingProvider({
        isOnOnboardingRoute &&
        shouldRedirectFromOnboarding(onboarding.completedSteps, pathname)
      ) {
        router.push(homepageRoute);
        router.push("/");
      }
    } catch (error) {
      console.error("Failed to initialize onboarding:", error);

@@ -177,7 +173,7 @@ export default function OnboardingProvider({
    }

    initializeOnboarding();
  }, [api, homepageRoute, isOnOnboardingRoute, router, isLoggedIn, pathname]);
  }, [api, isOnOnboardingRoute, router, isLoggedIn, pathname]);

  const handleOnboardingNotification = useCallback(
    (notification: WebSocketNotification) => {
@@ -83,6 +83,10 @@ function getPostHogCredentials() {
  };
}

function getLaunchDarklyClientId() {
  return process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
}

function isProductionBuild() {
  return process.env.NODE_ENV === "production";
}

@@ -120,7 +124,10 @@ function isVercelPreview() {
}

function areFeatureFlagsEnabled() {
  return process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "enabled";
  return (
    process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true" &&
    Boolean(process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID)
  );
}

function isPostHogEnabled() {

@@ -143,6 +150,7 @@ export const environment = {
  getSupabaseAnonKey,
  getPreviewStealingDev,
  getPostHogCredentials,
  getLaunchDarklyClientId,
  // Assertions
  isServerSide,
  isClientSide,
@@ -0,0 +1,59 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useLDClient } from "launchdarkly-react-client-sdk";
import { useRouter } from "next/navigation";
import { ReactNode, useEffect, useState } from "react";
import { environment } from "../environment";
import { Flag, useGetFlag } from "./use-get-flag";

interface FeatureFlagRedirectProps {
  flag: Flag;
  whenDisabled: string;
  children: ReactNode;
}

export function FeatureFlagPage({
  flag,
  whenDisabled,
  children,
}: FeatureFlagRedirectProps) {
  const [isLoading, setIsLoading] = useState(true);
  const router = useRouter();
  const flagValue = useGetFlag(flag);
  const ldClient = useLDClient();
  const ldEnabled = environment.areFeatureFlagsEnabled();
  const ldReady = Boolean(ldClient);
  const flagEnabled = Boolean(flagValue);

  useEffect(() => {
    const initialize = async () => {
      if (!ldEnabled) {
        router.replace(whenDisabled);
        setIsLoading(false);
        return;
      }

      // Wait for LaunchDarkly to initialize when enabled to prevent race conditions
      if (ldEnabled && !ldReady) return;

      try {
        await ldClient?.waitForInitialization();
        if (!flagEnabled) router.replace(whenDisabled);
      } catch (error) {
        console.error(error);
        router.replace(whenDisabled);
      } finally {
        setIsLoading(false);
      }
    };

    initialize();
  }, [ldReady, flagEnabled]);

  return isLoading || !flagEnabled ? (
    <LoadingSpinner size="large" cover />
  ) : (
    <>{children}</>
  );
}
@@ -0,0 +1,51 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useLDClient } from "launchdarkly-react-client-sdk";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { environment } from "../environment";
import { Flag, useGetFlag } from "./use-get-flag";

interface FeatureFlagRedirectProps {
  flag: Flag;
  whenEnabled: string;
  whenDisabled: string;
}

export function FeatureFlagRedirect({
  flag,
  whenEnabled,
  whenDisabled,
}: FeatureFlagRedirectProps) {
  const router = useRouter();
  const flagValue = useGetFlag(flag);
  const ldEnabled = environment.areFeatureFlagsEnabled();
  const ldClient = useLDClient();
  const ldReady = Boolean(ldClient);
  const flagEnabled = Boolean(flagValue);

  useEffect(() => {
    const initialize = async () => {
      if (!ldEnabled) {
        router.replace(whenDisabled);
        return;
      }

      // Wait for LaunchDarkly to initialize when enabled to prevent race conditions
      if (ldEnabled && !ldReady) return;

      try {
        await ldClient?.waitForInitialization();
        router.replace(flagEnabled ? whenEnabled : whenDisabled);
      } catch (error) {
        console.error(error);
        router.replace(whenDisabled);
      }
    };

    initialize();
  }, [ldReady, flagEnabled]);

  return <LoadingSpinner size="large" cover />;
}
@@ -1,5 +1,6 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import * as Sentry from "@sentry/nextjs";
import { LDProvider } from "launchdarkly-react-client-sdk";

@@ -7,17 +8,17 @@ import type { ReactNode } from "react";
import { useMemo } from "react";
import { environment } from "../environment";

const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
const LAUNCHDARKLY_INIT_TIMEOUT_MS = 5000;

export function LaunchDarklyProvider({ children }: { children: ReactNode }) {
  const { user, isUserLoading } = useSupabase();
  const isCloud = environment.isCloud();
  const isLaunchDarklyConfigured = isCloud && envEnabled && clientId;
  const envEnabled = environment.areFeatureFlagsEnabled();
  const clientId = environment.getLaunchDarklyClientId();

  const context = useMemo(() => {
    if (isUserLoading || !user) {
    if (isUserLoading) return;

    if (!user) {
      return {
        kind: "user" as const,
        key: "anonymous",

@@ -36,15 +37,17 @@ export function LaunchDarklyProvider({ children }: { children: ReactNode }) {
    };
  }, [user, isUserLoading]);

  if (!isLaunchDarklyConfigured) {
  if (!envEnabled) {
    return <>{children}</>;
  }

  if (isUserLoading) {
    return <LoadingSpinner size="large" cover />;
  }

  return (
    <LDProvider
      // Add this key prop. It will be 'anonymous' when logged out,
      key={context.key}
      clientSideID={clientId}
      clientSideID={clientId ?? ""}
      context={context}
      timeout={LAUNCHDARKLY_INIT_TIMEOUT_MS}
      reactOptions={{ useCamelCaseFlagKeys: false }}
@@ -1,6 +1,7 @@
"use client";

import { DEFAULT_SEARCH_TERMS } from "@/app/(platform)/marketplace/components/HeroSection/helpers";
import { environment } from "@/services/environment";
import { useFlags } from "launchdarkly-react-client-sdk";

export enum Flag {

@@ -18,24 +19,9 @@ export enum Flag {
  CHAT = "chat",
}

export type FlagValues = {
  [Flag.BETA_BLOCKS]: string[];
  [Flag.NEW_BLOCK_MENU]: boolean;
  [Flag.NEW_AGENT_RUNS]: boolean;
  [Flag.GRAPH_SEARCH]: boolean;
  [Flag.ENABLE_ENHANCED_OUTPUT_HANDLING]: boolean;
  [Flag.NEW_FLOW_EDITOR]: boolean;
  [Flag.BUILDER_VIEW_SWITCH]: boolean;
  [Flag.SHARE_EXECUTION_RESULTS]: boolean;
  [Flag.AGENT_FAVORITING]: boolean;
  [Flag.MARKETPLACE_SEARCH_TERMS]: string[];
  [Flag.ENABLE_PLATFORM_PAYMENT]: boolean;
  [Flag.CHAT]: boolean;
};

const isPwMockEnabled = process.env.NEXT_PUBLIC_PW_TEST === "true";

const mockFlags = {
const defaultFlags = {
  [Flag.BETA_BLOCKS]: [],
  [Flag.NEW_BLOCK_MENU]: false,
  [Flag.NEW_AGENT_RUNS]: false,

@@ -50,17 +36,16 @@ const mockFlags = {
  [Flag.CHAT]: false,
};

export function useGetFlag<T extends Flag>(flag: T): FlagValues[T] | null {
type FlagValues = typeof defaultFlags;

export function useGetFlag<T extends Flag>(flag: T): FlagValues[T] {
  const currentFlags = useFlags<FlagValues>();
  const flagValue = currentFlags[flag];
  const areFlagsEnabled = environment.areFeatureFlagsEnabled();

  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);

  if (!isLaunchDarklyConfigured || isPwMockEnabled) {
    return mockFlags[flag];
  if (!areFlagsEnabled || isPwMockEnabled) {
    return defaultFlags[flag];
  }

  return flagValue ?? mockFlags[flag];
  return flagValue ?? defaultFlags[flag];
}
classic/frontend/.gitignore (vendored, +1)

@@ -8,6 +8,7 @@
.buildlog/
.history
.svn/
.next/
migrate_working_dir/

# IntelliJ related