Mirror of https://github.com/Significant-Gravitas/AutoGPT.git (synced 2026-02-04 20:05:11 -05:00)

Compare commits: otto/copil ... classic-fr (13 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 2f12c76536 |  |
|  | 4878665c66 |  |
|  | f7350c797a |  |
|  | 2abbb7fbc8 |  |
|  | 05b60db554 |  |
|  | cc4839bedb |  |
|  | dbbff04616 |  |
|  | e6438b9a76 |  |
|  | e10ff8d37f |  |
|  | 9538992eaf |  |
|  | 27b72062f2 |  |
|  | 9a79a8d257 |  |
|  | a9bf08748b |  |
.gitignore (vendored), 1 change

@@ -180,3 +180,4 @@ autogpt_platform/backend/settings.py
 .claude/settings.local.json
 CLAUDE.local.md
 /autogpt_platform/backend/logs
+.next
@@ -17,14 +17,6 @@ from .model import ChatSession, create_chat_session, get_chat_session, get_user_
 
 config = ChatConfig()
 
-# SSE response headers for streaming
-SSE_RESPONSE_HEADERS = {
-    "Cache-Control": "no-cache",
-    "Connection": "keep-alive",
-    "X-Accel-Buffering": "no",
-    "x-vercel-ai-ui-message-stream": "v1",
-}
-
 
 logger = logging.getLogger(__name__)
 
@@ -40,60 +32,6 @@ async def _validate_and_get_session(
     return session
 
 
-async def _create_stream_generator(
-    session_id: str,
-    message: str,
-    user_id: str | None,
-    session: ChatSession,
-    is_user_message: bool = True,
-    context: dict[str, str] | None = None,
-) -> AsyncGenerator[str, None]:
-    """Create SSE event generator for chat streaming.
-
-    Args:
-        session_id: Chat session ID
-        message: User message to process
-        user_id: Optional authenticated user ID
-        session: Pre-fetched chat session
-        is_user_message: Whether the message is from a user
-        context: Optional context dict with url and content
-
-    Yields:
-        SSE-formatted chunks from the chat completion stream
-    """
-    chunk_count = 0
-    first_chunk_type: str | None = None
-    async for chunk in chat_service.stream_chat_completion(
-        session_id,
-        message,
-        is_user_message=is_user_message,
-        user_id=user_id,
-        session=session,
-        context=context,
-    ):
-        if chunk_count < 3:
-            logger.info(
-                "Chat stream chunk",
-                extra={
-                    "session_id": session_id,
-                    "chunk_type": str(chunk.type),
-                },
-            )
-            if not first_chunk_type:
-                first_chunk_type = str(chunk.type)
-        chunk_count += 1
-        yield chunk.to_sse()
-    logger.info(
-        "Chat stream completed",
-        extra={
-            "session_id": session_id,
-            "chunk_count": chunk_count,
-            "first_chunk_type": first_chunk_type,
-        },
-    )
-    yield "data: [DONE]\n\n"
-
-
 router = APIRouter(
     tags=["chat"],
 )
@@ -283,17 +221,49 @@ async def stream_chat_post(
     """
     session = await _validate_and_get_session(session_id, user_id)
 
-    return StreamingResponse(
-        _create_stream_generator(
-            session_id=session_id,
-            message=request.message,
-            user_id=user_id,
-            session=session,
+    async def event_generator() -> AsyncGenerator[str, None]:
+        chunk_count = 0
+        first_chunk_type: str | None = None
+        async for chunk in chat_service.stream_chat_completion(
+            session_id,
+            request.message,
             is_user_message=request.is_user_message,
+            user_id=user_id,
+            session=session,  # Pass pre-fetched session to avoid double-fetch
             context=request.context,
-        ),
+        ):
+            if chunk_count < 3:
+                logger.info(
+                    "Chat stream chunk",
+                    extra={
+                        "session_id": session_id,
+                        "chunk_type": str(chunk.type),
+                    },
+                )
+                if not first_chunk_type:
+                    first_chunk_type = str(chunk.type)
+            chunk_count += 1
+            yield chunk.to_sse()
+        logger.info(
+            "Chat stream completed",
+            extra={
+                "session_id": session_id,
+                "chunk_count": chunk_count,
+                "first_chunk_type": first_chunk_type,
+            },
+        )
+        # AI SDK protocol termination
+        yield "data: [DONE]\n\n"
+
+    return StreamingResponse(
+        event_generator(),
         media_type="text/event-stream",
-        headers=SSE_RESPONSE_HEADERS,
+        headers={
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+            "X-Accel-Buffering": "no",  # Disable nginx buffering
+            "x-vercel-ai-ui-message-stream": "v1",  # AI SDK protocol header
+        },
     )
 
 
@@ -325,16 +295,48 @@ async def stream_chat_get(
     """
     session = await _validate_and_get_session(session_id, user_id)
 
-    return StreamingResponse(
-        _create_stream_generator(
-            session_id=session_id,
-            message=message,
-            user_id=user_id,
-            session=session,
+    async def event_generator() -> AsyncGenerator[str, None]:
+        chunk_count = 0
+        first_chunk_type: str | None = None
+        async for chunk in chat_service.stream_chat_completion(
+            session_id,
+            message,
             is_user_message=is_user_message,
-        ),
+            user_id=user_id,
+            session=session,  # Pass pre-fetched session to avoid double-fetch
+        ):
+            if chunk_count < 3:
+                logger.info(
+                    "Chat stream chunk",
+                    extra={
+                        "session_id": session_id,
+                        "chunk_type": str(chunk.type),
+                    },
+                )
+                if not first_chunk_type:
+                    first_chunk_type = str(chunk.type)
+            chunk_count += 1
+            yield chunk.to_sse()
+        logger.info(
+            "Chat stream completed",
+            extra={
+                "session_id": session_id,
+                "chunk_count": chunk_count,
+                "first_chunk_type": first_chunk_type,
+            },
+        )
+        # AI SDK protocol termination
+        yield "data: [DONE]\n\n"
+
+    return StreamingResponse(
+        event_generator(),
         media_type="text/event-stream",
-        headers=SSE_RESPONSE_HEADERS,
+        headers={
+            "Cache-Control": "no-cache",
+            "Connection": "keep-alive",
+            "X-Accel-Buffering": "no",  # Disable nginx buffering
+            "x-vercel-ai-ui-message-stream": "v1",  # AI SDK protocol header
+        },
     )
 
 
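For orientation (not part of the diff): a minimal, self-contained sketch of the SSE pattern both versions of these endpoints share, using FastAPI as the hunks above do. The dummy route path and token list are made up and stand in for `chat_service.stream_chat_completion`.

```python
import asyncio
from collections.abc import AsyncGenerator

from fastapi import APIRouter
from fastapi.responses import StreamingResponse

router = APIRouter(tags=["chat"])


@router.get("/demo/stream")  # hypothetical route for illustration
async def demo_stream() -> StreamingResponse:
    async def event_generator() -> AsyncGenerator[str, None]:
        for token in ["Hello", " ", "world"]:  # stand-in for real model chunks
            yield f"data: {token}\n\n"         # one SSE frame: "data: ..." plus a blank line
            await asyncio.sleep(0)
        yield "data: [DONE]\n\n"               # AI SDK protocol termination

    return StreamingResponse(
        event_generator(),
        media_type="text/event-stream",
        headers={
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "X-Accel-Buffering": "no",              # disable nginx buffering
            "x-vercel-ai-ui-message-stream": "v1",  # AI SDK protocol header
        },
    )
```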
@@ -1,77 +0,0 @@
-"""Shared helpers for chat tools."""
-
-from typing import Any
-
-from .models import ErrorResponse
-
-
-def error_response(
-    message: str, session_id: str | None, **kwargs: Any
-) -> ErrorResponse:
-    """Create standardized error response.
-
-    Args:
-        message: Error message to display
-        session_id: Current session ID
-        **kwargs: Additional fields to pass to ErrorResponse
-
-    Returns:
-        ErrorResponse with the given message and session_id
-    """
-    return ErrorResponse(message=message, session_id=session_id, **kwargs)
-
-
-def get_inputs_from_schema(
-    input_schema: dict[str, Any],
-    exclude_fields: set[str] | None = None,
-) -> list[dict[str, Any]]:
-    """Extract input field info from JSON schema.
-
-    Args:
-        input_schema: JSON schema dict with 'properties' and 'required'
-        exclude_fields: Set of field names to exclude (e.g., credential fields)
-
-    Returns:
-        List of dicts with field info (name, title, type, description, required, default)
-    """
-    exclude = exclude_fields or set()
-    properties = input_schema.get("properties", {})
-    required = set(input_schema.get("required", []))
-
-    return [
-        {
-            "name": name,
-            "title": schema.get("title", name),
-            "type": schema.get("type", "string"),
-            "description": schema.get("description", ""),
-            "required": name in required,
-            "default": schema.get("default"),
-        }
-        for name, schema in properties.items()
-        if name not in exclude
-    ]
-
-
-def format_inputs_as_markdown(inputs: list[dict[str, Any]]) -> str:
-    """Format input fields as a readable markdown list.
-
-    Args:
-        inputs: List of input dicts from get_inputs_from_schema
-
-    Returns:
-        Markdown-formatted string listing the inputs
-    """
-    if not inputs:
-        return "No inputs required."
-
-    lines = []
-    for inp in inputs:
-        required_marker = " (required)" if inp.get("required") else ""
-        default = inp.get("default")
-        default_info = f" [default: {default}]" if default is not None else ""
-        description = inp.get("description", "")
-        desc_info = f" - {description}" if description else ""
-
-        lines.append(f"- **{inp['name']}**{required_marker}{default_info}{desc_info}")
-
-    return "\n".join(lines)
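For reference (not part of the diff): the schema-extraction logic of the deleted `get_inputs_from_schema` helper, restated as a runnable snippet with a made-up schema, since the same field-listing shape reappears inline in the tools further down.

```python
from typing import Any


def get_inputs_from_schema(
    input_schema: dict[str, Any],
    exclude_fields: set[str] | None = None,
) -> list[dict[str, Any]]:
    # Same extraction as the deleted helper: one dict per non-excluded property.
    exclude = exclude_fields or set()
    properties = input_schema.get("properties", {})
    required = set(input_schema.get("required", []))
    return [
        {
            "name": name,
            "title": schema.get("title", name),
            "type": schema.get("type", "string"),
            "description": schema.get("description", ""),
            "required": name in required,
            "default": schema.get("default"),
        }
        for name, schema in properties.items()
        if name not in exclude
    ]


# Hypothetical schema, for illustration only.
schema = {
    "properties": {
        "topic": {"title": "Topic", "type": "string", "description": "What to write about"},
        "credentials": {"title": "Credentials", "type": "object"},
    },
    "required": ["topic"],
}
print(get_inputs_from_schema(schema, exclude_fields={"credentials"}))
# -> [{'name': 'topic', 'title': 'Topic', 'type': 'string', ..., 'required': True, 'default': None}]
```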
@@ -38,6 +38,8 @@ class ResponseType(str, Enum):
     OPERATION_STARTED = "operation_started"
     OPERATION_PENDING = "operation_pending"
     OPERATION_IN_PROGRESS = "operation_in_progress"
+    # Input validation
+    INPUT_VALIDATION_ERROR = "input_validation_error"
 
 
 # Base response model
@@ -68,6 +70,10 @@ class AgentInfo(BaseModel):
     has_external_trigger: bool | None = None
     new_output: bool | None = None
     graph_id: str | None = None
+    inputs: dict[str, Any] | None = Field(
+        default=None,
+        description="Input schema for the agent, including field names, types, and defaults",
+    )
 
 
 class AgentsFoundResponse(ToolResponseBase):
@@ -194,6 +200,20 @@ class ErrorResponse(ToolResponseBase):
     details: dict[str, Any] | None = None
 
 
+class InputValidationErrorResponse(ToolResponseBase):
+    """Response when run_agent receives unknown input fields."""
+
+    type: ResponseType = ResponseType.INPUT_VALIDATION_ERROR
+    unrecognized_fields: list[str] = Field(
+        description="List of input field names that were not recognized"
+    )
+    inputs: dict[str, Any] = Field(
+        description="The agent's valid input schema for reference"
+    )
+    graph_id: str | None = None
+    graph_version: int | None = None
+
+
 # Agent output models
 class ExecutionOutputInfo(BaseModel):
     """Summary of a single execution's outputs."""
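For illustration (not part of the diff): constructing the new `InputValidationErrorResponse` shape as a plain Pydantic model. `ToolResponseBase` is replaced here by `BaseModel`, the `message`/`session_id` fields are assumed to come from that base class, and all values are made up.

```python
from enum import Enum
from typing import Any

from pydantic import BaseModel, Field


class ResponseType(str, Enum):
    INPUT_VALIDATION_ERROR = "input_validation_error"


class InputValidationErrorResponse(BaseModel):
    # message/session_id stand in for fields assumed to live on ToolResponseBase.
    type: ResponseType = ResponseType.INPUT_VALIDATION_ERROR
    message: str
    session_id: str | None = None
    unrecognized_fields: list[str] = Field(
        description="List of input field names that were not recognized"
    )
    inputs: dict[str, Any] = Field(description="The agent's valid input schema for reference")
    graph_id: str | None = None
    graph_version: int | None = None


resp = InputValidationErrorResponse(
    message="Unknown input field(s) provided: foo. Agent was not executed.",
    session_id="session-123",  # hypothetical value
    unrecognized_fields=["foo"],
    inputs={"properties": {"topic": {"type": "string"}}, "required": ["topic"]},
)
print(resp.model_dump_json(indent=2))
```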
@@ -24,13 +24,13 @@ from backend.util.timezone_utils import (
 )
 
 from .base import BaseTool
-from .helpers import get_inputs_from_schema
 from .models import (
     AgentDetails,
     AgentDetailsResponse,
     ErrorResponse,
     ExecutionOptions,
     ExecutionStartedResponse,
+    InputValidationErrorResponse,
     SetupInfo,
     SetupRequirementsResponse,
     ToolResponseBase,
@@ -274,6 +274,22 @@ class RunAgentTool(BaseTool):
         input_properties = graph.input_schema.get("properties", {})
         required_fields = set(graph.input_schema.get("required", []))
         provided_inputs = set(params.inputs.keys())
+        valid_fields = set(input_properties.keys())
+
+        # Check for unknown input fields
+        unrecognized_fields = provided_inputs - valid_fields
+        if unrecognized_fields:
+            return InputValidationErrorResponse(
+                message=(
+                    f"Unknown input field(s) provided: {', '.join(sorted(unrecognized_fields))}. "
+                    f"Agent was not executed. Please use the correct field names from the schema."
+                ),
+                session_id=session_id,
+                unrecognized_fields=sorted(unrecognized_fields),
+                inputs=graph.input_schema,
+                graph_id=graph.id,
+                graph_version=graph.version,
+            )
 
         # If agent has inputs but none were provided AND use_defaults is not set,
         # always show what's available first so user can decide
@@ -355,7 +371,19 @@ class RunAgentTool(BaseTool):
 
     def _get_inputs_list(self, input_schema: dict[str, Any]) -> list[dict[str, Any]]:
         """Extract inputs list from schema."""
-        return get_inputs_from_schema(input_schema)
+        inputs_list = []
+        if isinstance(input_schema, dict) and "properties" in input_schema:
+            for field_name, field_schema in input_schema["properties"].items():
+                inputs_list.append(
+                    {
+                        "name": field_name,
+                        "title": field_schema.get("title", field_name),
+                        "type": field_schema.get("type", "string"),
+                        "description": field_schema.get("description", ""),
+                        "required": field_name in input_schema.get("required", []),
+                    }
+                )
+        return inputs_list
 
     def _get_execution_modes(self, graph: GraphModel) -> list[str]:
         """Get available execution modes for the graph."""
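A standalone sketch (not part of the diff) of the validation rule the hunk above adds: compare the provided input keys against the schema's `properties` and refuse to execute when unknown keys are present. The schema and inputs below are invented.

```python
from typing import Any


def find_unrecognized_fields(input_schema: dict[str, Any], inputs: dict[str, Any]) -> list[str]:
    # Any provided key that is not a declared property is "unrecognized".
    valid_fields = set(input_schema.get("properties", {}).keys())
    return sorted(set(inputs.keys()) - valid_fields)


schema = {"properties": {"topic": {"type": "string"}, "tone": {"type": "string"}}}
print(find_unrecognized_fields(schema, {"topic": "cats", "unknown_field": "x"}))
# -> ['unknown_field']  (the tool returns InputValidationErrorResponse instead of running)
```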
@@ -402,3 +402,42 @@ async def test_run_agent_schedule_without_name(setup_test_data):
     # Should return error about missing schedule_name
     assert result_data.get("type") == "error"
     assert "schedule_name" in result_data["message"].lower()
+
+
+@pytest.mark.asyncio(loop_scope="session")
+async def test_run_agent_rejects_unknown_input_fields(setup_test_data):
+    """Test that run_agent returns input_validation_error for unknown input fields."""
+    user = setup_test_data["user"]
+    store_submission = setup_test_data["store_submission"]
+
+    tool = RunAgentTool()
+    agent_marketplace_id = f"{user.email.split('@')[0]}/{store_submission.slug}"
+    session = make_session(user_id=user.id)
+
+    # Execute with unknown input field names
+    response = await tool.execute(
+        user_id=user.id,
+        session_id=str(uuid.uuid4()),
+        tool_call_id=str(uuid.uuid4()),
+        username_agent_slug=agent_marketplace_id,
+        inputs={
+            "unknown_field": "some value",
+            "another_unknown": "another value",
+        },
+        session=session,
+    )
+
+    assert response is not None
+    assert hasattr(response, "output")
+    assert isinstance(response.output, str)
+    result_data = orjson.loads(response.output)
+
+    # Should return input_validation_error type with unrecognized fields
+    assert result_data.get("type") == "input_validation_error"
+    assert "unrecognized_fields" in result_data
+    assert set(result_data["unrecognized_fields"]) == {
+        "another_unknown",
+        "unknown_field",
+    }
+    assert "inputs" in result_data  # Contains the valid schema
+    assert "Agent was not executed" in result_data["message"]
@@ -5,16 +5,17 @@ import uuid
 from collections import defaultdict
 from typing import Any
 
+from pydantic_core import PydanticUndefined
+
 from backend.api.features.chat.model import ChatSession
 from backend.data.block import get_block
 from backend.data.execution import ExecutionContext
-from backend.data.model import CredentialsFieldInfo, CredentialsMetaInput
+from backend.data.model import CredentialsMetaInput
 from backend.data.workspace import get_or_create_workspace
 from backend.integrations.creds_manager import IntegrationCredentialsManager
 from backend.util.exceptions import BlockError
 
 from .base import BaseTool
-from .helpers import get_inputs_from_schema
 from .models import (
     BlockOutputResponse,
     ErrorResponse,
@@ -23,10 +24,7 @@ from .models import (
     ToolResponseBase,
     UserReadiness,
 )
-from .utils import (
-    build_missing_credentials_from_field_info,
-    match_credentials_to_requirements,
-)
+from .utils import build_missing_credentials_from_field_info
 
 logger = logging.getLogger(__name__)
 
@@ -75,39 +73,90 @@ class RunBlockTool(BaseTool):
     def requires_auth(self) -> bool:
         return True
 
-    def _get_credentials_requirements(
-        self,
-        block: Any,
-    ) -> dict[str, CredentialsFieldInfo]:
-        """
-        Get credential requirements from block's input schema.
-
-        Args:
-            block: Block to get credentials for
-
-        Returns:
-            Dict mapping field names to CredentialsFieldInfo
-        """
-        credentials_fields_info = block.input_schema.get_credentials_fields_info()
-        return credentials_fields_info if credentials_fields_info else {}
-
     async def _check_block_credentials(
         self,
         user_id: str,
         block: Any,
+        input_data: dict[str, Any] | None = None,
     ) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
         """
         Check if user has required credentials for a block.
 
+        Args:
+            user_id: User ID
+            block: Block to check credentials for
+            input_data: Input data for the block (used to determine provider via discriminator)
+
         Returns:
             tuple[matched_credentials, missing_credentials]
         """
-        requirements = self._get_credentials_requirements(block)
+        matched_credentials: dict[str, CredentialsMetaInput] = {}
+        missing_credentials: list[CredentialsMetaInput] = []
+        input_data = input_data or {}
 
-        if not requirements:
-            return {}, []
+        # Get credential field info from block's input schema
+        credentials_fields_info = block.input_schema.get_credentials_fields_info()
+        if not credentials_fields_info:
+            return matched_credentials, missing_credentials
 
-        return await match_credentials_to_requirements(user_id, requirements)
+        # Get user's available credentials
+        creds_manager = IntegrationCredentialsManager()
+        available_creds = await creds_manager.store.get_all_creds(user_id)
+
+        for field_name, field_info in credentials_fields_info.items():
+            effective_field_info = field_info
+            if field_info.discriminator and field_info.discriminator_mapping:
+                # Get discriminator from input, falling back to schema default
+                discriminator_value = input_data.get(field_info.discriminator)
+                if discriminator_value is None:
+                    field = block.input_schema.model_fields.get(
+                        field_info.discriminator
+                    )
+                    if field and field.default is not PydanticUndefined:
+                        discriminator_value = field.default
+
+                if (
+                    discriminator_value
+                    and discriminator_value in field_info.discriminator_mapping
+                ):
+                    effective_field_info = field_info.discriminate(discriminator_value)
+                    logger.debug(
+                        f"Discriminated provider for {field_name}: "
+                        f"{discriminator_value} -> {effective_field_info.provider}"
+                    )
+
+            matching_cred = next(
+                (
+                    cred
+                    for cred in available_creds
+                    if cred.provider in effective_field_info.provider
+                    and cred.type in effective_field_info.supported_types
+                ),
+                None,
+            )
+
+            if matching_cred:
+                matched_credentials[field_name] = CredentialsMetaInput(
+                    id=matching_cred.id,
+                    provider=matching_cred.provider,  # type: ignore
+                    type=matching_cred.type,
+                    title=matching_cred.title,
+                )
+            else:
+                # Create a placeholder for the missing credential
+                provider = next(iter(effective_field_info.provider), "unknown")
+                cred_type = next(iter(effective_field_info.supported_types), "api_key")
+                missing_credentials.append(
+                    CredentialsMetaInput(
+                        id=field_name,
+                        provider=provider,  # type: ignore
+                        type=cred_type,  # type: ignore
+                        title=field_name.replace("_", " ").title(),
+                    )
+                )
+
+        return matched_credentials, missing_credentials
 
     async def _execute(
         self,
@@ -165,10 +214,9 @@ class RunBlockTool(BaseTool):
 
         logger.info(f"Executing block {block.name} ({block_id}) for user {user_id}")
 
-        # Check credentials
         creds_manager = IntegrationCredentialsManager()
         matched_credentials, missing_credentials = await self._check_block_credentials(
-            user_id, block
+            user_id, block, input_data
         )
 
         if missing_credentials:
@@ -299,7 +347,27 @@ class RunBlockTool(BaseTool):
 
     def _get_inputs_list(self, block: Any) -> list[dict[str, Any]]:
         """Extract non-credential inputs from block schema."""
+        inputs_list = []
         schema = block.input_schema.jsonschema()
+        properties = schema.get("properties", {})
+        required_fields = set(schema.get("required", []))
+
         # Get credential field names to exclude
         credentials_fields = set(block.input_schema.get_credentials_fields().keys())
-        return get_inputs_from_schema(schema, exclude_fields=credentials_fields)
+
+        for field_name, field_schema in properties.items():
+            # Skip credential fields
+            if field_name in credentials_fields:
+                continue
+
+            inputs_list.append(
+                {
+                    "name": field_name,
+                    "title": field_schema.get("title", field_name),
+                    "type": field_schema.get("type", "string"),
+                    "description": field_schema.get("description", ""),
+                    "required": field_name in required_fields,
+                }
+            )
+
+        return inputs_list
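A standalone sketch (not part of the diff) of the discriminator fallback added to `_check_block_credentials`: read the discriminator from the caller's input and, if absent, fall back to the default declared on the Pydantic input model. The example model and provider mapping below are invented.

```python
from pydantic import BaseModel
from pydantic_core import PydanticUndefined


class ExampleBlockInput(BaseModel):
    model_name: str = "gpt-4o-mini"  # hypothetical discriminator field with a default

# Hypothetical discriminator -> provider mapping.
DISCRIMINATOR_MAPPING = {"gpt-4o-mini": "openai", "claude-3-5-sonnet": "anthropic"}


def resolve_provider(input_data: dict, discriminator: str = "model_name") -> str | None:
    value = input_data.get(discriminator)
    if value is None:
        # Fall back to the schema default, exactly as the diff does via model_fields.
        field = ExampleBlockInput.model_fields.get(discriminator)
        if field and field.default is not PydanticUndefined:
            value = field.default
    return DISCRIMINATOR_MAPPING.get(value)


print(resolve_provider({}))                                   # 'openai' (from the default)
print(resolve_provider({"model_name": "claude-3-5-sonnet"}))  # 'anthropic'
```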
@@ -225,127 +225,6 @@ async def get_or_create_library_agent(
     return library_agents[0]
 
 
-async def get_user_credentials(user_id: str) -> list:
-    """
-    Get all available credentials for a user.
-
-    Args:
-        user_id: The user's ID
-
-    Returns:
-        List of user's credentials
-    """
-    creds_manager = IntegrationCredentialsManager()
-    return await creds_manager.store.get_all_creds(user_id)
-
-
-def find_matching_credential(
-    available_creds: list,
-    field_info: CredentialsFieldInfo,
-):
-    """
-    Find a credential that matches the required provider, type, and scopes.
-
-    Args:
-        available_creds: List of user's available credentials
-        field_info: CredentialsFieldInfo with provider, type, and scope requirements
-
-    Returns:
-        Matching credential or None
-    """
-    for cred in available_creds:
-        if cred.provider not in field_info.provider:
-            continue
-        if cred.type not in field_info.supported_types:
-            continue
-        if not _credential_has_required_scopes(cred, field_info):
-            continue
-        return cred
-    return None
-
-
-def create_credential_meta_from_match(
-    matching_cred,
-) -> CredentialsMetaInput:
-    """
-    Create a CredentialsMetaInput from a matched credential.
-
-    Args:
-        matching_cred: The matched credential object
-
-    Returns:
-        CredentialsMetaInput instance
-    """
-    return CredentialsMetaInput(
-        id=matching_cred.id,
-        provider=matching_cred.provider,  # type: ignore
-        type=matching_cred.type,
-        title=matching_cred.title,
-    )
-
-
-async def match_credentials_to_requirements(
-    user_id: str,
-    requirements: dict[str, CredentialsFieldInfo],
-) -> tuple[dict[str, CredentialsMetaInput], list[CredentialsMetaInput]]:
-    """
-    Match user's credentials against a dictionary of credential requirements.
-
-    This is the core matching logic shared by both graph and block credential matching.
-
-    Args:
-        user_id: The user's ID
-        requirements: Dict mapping field names to CredentialsFieldInfo
-
-    Returns:
-        tuple[matched_credentials dict, missing_credentials list]
-    """
-    matched: dict[str, CredentialsMetaInput] = {}
-    missing: list[CredentialsMetaInput] = []
-
-    if not requirements:
-        return matched, missing
-
-    available_creds = await get_user_credentials(user_id)
-
-    for field_name, field_info in requirements.items():
-        matching_cred = find_matching_credential(available_creds, field_info)
-
-        if matching_cred:
-            try:
-                matched[field_name] = create_credential_meta_from_match(matching_cred)
-            except Exception as e:
-                logger.error(
-                    f"Failed to create CredentialsMetaInput for field '{field_name}': "
-                    f"provider={matching_cred.provider}, type={matching_cred.type}, "
-                    f"credential_id={matching_cred.id}",
-                    exc_info=True,
-                )
-                provider = next(iter(field_info.provider), "unknown")
-                cred_type = next(iter(field_info.supported_types), "api_key")
-                missing.append(
-                    CredentialsMetaInput(
-                        id=field_name,
-                        provider=provider,  # type: ignore
-                        type=cred_type,  # type: ignore
-                        title=f"{field_name} (validation failed: {e})",
-                    )
-                )
-        else:
-            provider = next(iter(field_info.provider), "unknown")
-            cred_type = next(iter(field_info.supported_types), "api_key")
-            missing.append(
-                CredentialsMetaInput(
-                    id=field_name,
-                    provider=provider,  # type: ignore
-                    type=cred_type,  # type: ignore
-                    title=field_name.replace("_", " ").title(),
-                )
-            )
-
-    return matched, missing
-
-
 async def match_user_credentials_to_graph(
     user_id: str,
     graph: GraphModel,
@@ -363,6 +242,9 @@ async def match_user_credentials_to_graph(
     Returns:
         tuple[matched_credentials dict, missing_credential_descriptions list]
     """
+    graph_credentials_inputs: dict[str, CredentialsMetaInput] = {}
+    missing_creds: list[str] = []
+
     # Get aggregated credentials requirements from the graph
     aggregated_creds = graph.aggregate_credentials_inputs()
     logger.debug(
@@ -370,30 +252,69 @@ async def match_user_credentials_to_graph(
     )
 
     if not aggregated_creds:
-        return {}, []
+        return graph_credentials_inputs, missing_creds
 
-    # Convert aggregated format to simple requirements dict
-    requirements = {
-        field_name: field_info
-        for field_name, (field_info, _node_fields) in aggregated_creds.items()
-    }
+    # Get all available credentials for the user
+    creds_manager = IntegrationCredentialsManager()
+    available_creds = await creds_manager.store.get_all_creds(user_id)
 
-    # Use shared matching logic
-    matched, missing_list = await match_credentials_to_requirements(
-        user_id, requirements
-    )
-
-    # Convert missing list to string descriptions for backward compatibility
-    missing_descriptions = [
-        f"{cred.id} (requires provider={cred.provider}, type={cred.type})"
-        for cred in missing_list
-    ]
+    # For each required credential field, find a matching user credential
+    # field_info.provider is a frozenset because aggregate_credentials_inputs()
+    # combines requirements from multiple nodes. A credential matches if its
+    # provider is in the set of acceptable providers.
+    for credential_field_name, (
+        credential_requirements,
+        _node_fields,
+    ) in aggregated_creds.items():
+        # Find first matching credential by provider, type, and scopes
+        matching_cred = next(
+            (
+                cred
+                for cred in available_creds
+                if cred.provider in credential_requirements.provider
+                and cred.type in credential_requirements.supported_types
+                and _credential_has_required_scopes(cred, credential_requirements)
+            ),
+            None,
+        )
+
+        if matching_cred:
+            try:
+                graph_credentials_inputs[credential_field_name] = CredentialsMetaInput(
+                    id=matching_cred.id,
+                    provider=matching_cred.provider,  # type: ignore
+                    type=matching_cred.type,
+                    title=matching_cred.title,
+                )
+            except Exception as e:
+                logger.error(
+                    f"Failed to create CredentialsMetaInput for field '{credential_field_name}': "
+                    f"provider={matching_cred.provider}, type={matching_cred.type}, "
+                    f"credential_id={matching_cred.id}",
+                    exc_info=True,
+                )
+                missing_creds.append(
+                    f"{credential_field_name} (validation failed: {e})"
+                )
+        else:
+            # Build a helpful error message including scope requirements
+            error_parts = [
+                f"provider in {list(credential_requirements.provider)}",
+                f"type in {list(credential_requirements.supported_types)}",
+            ]
+            if credential_requirements.required_scopes:
+                error_parts.append(
+                    f"scopes including {list(credential_requirements.required_scopes)}"
+                )
+            missing_creds.append(
+                f"{credential_field_name} (requires {', '.join(error_parts)})"
+            )
 
     logger.info(
-        f"Credential matching complete: {len(matched)}/{len(aggregated_creds)} matched"
+        f"Credential matching complete: {len(graph_credentials_inputs)}/{len(aggregated_creds)} matched"
     )
 
-    return matched, missing_descriptions
+    return graph_credentials_inputs, missing_creds
 
 
 def _credential_has_required_scopes(
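A standalone sketch (not part of the diff) of the matching rule now inlined in `match_user_credentials_to_graph`: a stored credential satisfies a requirement when its provider and type are in the aggregated sets. The `Credential` and `Requirement` dataclasses are simplified stand-ins for the project's types, and the sample data is invented.

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class Credential:
    id: str
    provider: str
    type: str


@dataclass(frozen=True)
class Requirement:
    provider: frozenset[str]         # acceptable providers, aggregated across nodes
    supported_types: frozenset[str]  # acceptable credential types


def find_match(available: list[Credential], req: Requirement) -> Credential | None:
    # First credential whose provider and type are both acceptable, else None.
    return next(
        (c for c in available if c.provider in req.provider and c.type in req.supported_types),
        None,
    )


creds = [Credential("c1", "github", "oauth2"), Credential("c2", "openai", "api_key")]
req = Requirement(provider=frozenset({"openai"}), supported_types=frozenset({"api_key"}))
print(find_match(creds, req))  # Credential(id='c2', provider='openai', type='api_key')
```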
@@ -1,32 +0,0 @@
-"""Validation utilities."""
-
-import re
-
-_UUID_V4_PATTERN = re.compile(
-    r"[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89ab][a-f0-9]{3}-[a-f0-9]{12}",
-    re.IGNORECASE,
-)
-
-
-def is_uuid_v4(text: str) -> bool:
-    """Check if text is a valid UUID v4.
-
-    Args:
-        text: String to validate
-
-    Returns:
-        True if the text is a valid UUID v4, False otherwise
-    """
-    return bool(_UUID_V4_PATTERN.fullmatch(text.strip()))
-
-
-def extract_uuids(text: str) -> list[str]:
-    """Extract all UUID v4 strings from text.
-
-    Args:
-        text: String to search for UUIDs
-
-    Returns:
-        List of unique UUIDs found (lowercase)
-    """
-    return list({m.lower() for m in _UUID_V4_PATTERN.findall(text)})
@@ -1,10 +1,9 @@
 "use client";
+import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
+import { getOnboardingStatus, resolveResponse } from "@/app/api/helpers";
 import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
 import { useRouter } from "next/navigation";
 import { useEffect } from "react";
-import { resolveResponse, getOnboardingStatus } from "@/app/api/helpers";
-import { getV1OnboardingState } from "@/app/api/__generated__/endpoints/onboarding/onboarding";
-import { getHomepageRoute } from "@/lib/constants";
 
 export default function OnboardingPage() {
   const router = useRouter();
@@ -13,12 +12,10 @@ export default function OnboardingPage() {
     async function redirectToStep() {
       try {
         // Check if onboarding is enabled (also gets chat flag for redirect)
-        const { shouldShowOnboarding, isChatEnabled } =
-          await getOnboardingStatus();
-        const homepageRoute = getHomepageRoute(isChatEnabled);
+        const { shouldShowOnboarding } = await getOnboardingStatus();
 
         if (!shouldShowOnboarding) {
-          router.replace(homepageRoute);
+          router.replace("/");
           return;
         }
 
@@ -26,7 +23,7 @@ export default function OnboardingPage() {
 
         // Handle completed onboarding
         if (onboarding.completedSteps.includes("GET_RESULTS")) {
-          router.replace(homepageRoute);
+          router.replace("/");
           return;
         }
 
@@ -1,9 +1,8 @@
-import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
-import { getHomepageRoute } from "@/lib/constants";
-import BackendAPI from "@/lib/autogpt-server-api";
-import { NextResponse } from "next/server";
-import { revalidatePath } from "next/cache";
 import { getOnboardingStatus } from "@/app/api/helpers";
+import BackendAPI from "@/lib/autogpt-server-api";
+import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
+import { revalidatePath } from "next/cache";
+import { NextResponse } from "next/server";
 
 // Handle the callback to complete the user session login
 export async function GET(request: Request) {
@@ -27,13 +26,12 @@ export async function GET(request: Request) {
       await api.createUser();
 
       // Get onboarding status from backend (includes chat flag evaluated for this user)
-      const { shouldShowOnboarding, isChatEnabled } =
-        await getOnboardingStatus();
+      const { shouldShowOnboarding } = await getOnboardingStatus();
       if (shouldShowOnboarding) {
         next = "/onboarding";
         revalidatePath("/onboarding", "layout");
       } else {
-        next = getHomepageRoute(isChatEnabled);
+        next = "/";
         revalidatePath(next, "layout");
       }
     } catch (createUserError) {
@@ -1,6 +1,13 @@
-import type { ReactNode } from "react";
+"use client";
+import { FeatureFlagPage } from "@/services/feature-flags/FeatureFlagPage";
+import { Flag } from "@/services/feature-flags/use-get-flag";
+import { type ReactNode } from "react";
 import { CopilotShell } from "./components/CopilotShell/CopilotShell";
 
 export default function CopilotLayout({ children }: { children: ReactNode }) {
-  return <CopilotShell>{children}</CopilotShell>;
+  return (
+    <FeatureFlagPage flag={Flag.CHAT} whenDisabled="/library">
+      <CopilotShell>{children}</CopilotShell>
+    </FeatureFlagPage>
+  );
 }
@@ -14,14 +14,8 @@ export default function CopilotPage() {
   const isInterruptModalOpen = useCopilotStore((s) => s.isInterruptModalOpen);
   const confirmInterrupt = useCopilotStore((s) => s.confirmInterrupt);
   const cancelInterrupt = useCopilotStore((s) => s.cancelInterrupt);
-  const {
-    greetingName,
-    quickActions,
-    isLoading,
-    hasSession,
-    initialPrompt,
-    isReady,
-  } = state;
+  const { greetingName, quickActions, isLoading, hasSession, initialPrompt } =
+    state;
   const {
     handleQuickAction,
     startChatWithPrompt,
@@ -29,8 +23,6 @@ export default function CopilotPage() {
     handleStreamingChange,
   } = handlers;
 
-  if (!isReady) return null;
-
   if (hasSession) {
     return (
       <div className="flex h-full flex-col">
@@ -3,18 +3,11 @@ import {
   postV2CreateSession,
 } from "@/app/api/__generated__/endpoints/chat/chat";
 import { useToast } from "@/components/molecules/Toast/use-toast";
-import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { useOnboarding } from "@/providers/onboarding/onboarding-provider";
-import {
-  Flag,
-  type FlagValues,
-  useGetFlag,
-} from "@/services/feature-flags/use-get-flag";
 import { SessionKey, sessionStorage } from "@/services/storage/session-storage";
 import * as Sentry from "@sentry/nextjs";
 import { useQueryClient } from "@tanstack/react-query";
-import { useFlags } from "launchdarkly-react-client-sdk";
 import { useRouter } from "next/navigation";
 import { useEffect } from "react";
 import { useCopilotStore } from "./copilot-page-store";
@@ -33,22 +26,6 @@ export function useCopilotPage() {
   const isCreating = useCopilotStore((s) => s.isCreatingSession);
   const setIsCreating = useCopilotStore((s) => s.setIsCreatingSession);
 
-  // Complete VISIT_COPILOT onboarding step to grant $5 welcome bonus
-  useEffect(() => {
-    if (isLoggedIn) {
-      completeStep("VISIT_COPILOT");
-    }
-  }, [completeStep, isLoggedIn]);
-
-  const isChatEnabled = useGetFlag(Flag.CHAT);
-  const flags = useFlags<FlagValues>();
-  const homepageRoute = getHomepageRoute(isChatEnabled);
-  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
-  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
-  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
-  const isFlagReady =
-    !isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;
-
   const greetingName = getGreetingName(user);
   const quickActions = getQuickActions();
 
@@ -58,11 +35,8 @@ export function useCopilotPage() {
     : undefined;
 
   useEffect(() => {
-    if (!isFlagReady) return;
-    if (isChatEnabled === false) {
-      router.replace(homepageRoute);
-    }
-  }, [homepageRoute, isChatEnabled, isFlagReady, router]);
+    if (isLoggedIn) completeStep("VISIT_COPILOT");
+  }, [completeStep, isLoggedIn]);
 
   async function startChatWithPrompt(prompt: string) {
     if (!prompt?.trim()) return;
@@ -116,7 +90,6 @@ export function useCopilotPage() {
       isLoading: isUserLoading,
       hasSession,
       initialPrompt,
-      isReady: isFlagReady && isChatEnabled !== false && isLoggedIn,
     },
     handlers: {
       handleQuickAction,
@@ -1,8 +1,6 @@
 "use client";
 
 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
-import { getHomepageRoute } from "@/lib/constants";
-import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { useSearchParams } from "next/navigation";
 import { Suspense } from "react";
 import { getErrorDetails } from "./helpers";
@@ -11,8 +9,6 @@ function ErrorPageContent() {
   const searchParams = useSearchParams();
   const errorMessage = searchParams.get("message");
   const errorDetails = getErrorDetails(errorMessage);
-  const isChatEnabled = useGetFlag(Flag.CHAT);
-  const homepageRoute = getHomepageRoute(isChatEnabled);
 
   function handleRetry() {
     // Auth-related errors should redirect to login
@@ -30,7 +26,7 @@ function ErrorPageContent() {
       }, 2000);
     } else {
       // For server/network errors, go to home
-      window.location.href = homepageRoute;
+      window.location.href = "/";
     }
   }
 
@@ -1,6 +1,5 @@
 "use server";
 
-import { getHomepageRoute } from "@/lib/constants";
 import BackendAPI from "@/lib/autogpt-server-api";
 import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
 import { loginFormSchema } from "@/types/auth";
@@ -38,10 +37,8 @@ export async function login(email: string, password: string) {
   await api.createUser();
 
   // Get onboarding status from backend (includes chat flag evaluated for this user)
-  const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
-  const next = shouldShowOnboarding
-    ? "/onboarding"
-    : getHomepageRoute(isChatEnabled);
+  const { shouldShowOnboarding } = await getOnboardingStatus();
+  const next = shouldShowOnboarding ? "/onboarding" : "/";
 
   return {
     success: true,
@@ -1,8 +1,6 @@
 import { useToast } from "@/components/molecules/Toast/use-toast";
-import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { environment } from "@/services/environment";
-import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { loginFormSchema, LoginProvider } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
 import { useRouter, useSearchParams } from "next/navigation";
@@ -22,17 +20,15 @@ export function useLoginPage() {
   const [isGoogleLoading, setIsGoogleLoading] = useState(false);
   const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
   const isCloudEnv = environment.isCloud();
-  const isChatEnabled = useGetFlag(Flag.CHAT);
-  const homepageRoute = getHomepageRoute(isChatEnabled);
 
   // Get redirect destination from 'next' query parameter
   const nextUrl = searchParams.get("next");
 
   useEffect(() => {
     if (isLoggedIn && !isLoggingIn) {
-      router.push(nextUrl || homepageRoute);
+      router.push(nextUrl || "/");
     }
-  }, [homepageRoute, isLoggedIn, isLoggingIn, nextUrl, router]);
+  }, [isLoggedIn, isLoggingIn, nextUrl, router]);
 
   const form = useForm<z.infer<typeof loginFormSchema>>({
     resolver: zodResolver(loginFormSchema),
@@ -98,7 +94,7 @@ export function useLoginPage() {
       }
 
       // Prefer URL's next parameter, then use backend-determined route
-      router.replace(nextUrl || result.next || homepageRoute);
+      router.replace(nextUrl || result.next || "/");
     } catch (error) {
       toast({
         title:
@@ -1,6 +1,5 @@
 "use server";
 
-import { getHomepageRoute } from "@/lib/constants";
 import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
 import { signupFormSchema } from "@/types/auth";
 import * as Sentry from "@sentry/nextjs";
@@ -59,10 +58,8 @@ export async function signup(
     }
 
     // Get onboarding status from backend (includes chat flag evaluated for this user)
-    const { shouldShowOnboarding, isChatEnabled } = await getOnboardingStatus();
-    const next = shouldShowOnboarding
-      ? "/onboarding"
-      : getHomepageRoute(isChatEnabled);
+    const { shouldShowOnboarding } = await getOnboardingStatus();
+    const next = shouldShowOnboarding ? "/onboarding" : "/";
 
     return { success: true, next };
   } catch (err) {
@@ -1,8 +1,6 @@
 import { useToast } from "@/components/molecules/Toast/use-toast";
-import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { environment } from "@/services/environment";
-import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { LoginProvider, signupFormSchema } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
 import { useRouter, useSearchParams } from "next/navigation";
@@ -22,17 +20,15 @@ export function useSignupPage() {
   const [isGoogleLoading, setIsGoogleLoading] = useState(false);
   const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
   const isCloudEnv = environment.isCloud();
-  const isChatEnabled = useGetFlag(Flag.CHAT);
-  const homepageRoute = getHomepageRoute(isChatEnabled);
 
   // Get redirect destination from 'next' query parameter
   const nextUrl = searchParams.get("next");
 
   useEffect(() => {
     if (isLoggedIn && !isSigningUp) {
-      router.push(nextUrl || homepageRoute);
+      router.push(nextUrl || "/");
     }
-  }, [homepageRoute, isLoggedIn, isSigningUp, nextUrl, router]);
+  }, [isLoggedIn, isSigningUp, nextUrl, router]);
 
   const form = useForm<z.infer<typeof signupFormSchema>>({
     resolver: zodResolver(signupFormSchema),
@@ -133,7 +129,7 @@ export function useSignupPage() {
       }
 
       // Prefer the URL's next parameter, then result.next (for onboarding), then default
-      const redirectTo = nextUrl || result.next || homepageRoute;
+      const redirectTo = nextUrl || result.next || "/";
       router.replace(redirectTo);
     } catch (error) {
       setIsLoading(false);
@@ -181,6 +181,5 @@ export async function getOnboardingStatus() {
   const isCompleted = onboarding.completedSteps.includes("CONGRATS");
   return {
     shouldShowOnboarding: status.is_onboarding_enabled && !isCompleted,
-    isChatEnabled: status.is_chat_enabled,
   };
 }
@@ -1,27 +1,15 @@
 "use client";
 
-import { getHomepageRoute } from "@/lib/constants";
-import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
 import { useRouter } from "next/navigation";
 import { useEffect } from "react";
 
 export default function Page() {
-  const isChatEnabled = useGetFlag(Flag.CHAT);
   const router = useRouter();
-  const homepageRoute = getHomepageRoute(isChatEnabled);
-  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
-  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
-  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
-  const isFlagReady =
-    !isLaunchDarklyConfigured || typeof isChatEnabled === "boolean";
 
-  useEffect(
-    function redirectToHomepage() {
-      if (!isFlagReady) return;
-      router.replace(homepageRoute);
-    },
-    [homepageRoute, isFlagReady, router],
-  );
+  useEffect(() => {
+    router.replace("/copilot");
+  }, [router]);
 
-  return null;
+  return <LoadingSpinner size="large" cover />;
 }
@@ -1,7 +1,6 @@
 "use client";
 
 import { IconLaptop } from "@/components/__legacy__/ui/icons";
-import { getHomepageRoute } from "@/lib/constants";
 import { cn } from "@/lib/utils";
 import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { ListChecksIcon } from "@phosphor-icons/react/dist/ssr";
@@ -24,11 +23,11 @@ interface Props {
 export function NavbarLink({ name, href }: Props) {
   const pathname = usePathname();
   const isChatEnabled = useGetFlag(Flag.CHAT);
-  const homepageRoute = getHomepageRoute(isChatEnabled);
+  const expectedHomeRoute = isChatEnabled ? "/copilot" : "/library";
 
   const isActive =
-    href === homepageRoute
-      ? pathname === "/" || pathname.startsWith(homepageRoute)
+    href === expectedHomeRoute
+      ? pathname === "/" || pathname.startsWith(expectedHomeRoute)
       : pathname.includes(href);
 
   return (
@@ -66,7 +66,7 @@ export default function useAgentGraph(
   >(null);
   const [xyNodes, setXYNodes] = useState<CustomNode[]>([]);
   const [xyEdges, setXYEdges] = useState<CustomEdge[]>([]);
-  const betaBlocks = useGetFlag(Flag.BETA_BLOCKS);
+  const betaBlocks = useGetFlag(Flag.BETA_BLOCKS) as string[];
 
   // Filter blocks based on beta flags
   const availableBlocks = useMemo(() => {
@@ -11,10 +11,3 @@ export const API_KEY_HEADER_NAME = "X-API-Key";
 
 // Layout
 export const NAVBAR_HEIGHT_PX = 60;
-
-// Routes
-export function getHomepageRoute(isChatEnabled?: boolean | null): string {
-  if (isChatEnabled === true) return "/copilot";
-  if (isChatEnabled === false) return "/library";
-  return "/";
-}
@@ -1,4 +1,3 @@
-import { getHomepageRoute } from "@/lib/constants";
 import { environment } from "@/services/environment";
 import { Key, storage } from "@/services/storage/local-storage";
 import { type CookieOptions } from "@supabase/ssr";
@@ -71,7 +70,7 @@ export function getRedirectPath(
   }
 
   if (isAdminPage(path) && userRole !== "admin") {
-    return getHomepageRoute();
+    return "/";
   }
 
   return null;
@@ -1,4 +1,3 @@
-import { getHomepageRoute } from "@/lib/constants";
 import { environment } from "@/services/environment";
 import { createServerClient } from "@supabase/ssr";
 import { NextResponse, type NextRequest } from "next/server";
@@ -67,7 +66,7 @@ export async function updateSession(request: NextRequest) {
 
   // 2. Check if user is authenticated but lacks admin role when accessing admin pages
   if (user && userRole !== "admin" && isAdminPage(pathname)) {
-    url.pathname = getHomepageRoute();
+    url.pathname = "/";
     return NextResponse.redirect(url);
   }
 
@@ -23,9 +23,7 @@ import {
   WebSocketNotification,
 } from "@/lib/autogpt-server-api";
 import { useBackendAPI } from "@/lib/autogpt-server-api/context";
-import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
-import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import Link from "next/link";
 import { usePathname, useRouter } from "next/navigation";
 import {
@@ -104,8 +102,6 @@ export default function OnboardingProvider({
   const pathname = usePathname();
   const router = useRouter();
   const { isLoggedIn } = useSupabase();
-  const isChatEnabled = useGetFlag(Flag.CHAT);
-  const homepageRoute = getHomepageRoute(isChatEnabled);
 
   useOnboardingTimezoneDetection();
 
@@ -150,7 +146,7 @@ export default function OnboardingProvider({
       if (isOnOnboardingRoute) {
         const enabled = await resolveResponse(getV1IsOnboardingEnabled());
         if (!enabled) {
-          router.push(homepageRoute);
+          router.push("/");
           return;
         }
       }
@@ -162,7 +158,7 @@ export default function OnboardingProvider({
        isOnOnboardingRoute &&
        shouldRedirectFromOnboarding(onboarding.completedSteps, pathname)
      ) {
-       router.push(homepageRoute);
+       router.push("/");
      }
    } catch (error) {
      console.error("Failed to initialize onboarding:", error);
@@ -177,7 +173,7 @@ export default function OnboardingProvider({
     }
 
     initializeOnboarding();
-  }, [api, homepageRoute, isOnOnboardingRoute, router, isLoggedIn, pathname]);
+  }, [api, isOnOnboardingRoute, router, isLoggedIn, pathname]);
 
   const handleOnboardingNotification = useCallback(
     (notification: WebSocketNotification) => {
@@ -83,6 +83,10 @@ function getPostHogCredentials() {
   };
 }
 
+function getLaunchDarklyClientId() {
+  return process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
+}
+
 function isProductionBuild() {
   return process.env.NODE_ENV === "production";
 }
@@ -120,7 +124,10 @@ function isVercelPreview() {
 }
 
 function areFeatureFlagsEnabled() {
-  return process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "enabled";
+  return (
+    process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true" &&
+    Boolean(process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID)
+  );
 }
 
 function isPostHogEnabled() {
@@ -143,6 +150,7 @@ export const environment = {
   getSupabaseAnonKey,
   getPreviewStealingDev,
   getPostHogCredentials,
+  getLaunchDarklyClientId,
   // Assertions
   isServerSide,
   isClientSide,
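
Note: the old areFeatureFlagsEnabled compared the env var against "enabled" while other call sites in this compare (the LaunchDarkly provider, the flag hook, the old root page) compared against "true"; the change unifies on "true" plus a client-ID check, exposed through the environment service. A minimal, hypothetical call site (not a file in this diff; both methods exist on the exported environment object shown above):

import { environment } from "@/services/environment";

// Illustration only: flags are read from LaunchDarkly only when the env flag is the
// string "true" AND a client ID is configured; otherwise useGetFlag serves defaults.
export function describeFlagSource(): string {
  return environment.areFeatureFlagsEnabled()
    ? `LaunchDarkly (client ${environment.getLaunchDarklyClientId()})`
    : "static defaults";
}
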
@@ -0,0 +1,59 @@
+"use client";
+
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
+import { useLDClient } from "launchdarkly-react-client-sdk";
+import { useRouter } from "next/navigation";
+import { ReactNode, useEffect, useState } from "react";
+import { environment } from "../environment";
+import { Flag, useGetFlag } from "./use-get-flag";
+
+interface FeatureFlagRedirectProps {
+  flag: Flag;
+  whenDisabled: string;
+  children: ReactNode;
+}
+
+export function FeatureFlagPage({
+  flag,
+  whenDisabled,
+  children,
+}: FeatureFlagRedirectProps) {
+  const [isLoading, setIsLoading] = useState(true);
+  const router = useRouter();
+  const flagValue = useGetFlag(flag);
+  const ldClient = useLDClient();
+  const ldEnabled = environment.areFeatureFlagsEnabled();
+  const ldReady = Boolean(ldClient);
+  const flagEnabled = Boolean(flagValue);
+
+  useEffect(() => {
+    const initialize = async () => {
+      if (!ldEnabled) {
+        router.replace(whenDisabled);
+        setIsLoading(false);
+        return;
+      }
+
+      // Wait for LaunchDarkly to initialize when enabled to prevent race conditions
+      if (ldEnabled && !ldReady) return;
+
+      try {
+        await ldClient?.waitForInitialization();
+        if (!flagEnabled) router.replace(whenDisabled);
+      } catch (error) {
+        console.error(error);
+        router.replace(whenDisabled);
+      } finally {
+        setIsLoading(false);
+      }
+    };
+
+    initialize();
+  }, [ldReady, flagEnabled]);
+
+  return isLoading || !flagEnabled ? (
+    <LoadingSpinner size="large" cover />
+  ) : (
+    <>{children}</>
+  );
+}
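
A minimal usage sketch for the new FeatureFlagPage component may help orientation. The component name and props come from the diff above; the import path and the layout file are assumptions, since the compare does not show a call site:

// Hypothetical layout: render a chat-only route, bouncing to /library when the
// "chat" flag is off or LaunchDarkly is disabled.
import { FeatureFlagPage } from "@/services/feature-flags/FeatureFlagPage";
import { Flag } from "@/services/feature-flags/use-get-flag";
import type { ReactNode } from "react";

export default function CopilotLayout({ children }: { children: ReactNode }) {
  return (
    <FeatureFlagPage flag={Flag.CHAT} whenDisabled="/library">
      {children}
    </FeatureFlagPage>
  );
}
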
@@ -0,0 +1,51 @@
+"use client";
+
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
+import { useLDClient } from "launchdarkly-react-client-sdk";
+import { useRouter } from "next/navigation";
+import { useEffect } from "react";
+import { environment } from "../environment";
+import { Flag, useGetFlag } from "./use-get-flag";
+
+interface FeatureFlagRedirectProps {
+  flag: Flag;
+  whenEnabled: string;
+  whenDisabled: string;
+}
+
+export function FeatureFlagRedirect({
+  flag,
+  whenEnabled,
+  whenDisabled,
+}: FeatureFlagRedirectProps) {
+  const router = useRouter();
+  const flagValue = useGetFlag(flag);
+  const ldEnabled = environment.areFeatureFlagsEnabled();
+  const ldClient = useLDClient();
+  const ldReady = Boolean(ldClient);
+  const flagEnabled = Boolean(flagValue);
+
+  useEffect(() => {
+    const initialize = async () => {
+      if (!ldEnabled) {
+        router.replace(whenDisabled);
+        return;
+      }
+
+      // Wait for LaunchDarkly to initialize when enabled to prevent race conditions
+      if (ldEnabled && !ldReady) return;
+
+      try {
+        await ldClient?.waitForInitialization();
+        router.replace(flagEnabled ? whenEnabled : whenDisabled);
+      } catch (error) {
+        console.error(error);
+        router.replace(whenDisabled);
+      }
+    };
+
+    initialize();
+  }, [ldReady, flagEnabled]);
+
+  return <LoadingSpinner size="large" cover />;
+}
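
Likewise, a hypothetical call site for FeatureFlagRedirect (file location and import path assumed; only the component name and props are from the diff):

// Hypothetical landing page that forwards to the flag-dependent home route
// instead of hard-coding it.
import { FeatureFlagRedirect } from "@/services/feature-flags/FeatureFlagRedirect";
import { Flag } from "@/services/feature-flags/use-get-flag";

export default function HomeRedirectPage() {
  return (
    <FeatureFlagRedirect
      flag={Flag.CHAT}
      whenEnabled="/copilot"
      whenDisabled="/library"
    />
  );
}
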
@@ -1,5 +1,6 @@
 "use client";
 
+import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import * as Sentry from "@sentry/nextjs";
 import { LDProvider } from "launchdarkly-react-client-sdk";
@@ -7,17 +8,17 @@ import type { ReactNode } from "react";
 import { useMemo } from "react";
 import { environment } from "../environment";
 
-const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
-const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
 const LAUNCHDARKLY_INIT_TIMEOUT_MS = 5000;
 
 export function LaunchDarklyProvider({ children }: { children: ReactNode }) {
   const { user, isUserLoading } = useSupabase();
-  const isCloud = environment.isCloud();
-  const isLaunchDarklyConfigured = isCloud && envEnabled && clientId;
+  const envEnabled = environment.areFeatureFlagsEnabled();
+  const clientId = environment.getLaunchDarklyClientId();
 
   const context = useMemo(() => {
-    if (isUserLoading || !user) {
+    if (isUserLoading) return;
+
+    if (!user) {
       return {
         kind: "user" as const,
         key: "anonymous",
@@ -36,15 +37,17 @@ export function LaunchDarklyProvider({ children }: { children: ReactNode }) {
     };
   }, [user, isUserLoading]);
 
-  if (!isLaunchDarklyConfigured) {
+  if (!envEnabled) {
     return <>{children}</>;
   }
 
+  if (isUserLoading) {
+    return <LoadingSpinner size="large" cover />;
+  }
+
   return (
     <LDProvider
-      // Add this key prop. It will be 'anonymous' when logged out,
-      key={context.key}
-      clientSideID={clientId}
+      clientSideID={clientId ?? ""}
       context={context}
       timeout={LAUNCHDARKLY_INIT_TIMEOUT_MS}
       reactOptions={{ useCamelCaseFlagKeys: false }}
@@ -1,6 +1,7 @@
 "use client";
 
 import { DEFAULT_SEARCH_TERMS } from "@/app/(platform)/marketplace/components/HeroSection/helpers";
+import { environment } from "@/services/environment";
 import { useFlags } from "launchdarkly-react-client-sdk";
 
 export enum Flag {
@@ -18,24 +19,9 @@ export enum Flag {
   CHAT = "chat",
 }
 
-export type FlagValues = {
-  [Flag.BETA_BLOCKS]: string[];
-  [Flag.NEW_BLOCK_MENU]: boolean;
-  [Flag.NEW_AGENT_RUNS]: boolean;
-  [Flag.GRAPH_SEARCH]: boolean;
-  [Flag.ENABLE_ENHANCED_OUTPUT_HANDLING]: boolean;
-  [Flag.NEW_FLOW_EDITOR]: boolean;
-  [Flag.BUILDER_VIEW_SWITCH]: boolean;
-  [Flag.SHARE_EXECUTION_RESULTS]: boolean;
-  [Flag.AGENT_FAVORITING]: boolean;
-  [Flag.MARKETPLACE_SEARCH_TERMS]: string[];
-  [Flag.ENABLE_PLATFORM_PAYMENT]: boolean;
-  [Flag.CHAT]: boolean;
-};
-
 const isPwMockEnabled = process.env.NEXT_PUBLIC_PW_TEST === "true";
 
-const mockFlags = {
+const defaultFlags = {
   [Flag.BETA_BLOCKS]: [],
   [Flag.NEW_BLOCK_MENU]: false,
   [Flag.NEW_AGENT_RUNS]: false,
@@ -50,17 +36,16 @@ const mockFlags = {
   [Flag.CHAT]: false,
 };
 
-export function useGetFlag<T extends Flag>(flag: T): FlagValues[T] | null {
+type FlagValues = typeof defaultFlags;
+
+export function useGetFlag<T extends Flag>(flag: T): FlagValues[T] {
   const currentFlags = useFlags<FlagValues>();
   const flagValue = currentFlags[flag];
+  const areFlagsEnabled = environment.areFeatureFlagsEnabled();
 
-  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
-  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
-  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
-
-  if (!isLaunchDarklyConfigured || isPwMockEnabled) {
-    return mockFlags[flag];
+  if (!areFlagsEnabled || isPwMockEnabled) {
+    return defaultFlags[flag];
   }
 
-  return flagValue ?? mockFlags[flag];
+  return flagValue ?? defaultFlags[flag];
 }
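
A short sketch of what the reworked hook now yields. This is an inference from the code above, not part of the diff: FlagValues is inferred from defaultFlags, so useGetFlag returns concrete values and never null; the empty array default for BETA_BLOCKS infers as never[], which is presumably why useAgentGraph adds the `as string[]` cast earlier in this compare.

import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";

// Hypothetical custom hook, for illustration only.
export function useFlagDefaultsExample() {
  const chatEnabled = useGetFlag(Flag.CHAT); // boolean; false when LaunchDarkly is off
  const betaBlocks = useGetFlag(Flag.BETA_BLOCKS) as string[]; // cast mirrors useAgentGraph
  return { chatEnabled, betaBlocks };
}
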

classic/frontend/.gitignore (vendored): 1 change

@@ -8,6 +8,7 @@
 .buildlog/
 .history
 .svn/
+.next/
 migrate_working_dir/
 
 # IntelliJ related

classic/frontend/build/web/flutter_service_worker.js (generated): 60 changes

@@ -3,45 +3,45 @@ const MANIFEST = 'flutter-app-manifest';
 const TEMP = 'flutter-temp-cache';
 const CACHE_NAME = 'flutter-app-cache';
 
-const RESOURCES = {"canvaskit/skwasm.worker.js": "51253d3321b11ddb8d73fa8aa87d3b15",
-"canvaskit/skwasm.js": "95f16c6690f955a45b2317496983dbe9",
-"canvaskit/canvaskit.wasm": "d9f69e0f428f695dc3d66b3a83a4aa8e",
-"canvaskit/skwasm.wasm": "d1fde2560be92c0b07ad9cf9acb10d05",
-"canvaskit/canvaskit.js": "5caccb235fad20e9b72ea6da5a0094e6",
-"canvaskit/chromium/canvaskit.wasm": "393ec8fb05d94036734f8104fa550a67",
-"canvaskit/chromium/canvaskit.js": "ffb2bb6484d5689d91f393b60664d530",
-"icons/Icon-maskable-192.png": "c457ef57daa1d16f64b27b786ec2ea3c",
-"icons/Icon-maskable-512.png": "301a7604d45b3e739efc881eb04896ea",
+const RESOURCES = {"flutter.js": "6fef97aeca90b426343ba6c5c9dc5d4a",
 "icons/Icon-512.png": "96e752610906ba2a93c65f8abe1645f1",
+"icons/Icon-maskable-512.png": "301a7604d45b3e739efc881eb04896ea",
 "icons/Icon-192.png": "ac9a721a12bbc803b44f645561ecb1e1",
+"icons/Icon-maskable-192.png": "c457ef57daa1d16f64b27b786ec2ea3c",
 "manifest.json": "0fa552613b8ec0fda5cda565914e3b16",
-"favicon.png": "5dcef449791fa27946b3d35ad8803796",
-"version.json": "46a52461e018faa623d9196334aa3f50",
-"index.html": "e6981504a32bf86f892909c1875df208",
-"/": "e6981504a32bf86f892909c1875df208",
-"main.dart.js": "6fcbf8bbcb0a76fae9029f72ac7fbdc3",
-"assets/AssetManifest.json": "1b1e4a4276722b65eb1ef765e2991840",
-"assets/packages/cupertino_icons/assets/CupertinoIcons.ttf": "055d9e87e4a40dbf72b2af1a20865d57",
-"assets/packages/fluttertoast/assets/toastify.js": "56e2c9cedd97f10e7e5f1cebd85d53e3",
-"assets/packages/fluttertoast/assets/toastify.css": "a85675050054f179444bc5ad70ffc635",
+"index.html": "3442c510a9ea217672c82e799ae070f7",
+"/": "3442c510a9ea217672c82e799ae070f7",
 "assets/shaders/ink_sparkle.frag": "f8b80e740d33eb157090be4e995febdf",
-"assets/fonts/MaterialIcons-Regular.otf": "245e0462249d95ad589a087f1c9f58e1",
-"assets/assets/images/twitter_logo.png": "af6c11b96a5e732b8dfda86a2351ecab",
-"assets/assets/images/discord_logo.png": "0e4a4162c5de8665a7d63ae9665405ae",
-"assets/assets/images/google_logo.svg.png": "0e29f8e1acfb8996437dbb2b0f591f19",
-"assets/assets/images/autogpt_logo.png": "6a5362a7d1f2f840e43ee259e733476c",
-"assets/assets/images/github_logo.svg.png": "ba087b073efdc4996b035d3a12bad0e4",
-"assets/assets/scrape_synthesize_tree_structure.json": "a9665c1b465bb0cb939c7210f2bf0b13",
+"assets/assets/tree_structure.json": "cda9b1a239f956c547411efad9f7c794",
 "assets/assets/coding_tree_structure.json": "017a857cf3e274346a0a7eab4ce02eed",
 "assets/assets/general_tree_structure.json": "41dfbcdc2349dcdda2b082e597c6d5ee",
-"assets/assets/google_logo.svg.png": "0e29f8e1acfb8996437dbb2b0f591f19",
-"assets/assets/tree_structure.json": "cda9b1a239f956c547411efad9f7c794",
-"assets/assets/data_tree_structure.json": "5f9627548304155821968182f3883ca7",
 "assets/assets/github_logo.svg.png": "ba087b073efdc4996b035d3a12bad0e4",
+"assets/assets/images/discord_logo.png": "0e4a4162c5de8665a7d63ae9665405ae",
+"assets/assets/images/github_logo.svg.png": "ba087b073efdc4996b035d3a12bad0e4",
+"assets/assets/images/twitter_logo.png": "af6c11b96a5e732b8dfda86a2351ecab",
+"assets/assets/images/google_logo.svg.png": "0e29f8e1acfb8996437dbb2b0f591f19",
+"assets/assets/images/autogpt_logo.png": "6a5362a7d1f2f840e43ee259e733476c",
+"assets/assets/google_logo.svg.png": "0e29f8e1acfb8996437dbb2b0f591f19",
+"assets/assets/scrape_synthesize_tree_structure.json": "a9665c1b465bb0cb939c7210f2bf0b13",
+"assets/assets/data_tree_structure.json": "5f9627548304155821968182f3883ca7",
+"assets/fonts/MaterialIcons-Regular.otf": "245e0462249d95ad589a087f1c9f58e1",
 "assets/NOTICES": "28ba0c63fc6e4d1ef829af7441e27f78",
-"assets/AssetManifest.bin": "791447d17744ac2ade3999c1672fdbe8",
+"assets/packages/fluttertoast/assets/toastify.css": "a85675050054f179444bc5ad70ffc635",
+"assets/packages/fluttertoast/assets/toastify.js": "56e2c9cedd97f10e7e5f1cebd85d53e3",
+"assets/packages/cupertino_icons/assets/CupertinoIcons.ttf": "055d9e87e4a40dbf72b2af1a20865d57",
 "assets/FontManifest.json": "dc3d03800ccca4601324923c0b1d6d57",
-"flutter.js": "6fef97aeca90b426343ba6c5c9dc5d4a"};
+"assets/AssetManifest.bin": "791447d17744ac2ade3999c1672fdbe8",
+"assets/AssetManifest.json": "1b1e4a4276722b65eb1ef765e2991840",
+"canvaskit/chromium/canvaskit.wasm": "393ec8fb05d94036734f8104fa550a67",
+"canvaskit/chromium/canvaskit.js": "ffb2bb6484d5689d91f393b60664d530",
+"canvaskit/skwasm.worker.js": "51253d3321b11ddb8d73fa8aa87d3b15",
+"canvaskit/skwasm.js": "95f16c6690f955a45b2317496983dbe9",
+"canvaskit/canvaskit.wasm": "d9f69e0f428f695dc3d66b3a83a4aa8e",
+"canvaskit/canvaskit.js": "5caccb235fad20e9b72ea6da5a0094e6",
+"canvaskit/skwasm.wasm": "d1fde2560be92c0b07ad9cf9acb10d05",
+"favicon.png": "5dcef449791fa27946b3d35ad8803796",
+"version.json": "46a52461e018faa623d9196334aa3f50",
+"main.dart.js": "6fcbf8bbcb0a76fae9029f72ac7fbdc3"};
 // The application shell files that are downloaded before a service worker can
 // start.
 const CORE = ["main.dart.js",

classic/frontend/build/web/index.html (generated): 2 changes

@@ -35,7 +35,7 @@
 
   <script>
     // The value below is injected by flutter build, do not touch.
-    const serviceWorkerVersion = "726743092";
+    const serviceWorkerVersion = "1550046101";
   </script>
   <!-- This script adds the flutter initialization JS code -->
   <script src="flutter.js" defer></script>