Compare commits

10 Commits: ntindle/wa...testing-cl

| Author | SHA1 | Date |
|---|---|---|
|  | 8fa75c8da4 |  |
|  | b0953654d9 |  |
|  | c5069ca48f |  |
|  | 5d0cd88d98 |  |
|  | 033f58c075 |  |
|  | 40ef2d511f |  |
|  | b714c0c221 |  |
|  | ebabc4287e |  |
|  | 8b25e62959 |  |
|  | 919cc877ad |  |
AGENTS.md

@@ -16,6 +16,32 @@ See `docs/content/platform/getting-started.md` for setup instructions.
 - Format Python code with `poetry run format`.
 - Format frontend code using `pnpm format`.

+## Frontend guidelines:
+
+See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
+
+1. **Pages**: Create in `src/app/(platform)/feature-name/page.tsx`
+   - Add `usePageName.ts` hook for logic
+   - Put sub-components in local `components/` folder
+2. **Components**: Structure as `ComponentName/ComponentName.tsx` + `useComponentName.ts` + `helpers.ts`
+   - Use design system components from `src/components/` (atoms, molecules, organisms)
+   - Never use `src/components/__legacy__/*`
+3. **Data fetching**: Use generated API hooks from `@/app/api/__generated__/endpoints/`
+   - Regenerate with `pnpm generate:api`
+   - Pattern: `use{Method}{Version}{OperationName}`
+4. **Styling**: Tailwind CSS only, use design tokens, Phosphor Icons only
+5. **Testing**: Add Storybook stories for new components, Playwright for E2E
+6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
+   - Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
+   - Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
+   - Colocate state when possible and avoid creating large components, use sub-components (local `/components` folder next to the parent component) when sensible
+   - Avoid large hooks, abstract logic into `helpers.ts` files when sensible
+   - Use function declarations for components, arrow functions only for callbacks
+   - No barrel files or `index.ts` re-exports
+   - Do not use `useCallback` or `useMemo` unless strictly needed
+   - Avoid comments at all times unless the code is very complex
+
 ## Testing

 - Backend: `poetry run test` (runs pytest with a docker based postgres + prisma).
@@ -201,7 +201,7 @@ If you get any pushback or hit complex block conditions check the new_blocks gui
 3. Write tests alongside the route file
 4. Run `poetry run test` to verify

-**Frontend feature development:**
+### Frontend guidelines:

 See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:

@@ -217,6 +217,14 @@ See `/frontend/CONTRIBUTING.md` for complete patterns. Quick reference:
 4. **Styling**: Tailwind CSS only, use design tokens, Phosphor Icons only
 5. **Testing**: Add Storybook stories for new components, Playwright for E2E
 6. **Code conventions**: Function declarations (not arrow functions) for components/handlers
+   - Component props should be `interface Props { ... }` (not exported) unless the interface needs to be used outside the component
+   - Separate render logic from business logic (component.tsx + useComponent.ts + helpers.ts)
+   - Colocate state when possible and avoid creating large components, use sub-components (local `/components` folder next to the parent component) when sensible
+   - Avoid large hooks, abstract logic into `helpers.ts` files when sensible
+   - Use function declarations for components, arrow functions only for callbacks
+   - No barrel files or `index.ts` re-exports
+   - Do not use `useCallback` or `useMemo` unless strictly needed
+   - Avoid comments at all times unless the code is very complex

 ### Security Implementation

@@ -290,6 +290,11 @@ async def _cache_session(session: ChatSession) -> None:
     await async_redis.setex(redis_key, config.session_ttl, session.model_dump_json())


+async def cache_chat_session(session: ChatSession) -> None:
+    """Cache a chat session without persisting to the database."""
+    await _cache_session(session)
+
+
 async def _get_session_from_db(session_id: str) -> ChatSession | None:
     """Get a chat session from the database."""
     prisma_session = await chat_db.get_chat_session(session_id)
@@ -172,12 +172,12 @@ async def get_session(
         user_id: The optional authenticated user ID, or None for anonymous access.

     Returns:
-        SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.
+        SessionDetailResponse: Details for the requested session, or None if not found.

     """
     session = await get_chat_session(session_id, user_id)
     if not session:
-        raise NotFoundError(f"Session {session_id} not found")
+        raise NotFoundError(f"Session {session_id} not found.")

     messages = [message.model_dump() for message in session.messages]
     logger.info(
@@ -222,6 +222,8 @@ async def stream_chat_post(
     session = await _validate_and_get_session(session_id, user_id)

     async def event_generator() -> AsyncGenerator[str, None]:
+        chunk_count = 0
+        first_chunk_type: str | None = None
         async for chunk in chat_service.stream_chat_completion(
             session_id,
             request.message,
@@ -230,7 +232,26 @@
             session=session,  # Pass pre-fetched session to avoid double-fetch
             context=request.context,
         ):
+            if chunk_count < 3:
+                logger.info(
+                    "Chat stream chunk",
+                    extra={
+                        "session_id": session_id,
+                        "chunk_type": str(chunk.type),
+                    },
+                )
+                if not first_chunk_type:
+                    first_chunk_type = str(chunk.type)
+            chunk_count += 1
             yield chunk.to_sse()
+        logger.info(
+            "Chat stream completed",
+            extra={
+                "session_id": session_id,
+                "chunk_count": chunk_count,
+                "first_chunk_type": first_chunk_type,
+            },
+        )
         # AI SDK protocol termination
         yield "data: [DONE]\n\n"

@@ -275,6 +296,8 @@ async def stream_chat_get(
     session = await _validate_and_get_session(session_id, user_id)

     async def event_generator() -> AsyncGenerator[str, None]:
+        chunk_count = 0
+        first_chunk_type: str | None = None
         async for chunk in chat_service.stream_chat_completion(
             session_id,
             message,
@@ -282,7 +305,26 @@
             user_id=user_id,
             session=session,  # Pass pre-fetched session to avoid double-fetch
         ):
+            if chunk_count < 3:
+                logger.info(
+                    "Chat stream chunk",
+                    extra={
+                        "session_id": session_id,
+                        "chunk_type": str(chunk.type),
+                    },
+                )
+                if not first_chunk_type:
+                    first_chunk_type = str(chunk.type)
+            chunk_count += 1
             yield chunk.to_sse()
+        logger.info(
+            "Chat stream completed",
+            extra={
+                "session_id": session_id,
+                "chunk_count": chunk_count,
+                "first_chunk_type": first_chunk_type,
+            },
+        )
         # AI SDK protocol termination
         yield "data: [DONE]\n\n"

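Editorial note: both streaming endpoints above close the event stream with a literal `data: [DONE]` frame after the chunk loop. As a rough illustration of how a client might consume that protocol, here is a minimal Python sketch using `httpx`; the base URL, path, and query parameter are assumptions for illustration, not the actual API contract.

```python
import httpx


def consume_chat_stream(session_id: str, message: str) -> None:
    # Hypothetical endpoint path; the real route prefix is not shown in this diff.
    url = f"http://localhost:8000/chat/{session_id}/stream"
    with httpx.Client(timeout=None) as client:
        with client.stream("GET", url, params={"message": message}) as response:
            for line in response.iter_lines():
                if not line.startswith("data: "):
                    continue  # skip keep-alive / non-data lines
                payload = line[len("data: "):]
                if payload == "[DONE]":  # AI SDK protocol termination
                    break
                print(payload)  # one serialized stream chunk per data frame
```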
@@ -1,12 +1,20 @@
 import asyncio
 import logging
+import time
+from asyncio import CancelledError
 from collections.abc import AsyncGenerator
 from typing import Any

 import orjson
 from langfuse import get_client, propagate_attributes
 from langfuse.openai import openai  # type: ignore
-from openai import APIConnectionError, APIError, APIStatusError, RateLimitError
+from openai import (
+    APIConnectionError,
+    APIError,
+    APIStatusError,
+    PermissionDeniedError,
+    RateLimitError,
+)
 from openai.types.chat import ChatCompletionChunk, ChatCompletionToolParam

 from backend.data.understanding import (
@@ -21,6 +29,7 @@ from .model import (
     ChatMessage,
     ChatSession,
     Usage,
+    cache_chat_session,
     get_chat_session,
     update_session_title,
     upsert_chat_session,
@@ -296,6 +305,10 @@ async def stream_chat_completion(
         content="",
     )
     accumulated_tool_calls: list[dict[str, Any]] = []
+    has_saved_assistant_message = False
+    has_appended_streaming_message = False
+    last_cache_time = 0.0
+    last_cache_content_len = 0

     # Wrap main logic in try/finally to ensure Langfuse observations are always ended
     has_yielded_end = False
@@ -332,6 +345,23 @@
                 assert assistant_response.content is not None
                 assistant_response.content += delta
                 has_received_text = True
+                if not has_appended_streaming_message:
+                    session.messages.append(assistant_response)
+                    has_appended_streaming_message = True
+                current_time = time.monotonic()
+                content_len = len(assistant_response.content)
+                if (
+                    current_time - last_cache_time >= 1.0
+                    and content_len > last_cache_content_len
+                ):
+                    try:
+                        await cache_chat_session(session)
+                    except Exception as e:
+                        logger.warning(
+                            f"Failed to cache partial session {session.session_id}: {e}"
+                        )
+                    last_cache_time = current_time
+                    last_cache_content_len = content_len
                 yield chunk
             elif isinstance(chunk, StreamTextEnd):
                 # Emit text-end after text completes
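Editorial note: the hunk above persists the partially streamed assistant message to the cache at most once per second, and only when the content has actually grown. In isolation the pattern looks roughly like the sketch below; `Throttle` is an illustrative name, not part of the codebase.

```python
import time


class Throttle:
    """Allow an action at most once per interval, and only when progress was made."""

    def __init__(self, interval: float = 1.0) -> None:
        self.interval = interval
        self.last_time = 0.0
        self.last_size = 0

    def should_fire(self, size: int) -> bool:
        now = time.monotonic()  # monotonic clock ignores wall-clock adjustments
        if now - self.last_time >= self.interval and size > self.last_size:
            self.last_time = now
            self.last_size = size
            return True
        return False
```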
@@ -390,10 +420,42 @@
                 if has_received_text and not text_streaming_ended:
                     yield StreamTextEnd(id=text_block_id)
                     text_streaming_ended = True

+                # Save assistant message before yielding finish to ensure it's persisted
+                # even if client disconnects immediately after receiving StreamFinish
+                if not has_saved_assistant_message:
+                    messages_to_save_early: list[ChatMessage] = []
+                    if accumulated_tool_calls:
+                        assistant_response.tool_calls = (
+                            accumulated_tool_calls
+                        )
+                    if not has_appended_streaming_message and (
+                        assistant_response.content
+                        or assistant_response.tool_calls
+                    ):
+                        messages_to_save_early.append(assistant_response)
+                    messages_to_save_early.extend(tool_response_messages)
+
+                    if messages_to_save_early:
+                        session.messages.extend(messages_to_save_early)
+                        logger.info(
+                            f"Saving assistant message before StreamFinish: "
+                            f"content_len={len(assistant_response.content or '')}, "
+                            f"tool_calls={len(assistant_response.tool_calls or [])}, "
+                            f"tool_responses={len(tool_response_messages)}"
+                        )
+                    if (
+                        messages_to_save_early
+                        or has_appended_streaming_message
+                    ):
+                        await upsert_chat_session(session)
+                    has_saved_assistant_message = True
+
                 has_yielded_end = True
                 yield chunk
             elif isinstance(chunk, StreamError):
                 has_yielded_error = True
+                yield chunk
             elif isinstance(chunk, StreamUsage):
                 session.usage.append(
                     Usage(
@@ -413,6 +475,27 @@
             langfuse.update_current_trace(output=str(tool_response_messages))
             langfuse.update_current_span(output=str(tool_response_messages))

+    except CancelledError:
+        if not has_saved_assistant_message:
+            if accumulated_tool_calls:
+                assistant_response.tool_calls = accumulated_tool_calls
+            if assistant_response.content:
+                assistant_response.content = (
+                    f"{assistant_response.content}\n\n[interrupted]"
+                )
+            else:
+                assistant_response.content = "[interrupted]"
+            if not has_appended_streaming_message:
+                session.messages.append(assistant_response)
+            if tool_response_messages:
+                session.messages.extend(tool_response_messages)
+            try:
+                await upsert_chat_session(session)
+            except Exception as e:
+                logger.warning(
+                    f"Failed to save interrupted session {session.session_id}: {e}"
+                )
+        raise
     except Exception as e:
         logger.error(f"Error during stream: {e!s}", exc_info=True)

@@ -434,13 +517,18 @@
         # Add assistant message if it has content or tool calls
         if accumulated_tool_calls:
             assistant_response.tool_calls = accumulated_tool_calls
-        if assistant_response.content or assistant_response.tool_calls:
+        if not has_appended_streaming_message and (
+            assistant_response.content or assistant_response.tool_calls
+        ):
             messages_to_save.append(assistant_response)

         # Add tool response messages after assistant message
         messages_to_save.extend(tool_response_messages)

+        if not has_saved_assistant_message:
+            if messages_to_save:
                 session.messages.extend(messages_to_save)
+            if messages_to_save or has_appended_streaming_message:
                 await upsert_chat_session(session)

         if not has_yielded_error:
@@ -472,6 +560,8 @@
             return  # Exit after retry to avoid double-saving in finally block

         # Normal completion path - save session and handle tool call continuation
+        # Only save if we haven't already saved when StreamFinish was received
+        if not has_saved_assistant_message:
             logger.info(
                 f"Normal completion path: session={session.session_id}, "
                 f"current message_count={len(session.messages)}"
@@ -486,7 +576,9 @@
             logger.info(
                 f"Added {len(accumulated_tool_calls)} tool calls to assistant message"
             )
-        if assistant_response.content or assistant_response.tool_calls:
+        if not has_appended_streaming_message and (
+            assistant_response.content or assistant_response.tool_calls
+        ):
             messages_to_save.append(assistant_response)
             logger.info(
                 f"Saving assistant message with content_len={len(assistant_response.content or '')}, tool_calls={len(assistant_response.tool_calls or [])}"
@@ -499,11 +591,18 @@
                 f"total_to_save={len(messages_to_save)}"
             )

+            if messages_to_save:
                 session.messages.extend(messages_to_save)
                 logger.info(
                     f"Extended session messages, new message_count={len(session.messages)}"
                 )
+            if messages_to_save or has_appended_streaming_message:
                 await upsert_chat_session(session)
+        else:
+            logger.info(
+                "Assistant message already saved when StreamFinish was received, "
+                "skipping duplicate save"
+            )

         # If we did a tool call, stream the chat completion again to get the next response
         if has_done_tool_call:
@@ -545,6 +644,12 @@ def _is_retryable_error(error: Exception) -> bool:
     return False


+def _is_region_blocked_error(error: Exception) -> bool:
+    if isinstance(error, PermissionDeniedError):
+        return "not available in your region" in str(error).lower()
+    return "not available in your region" in str(error).lower()
+
+
 async def _stream_chat_chunks(
     session: ChatSession,
     tools: list[ChatCompletionToolParam],
@@ -737,7 +842,18 @@ async def _stream_chat_chunks(
                 f"Error in stream (not retrying): {e!s}",
                 exc_info=True,
             )
-            error_response = StreamError(errorText=str(e))
+            error_code = None
+            error_text = str(e)
+            if _is_region_blocked_error(e):
+                error_code = "MODEL_NOT_AVAILABLE_REGION"
+                error_text = (
+                    "This model is not available in your region. "
+                    "Please connect via VPN and try again."
+                )
+            error_response = StreamError(
+                errorText=error_text,
+                code=error_code,
+            )
             yield error_response
             yield StreamFinish()
             return
@@ -218,6 +218,7 @@ async def save_agent_to_library(
     library_agents = await library_db.create_library_agent(
         graph=created_graph,
         user_id=user_id,
+        sensitive_action_safe_mode=True,
         create_library_agents_for_sub_graphs=False,
     )

@@ -401,27 +401,11 @@ async def add_generated_agent_image(
     )


-def _initialize_graph_settings(graph: graph_db.GraphModel) -> GraphSettings:
-    """
-    Initialize GraphSettings based on graph content.
-
-    Args:
-        graph: The graph to analyze
-
-    Returns:
-        GraphSettings with appropriate human_in_the_loop_safe_mode value
-    """
-    if graph.has_human_in_the_loop:
-        # Graph has HITL blocks - set safe mode to True by default
-        return GraphSettings(human_in_the_loop_safe_mode=True)
-    else:
-        # Graph has no HITL blocks - keep None
-        return GraphSettings(human_in_the_loop_safe_mode=None)
-
-
 async def create_library_agent(
     graph: graph_db.GraphModel,
     user_id: str,
+    hitl_safe_mode: bool = True,
+    sensitive_action_safe_mode: bool = False,
     create_library_agents_for_sub_graphs: bool = True,
 ) -> list[library_model.LibraryAgent]:
     """
@@ -430,6 +414,8 @@ async def create_library_agent(
     Args:
         agent: The agent/Graph to add to the library.
         user_id: The user to whom the agent will be added.
+        hitl_safe_mode: Whether HITL blocks require manual review (default True).
+        sensitive_action_safe_mode: Whether sensitive action blocks require review.
         create_library_agents_for_sub_graphs: If True, creates LibraryAgent records for sub-graphs as well.

     Returns:
@@ -465,7 +451,11 @@ async def create_library_agent(
                     }
                 },
                 settings=SafeJson(
-                    _initialize_graph_settings(graph_entry).model_dump()
+                    GraphSettings.from_graph(
+                        graph_entry,
+                        hitl_safe_mode=hitl_safe_mode,
+                        sensitive_action_safe_mode=sensitive_action_safe_mode,
+                    ).model_dump()
                 ),
             ),
             include=library_agent_include(
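Editorial note: the new call sites rely on `GraphSettings.from_graph(...)`, whose implementation is not included in this diff. Judging from the removed `_initialize_graph_settings` helper and the new keyword arguments, a plausible shape is sketched below; the class name, field names, and defaulting rules here are assumptions.

```python
from dataclasses import dataclass
from typing import Optional, Protocol


class GraphLike(Protocol):
    has_human_in_the_loop: bool
    has_sensitive_action: bool


@dataclass
class GraphSettingsSketch:
    human_in_the_loop_safe_mode: Optional[bool] = None
    sensitive_action_safe_mode: Optional[bool] = None

    @classmethod
    def from_graph(
        cls,
        graph: GraphLike,
        hitl_safe_mode: Optional[bool] = True,
        sensitive_action_safe_mode: Optional[bool] = False,
    ) -> "GraphSettingsSketch":
        # Only graphs that actually contain the relevant blocks get a value;
        # otherwise the setting stays None, mirroring the removed helper.
        return cls(
            human_in_the_loop_safe_mode=(
                hitl_safe_mode if graph.has_human_in_the_loop else None
            ),
            sensitive_action_safe_mode=(
                sensitive_action_safe_mode if graph.has_sensitive_action else None
            ),
        )
```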
@@ -627,33 +617,6 @@ async def update_library_agent(
         raise DatabaseError("Failed to update library agent") from e


-async def update_library_agent_settings(
-    user_id: str,
-    agent_id: str,
-    settings: GraphSettings,
-) -> library_model.LibraryAgent:
-    """
-    Updates the settings for a specific LibraryAgent.
-
-    Args:
-        user_id: The owner of the LibraryAgent.
-        agent_id: The ID of the LibraryAgent to update.
-        settings: New GraphSettings to apply.
-
-    Returns:
-        The updated LibraryAgent.
-
-    Raises:
-        NotFoundError: If the specified LibraryAgent does not exist.
-        DatabaseError: If there's an error in the update operation.
-    """
-    return await update_library_agent(
-        library_agent_id=agent_id,
-        user_id=user_id,
-        settings=settings,
-    )
-
-
 async def delete_library_agent(
     library_agent_id: str, user_id: str, soft_delete: bool = True
 ) -> None:
@@ -838,7 +801,7 @@ async def add_store_agent_to_library(
                 "isCreatedByUser": False,
                 "useGraphIsActiveVersion": False,
                 "settings": SafeJson(
-                    _initialize_graph_settings(graph_model).model_dump()
+                    GraphSettings.from_graph(graph_model).model_dump()
                 ),
             },
             include=library_agent_include(
@@ -1228,8 +1191,15 @@ async def fork_library_agent(
         )
         new_graph = await on_graph_activate(new_graph, user_id=user_id)

-        # Create a library agent for the new graph
-        return (await create_library_agent(new_graph, user_id))[0]
+        # Create a library agent for the new graph, preserving safe mode settings
+        return (
+            await create_library_agent(
+                new_graph,
+                user_id,
+                hitl_safe_mode=original_agent.settings.human_in_the_loop_safe_mode,
+                sensitive_action_safe_mode=original_agent.settings.sensitive_action_safe_mode,
+            )
+        )[0]
     except prisma.errors.PrismaError as e:
         logger.error(f"Database error cloning library agent: {e}")
         raise DatabaseError("Failed to fork library agent") from e
@@ -73,6 +73,12 @@ class LibraryAgent(pydantic.BaseModel):
     has_external_trigger: bool = pydantic.Field(
         description="Whether the agent has an external trigger (e.g. webhook) node"
     )
+    has_human_in_the_loop: bool = pydantic.Field(
+        description="Whether the agent has human-in-the-loop blocks"
+    )
+    has_sensitive_action: bool = pydantic.Field(
+        description="Whether the agent has sensitive action blocks"
+    )
     trigger_setup_info: Optional[GraphTriggerInfo] = None

     # Indicates whether there's a new output (based on recent runs)
@@ -180,6 +186,8 @@ class LibraryAgent(pydantic.BaseModel):
                 graph.credentials_input_schema if sub_graphs is not None else None
             ),
             has_external_trigger=graph.has_external_trigger,
+            has_human_in_the_loop=graph.has_human_in_the_loop,
+            has_sensitive_action=graph.has_sensitive_action,
             trigger_setup_info=graph.trigger_setup_info,
             new_output=new_output,
             can_access_graph=can_access_graph,
@@ -52,6 +52,8 @@ async def test_get_library_agents_success(
         output_schema={"type": "object", "properties": {}},
         credentials_input_schema={"type": "object", "properties": {}},
         has_external_trigger=False,
+        has_human_in_the_loop=False,
+        has_sensitive_action=False,
         status=library_model.LibraryAgentStatus.COMPLETED,
         recommended_schedule_cron=None,
         new_output=False,
@@ -75,6 +77,8 @@
         output_schema={"type": "object", "properties": {}},
         credentials_input_schema={"type": "object", "properties": {}},
         has_external_trigger=False,
+        has_human_in_the_loop=False,
+        has_sensitive_action=False,
         status=library_model.LibraryAgentStatus.COMPLETED,
         recommended_schedule_cron=None,
         new_output=False,
@@ -150,6 +154,8 @@ async def test_get_favorite_library_agents_success(
         output_schema={"type": "object", "properties": {}},
         credentials_input_schema={"type": "object", "properties": {}},
         has_external_trigger=False,
+        has_human_in_the_loop=False,
+        has_sensitive_action=False,
         status=library_model.LibraryAgentStatus.COMPLETED,
         recommended_schedule_cron=None,
         new_output=False,
@@ -218,6 +224,8 @@ def test_add_agent_to_library_success(
         output_schema={"type": "object", "properties": {}},
         credentials_input_schema={"type": "object", "properties": {}},
         has_external_trigger=False,
+        has_human_in_the_loop=False,
+        has_sensitive_action=False,
         status=library_model.LibraryAgentStatus.COMPLETED,
         new_output=False,
         can_access_graph=True,
@@ -154,16 +154,16 @@ async def store_content_embedding(

     # Upsert the embedding
     # WHERE clause in DO UPDATE prevents PostgreSQL 15 bug with NULLS NOT DISTINCT
-    # Use {pgvector_schema}.vector for explicit pgvector type qualification
+    # Use unqualified ::vector - pgvector is in search_path on all environments
     await execute_raw_with_schema(
         """
         INSERT INTO {schema_prefix}"UnifiedContentEmbedding" (
             "id", "contentType", "contentId", "userId", "embedding", "searchableText", "metadata", "createdAt", "updatedAt"
         )
-        VALUES (gen_random_uuid()::text, $1::{schema_prefix}"ContentType", $2, $3, $4::{pgvector_schema}.vector, $5, $6::jsonb, NOW(), NOW())
+        VALUES (gen_random_uuid()::text, $1::{schema_prefix}"ContentType", $2, $3, $4::vector, $5, $6::jsonb, NOW(), NOW())
         ON CONFLICT ("contentType", "contentId", "userId")
         DO UPDATE SET
-            "embedding" = $4::{pgvector_schema}.vector,
+            "embedding" = $4::vector,
             "searchableText" = $5,
             "metadata" = $6::jsonb,
             "updatedAt" = NOW()
@@ -879,8 +879,7 @@ async def semantic_search(
     min_similarity_idx = len(params) + 1
     params.append(min_similarity)

-    # Use regular string (not f-string) for template to preserve {schema_prefix} and {schema} placeholders
-    # Use OPERATOR({pgvector_schema}.<=>) for explicit operator schema qualification
+    # Use unqualified ::vector and <=> operator - pgvector is in search_path on all environments
     sql = (
         """
         SELECT
@@ -888,9 +887,9 @@
             "contentType" as content_type,
             "searchableText" as searchable_text,
             metadata,
-            1 - (embedding OPERATOR({pgvector_schema}.<=>) '"""
+            1 - (embedding <=> '"""
         + embedding_str
-        + """'::{pgvector_schema}.vector) as similarity
+        + """'::vector) as similarity
         FROM {schema_prefix}"UnifiedContentEmbedding"
         WHERE "contentType" IN ("""
         + content_type_placeholders
@@ -898,9 +897,9 @@
         """
         + user_filter
         + """
-        AND 1 - (embedding OPERATOR({pgvector_schema}.<=>) '"""
+        AND 1 - (embedding <=> '"""
         + embedding_str
-        + """'::{pgvector_schema}.vector) >= $"""
+        + """'::vector) >= $"""
         + str(min_similarity_idx)
         + """
         ORDER BY similarity DESC
@@ -295,7 +295,7 @@ async def unified_hybrid_search(
                 FROM {{schema_prefix}}"UnifiedContentEmbedding" uce
                 WHERE uce."contentType" = ANY({content_types_param}::{{schema_prefix}}"ContentType"[])
                 {user_filter}
-                ORDER BY uce.embedding OPERATOR({{pgvector_schema}}.<=>) {embedding_param}::{{pgvector_schema}}.vector
+                ORDER BY uce.embedding <=> {embedding_param}::vector
                 LIMIT 200
             )
         ),
@@ -307,7 +307,7 @@ async def unified_hybrid_search(
             uce.metadata,
             uce."updatedAt" as updated_at,
             -- Semantic score: cosine similarity (1 - distance)
-            COALESCE(1 - (uce.embedding OPERATOR({{pgvector_schema}}.<=>) {embedding_param}::{{pgvector_schema}}.vector), 0) as semantic_score,
+            COALESCE(1 - (uce.embedding <=> {embedding_param}::vector), 0) as semantic_score,
             -- Lexical score: ts_rank_cd
             COALESCE(ts_rank_cd(uce.search, plainto_tsquery('english', {query_param})), 0) as lexical_raw,
             -- Category match from metadata
@@ -583,7 +583,7 @@ async def hybrid_search(
                 WHERE uce."contentType" = 'STORE_AGENT'::{{schema_prefix}}"ContentType"
                 AND uce."userId" IS NULL
                 AND {where_clause}
-                ORDER BY uce.embedding OPERATOR({{pgvector_schema}}.<=>) {embedding_param}::{{pgvector_schema}}.vector
+                ORDER BY uce.embedding <=> {embedding_param}::vector
                 LIMIT 200
             ) uce
         ),
@@ -605,7 +605,7 @@ async def hybrid_search(
             -- Searchable text for BM25 reranking
             COALESCE(sa.agent_name, '') || ' ' || COALESCE(sa.sub_heading, '') || ' ' || COALESCE(sa.description, '') as searchable_text,
             -- Semantic score
-            COALESCE(1 - (uce.embedding OPERATOR({{pgvector_schema}}.<=>) {embedding_param}::{{pgvector_schema}}.vector), 0) as semantic_score,
+            COALESCE(1 - (uce.embedding <=> {embedding_param}::vector), 0) as semantic_score,
             -- Lexical score (raw, will normalize)
             COALESCE(ts_rank_cd(uce.search, plainto_tsquery('english', {query_param})), 0) as lexical_raw,
             -- Category match
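Editorial note: the SQL hunks above drop the explicit `{pgvector_schema}.` qualification and rely on the `vector` type and `<=>` operator resolving through `search_path`. A quick way to sanity-check that assumption against a database is sketched below; the `asyncpg` driver and the DSN are stand-ins, not necessarily what the project uses.

```python
import asyncio

import asyncpg  # assumed driver; any Postgres client can run the same probe


async def check_pgvector_on_search_path(dsn: str) -> None:
    conn = await asyncpg.connect(dsn)
    try:
        # Fails with 'type "vector" does not exist' when the extension's schema
        # is not on search_path, which is exactly what the unqualified ::vector
        # casts and the bare <=> operator in the queries above depend on.
        row = await conn.fetchrow("SELECT '[1,2,3]'::vector <=> '[1,2,4]'::vector")
        print("cosine distance:", row[0])
    finally:
        await conn.close()


if __name__ == "__main__":
    asyncio.run(check_pgvector_on_search_path("postgresql://localhost/postgres"))
```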
@@ -761,10 +761,8 @@ async def create_new_graph(
     graph.reassign_ids(user_id=user_id, reassign_graph_id=True)
     graph.validate_graph(for_run=False)

-    # The return value of the create graph & library function is intentionally not used here,
-    # as the graph already valid and no sub-graphs are returned back.
     await graph_db.create_graph(graph, user_id=user_id)
-    await library_db.create_library_agent(graph, user_id=user_id)
+    await library_db.create_library_agent(graph, user_id)
     activated_graph = await on_graph_activate(graph, user_id=user_id)

     if create_graph.source == "builder":
@@ -888,21 +886,19 @@ async def set_graph_active_version(
 async def _update_library_agent_version_and_settings(
     user_id: str, agent_graph: graph_db.GraphModel
 ) -> library_model.LibraryAgent:
-    # Keep the library agent up to date with the new active version
     library = await library_db.update_agent_version_in_library(
         user_id, agent_graph.id, agent_graph.version
     )
-    # If the graph has HITL node, initialize the setting if it's not already set.
-    if (
-        agent_graph.has_human_in_the_loop
-        and library.settings.human_in_the_loop_safe_mode is None
-    ):
-        await library_db.update_library_agent_settings(
+    updated_settings = GraphSettings.from_graph(
+        graph=agent_graph,
+        hitl_safe_mode=library.settings.human_in_the_loop_safe_mode,
+        sensitive_action_safe_mode=library.settings.sensitive_action_safe_mode,
+    )
+    if updated_settings != library.settings:
+        library = await library_db.update_library_agent(
+            library_agent_id=library.id,
             user_id=user_id,
-            agent_id=library.id,
-            settings=library.settings.model_copy(
-                update={"human_in_the_loop_safe_mode": True}
-            ),
+            settings=updated_settings,
         )
     return library

@@ -919,21 +915,18 @@ async def update_graph_settings(
     user_id: Annotated[str, Security(get_user_id)],
 ) -> GraphSettings:
     """Update graph settings for the user's library agent."""
-    # Get the library agent for this graph
     library_agent = await library_db.get_library_agent_by_graph_id(
         graph_id=graph_id, user_id=user_id
     )
     if not library_agent:
         raise HTTPException(404, f"Graph #{graph_id} not found in user's library")

-    # Update the library agent settings
-    updated_agent = await library_db.update_library_agent_settings(
+    updated_agent = await library_db.update_library_agent(
+        library_agent_id=library_agent.id,
         user_id=user_id,
-        agent_id=library_agent.id,
         settings=settings,
     )

-    # Return the updated settings
     return GraphSettings.model_validate(updated_agent.settings)

@@ -84,7 +84,7 @@ class HITLReviewHelper:
             Exception: If review creation or status update fails
         """
         # Skip review if safe mode is disabled - return auto-approved result
-        if not execution_context.safe_mode:
+        if not execution_context.human_in_the_loop_safe_mode:
             logger.info(
                 f"Block {block_name} skipping review for node {node_exec_id} - safe mode disabled"
             )
@@ -104,7 +104,7 @@ class HumanInTheLoopBlock(Block):
         execution_context: ExecutionContext,
         **_kwargs,
     ) -> BlockOutput:
-        if not execution_context.safe_mode:
+        if not execution_context.human_in_the_loop_safe_mode:
             logger.info(
                 f"HITL block skipping review for node {node_exec_id} - safe mode disabled"
             )
@@ -79,6 +79,10 @@ class ModelMetadata(NamedTuple):
     provider: str
     context_window: int
     max_output_tokens: int | None
+    display_name: str
+    provider_name: str
+    creator_name: str
+    price_tier: Literal[1, 2, 3]


 class LlmModelMeta(EnumMeta):
@@ -171,6 +175,26 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     V0_1_5_LG = "v0-1.5-lg"
     V0_1_0_MD = "v0-1.0-md"

+    @classmethod
+    def __get_pydantic_json_schema__(cls, schema, handler):
+        json_schema = handler(schema)
+        llm_model_metadata = {}
+        for model in cls:
+            model_name = model.value
+            metadata = model.metadata
+            llm_model_metadata[model_name] = {
+                "creator": metadata.creator_name,
+                "creator_name": metadata.creator_name,
+                "title": metadata.display_name,
+                "provider": metadata.provider,
+                "provider_name": metadata.provider_name,
+                "name": model_name,
+                "price_tier": metadata.price_tier,
+            }
+        json_schema["llm_model"] = True
+        json_schema["llm_model_metadata"] = llm_model_metadata
+        return json_schema
+
     @property
     def metadata(self) -> ModelMetadata:
         return MODEL_METADATA[self]
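Editorial note: the `__get_pydantic_json_schema__` hook above injects per-model metadata into any JSON schema that references `LlmModel`. Under Pydantic v2's default layout the enum lands under `$defs`, so consumers can read the metadata roughly as sketched below; `BlockInputSketch` is a made-up model and the import of `LlmModel` depends on the module path, which this diff does not show.

```python
from pydantic import BaseModel

# from backend... import LlmModel  # actual import path not shown in this diff


class BlockInputSketch(BaseModel):  # hypothetical model, for illustration only
    model: LlmModel


schema = BlockInputSketch.model_json_schema()
llm_def = schema["$defs"]["LlmModel"]  # enum schemas are referenced via $defs
assert llm_def.get("llm_model") is True
for name, meta in llm_def["llm_model_metadata"].items():
    print(name, meta["title"], meta["provider_name"], meta["price_tier"])
```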
@@ -190,119 +214,291 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):

 MODEL_METADATA = {
     # https://platform.openai.com/docs/models
-    LlmModel.O3: ModelMetadata("openai", 200000, 100000),
-    LlmModel.O3_MINI: ModelMetadata("openai", 200000, 100000),  # o3-mini-2025-01-31
-    LlmModel.O1: ModelMetadata("openai", 200000, 100000),  # o1-2024-12-17
-    LlmModel.O1_MINI: ModelMetadata("openai", 128000, 65536),  # o1-mini-2024-09-12
+    LlmModel.O3: ModelMetadata("openai", 200000, 100000, "O3", "OpenAI", "OpenAI", 2),
+    LlmModel.O3_MINI: ModelMetadata(
+        "openai", 200000, 100000, "O3 Mini", "OpenAI", "OpenAI", 1
+    ),  # o3-mini-2025-01-31
+    LlmModel.O1: ModelMetadata(
+        "openai", 200000, 100000, "O1", "OpenAI", "OpenAI", 3
+    ),  # o1-2024-12-17
+    LlmModel.O1_MINI: ModelMetadata(
+        "openai", 128000, 65536, "O1 Mini", "OpenAI", "OpenAI", 2
+    ),  # o1-mini-2024-09-12
     # GPT-5 models
-    LlmModel.GPT5_2: ModelMetadata("openai", 400000, 128000),
-    LlmModel.GPT5_1: ModelMetadata("openai", 400000, 128000),
-    LlmModel.GPT5: ModelMetadata("openai", 400000, 128000),
-    LlmModel.GPT5_MINI: ModelMetadata("openai", 400000, 128000),
-    LlmModel.GPT5_NANO: ModelMetadata("openai", 400000, 128000),
-    LlmModel.GPT5_CHAT: ModelMetadata("openai", 400000, 16384),
-    LlmModel.GPT41: ModelMetadata("openai", 1047576, 32768),
-    LlmModel.GPT41_MINI: ModelMetadata("openai", 1047576, 32768),
+    LlmModel.GPT5_2: ModelMetadata(
+        "openai", 400000, 128000, "GPT-5.2", "OpenAI", "OpenAI", 3
+    ),
+    LlmModel.GPT5_1: ModelMetadata(
+        "openai", 400000, 128000, "GPT-5.1", "OpenAI", "OpenAI", 2
+    ),
+    LlmModel.GPT5: ModelMetadata(
+        "openai", 400000, 128000, "GPT-5", "OpenAI", "OpenAI", 1
+    ),
+    LlmModel.GPT5_MINI: ModelMetadata(
+        "openai", 400000, 128000, "GPT-5 Mini", "OpenAI", "OpenAI", 1
+    ),
+    LlmModel.GPT5_NANO: ModelMetadata(
+        "openai", 400000, 128000, "GPT-5 Nano", "OpenAI", "OpenAI", 1
+    ),
+    LlmModel.GPT5_CHAT: ModelMetadata(
+        "openai", 400000, 16384, "GPT-5 Chat Latest", "OpenAI", "OpenAI", 2
+    ),
+    LlmModel.GPT41: ModelMetadata(
+        "openai", 1047576, 32768, "GPT-4.1", "OpenAI", "OpenAI", 1
+    ),
+    LlmModel.GPT41_MINI: ModelMetadata(
+        "openai", 1047576, 32768, "GPT-4.1 Mini", "OpenAI", "OpenAI", 1
+    ),
     LlmModel.GPT4O_MINI: ModelMetadata(
-        "openai", 128000, 16384
+        "openai", 128000, 16384, "GPT-4o Mini", "OpenAI", "OpenAI", 1
     ),  # gpt-4o-mini-2024-07-18
-    LlmModel.GPT4O: ModelMetadata("openai", 128000, 16384),  # gpt-4o-2024-08-06
+    LlmModel.GPT4O: ModelMetadata(
+        "openai", 128000, 16384, "GPT-4o", "OpenAI", "OpenAI", 2
+    ),  # gpt-4o-2024-08-06
     LlmModel.GPT4_TURBO: ModelMetadata(
-        "openai", 128000, 4096
+        "openai", 128000, 4096, "GPT-4 Turbo", "OpenAI", "OpenAI", 3
     ),  # gpt-4-turbo-2024-04-09
-    LlmModel.GPT3_5_TURBO: ModelMetadata("openai", 16385, 4096),  # gpt-3.5-turbo-0125
+    LlmModel.GPT3_5_TURBO: ModelMetadata(
+        "openai", 16385, 4096, "GPT-3.5 Turbo", "OpenAI", "OpenAI", 1
+    ),  # gpt-3.5-turbo-0125
     # https://docs.anthropic.com/en/docs/about-claude/models
     LlmModel.CLAUDE_4_1_OPUS: ModelMetadata(
-        "anthropic", 200000, 32000
+        "anthropic", 200000, 32000, "Claude Opus 4.1", "Anthropic", "Anthropic", 3
     ),  # claude-opus-4-1-20250805
     LlmModel.CLAUDE_4_OPUS: ModelMetadata(
-        "anthropic", 200000, 32000
+        "anthropic", 200000, 32000, "Claude Opus 4", "Anthropic", "Anthropic", 3
     ),  # claude-4-opus-20250514
     LlmModel.CLAUDE_4_SONNET: ModelMetadata(
-        "anthropic", 200000, 64000
+        "anthropic", 200000, 64000, "Claude Sonnet 4", "Anthropic", "Anthropic", 2
     ),  # claude-4-sonnet-20250514
     LlmModel.CLAUDE_4_5_OPUS: ModelMetadata(
-        "anthropic", 200000, 64000
+        "anthropic", 200000, 64000, "Claude Opus 4.5", "Anthropic", "Anthropic", 3
     ),  # claude-opus-4-5-20251101
     LlmModel.CLAUDE_4_5_SONNET: ModelMetadata(
-        "anthropic", 200000, 64000
+        "anthropic", 200000, 64000, "Claude Sonnet 4.5", "Anthropic", "Anthropic", 3
     ),  # claude-sonnet-4-5-20250929
     LlmModel.CLAUDE_4_5_HAIKU: ModelMetadata(
-        "anthropic", 200000, 64000
+        "anthropic", 200000, 64000, "Claude Haiku 4.5", "Anthropic", "Anthropic", 2
     ),  # claude-haiku-4-5-20251001
     LlmModel.CLAUDE_3_7_SONNET: ModelMetadata(
-        "anthropic", 200000, 64000
+        "anthropic", 200000, 64000, "Claude 3.7 Sonnet", "Anthropic", "Anthropic", 2
     ),  # claude-3-7-sonnet-20250219
     LlmModel.CLAUDE_3_HAIKU: ModelMetadata(
-        "anthropic", 200000, 4096
+        "anthropic", 200000, 4096, "Claude 3 Haiku", "Anthropic", "Anthropic", 1
     ),  # claude-3-haiku-20240307
     # https://docs.aimlapi.com/api-overview/model-database/text-models
-    LlmModel.AIML_API_QWEN2_5_72B: ModelMetadata("aiml_api", 32000, 8000),
-    LlmModel.AIML_API_LLAMA3_1_70B: ModelMetadata("aiml_api", 128000, 40000),
-    LlmModel.AIML_API_LLAMA3_3_70B: ModelMetadata("aiml_api", 128000, None),
-    LlmModel.AIML_API_META_LLAMA_3_1_70B: ModelMetadata("aiml_api", 131000, 2000),
-    LlmModel.AIML_API_LLAMA_3_2_3B: ModelMetadata("aiml_api", 128000, None),
-    # https://console.groq.com/docs/models
-    LlmModel.LLAMA3_3_70B: ModelMetadata("groq", 128000, 32768),
-    LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 128000, 8192),
-    # https://ollama.com/library
-    LlmModel.OLLAMA_LLAMA3_3: ModelMetadata("ollama", 8192, None),
-    LlmModel.OLLAMA_LLAMA3_2: ModelMetadata("ollama", 8192, None),
-    LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192, None),
-    LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192, None),
-    LlmModel.OLLAMA_DOLPHIN: ModelMetadata("ollama", 32768, None),
-    # https://openrouter.ai/models
-    LlmModel.GEMINI_2_5_PRO: ModelMetadata("open_router", 1050000, 8192),
-    LlmModel.GEMINI_3_PRO_PREVIEW: ModelMetadata("open_router", 1048576, 65535),
-    LlmModel.GEMINI_2_5_FLASH: ModelMetadata("open_router", 1048576, 65535),
-    LlmModel.GEMINI_2_0_FLASH: ModelMetadata("open_router", 1048576, 8192),
-    LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: ModelMetadata(
-        "open_router", 1048576, 65535
+    LlmModel.AIML_API_QWEN2_5_72B: ModelMetadata(
+        "aiml_api", 32000, 8000, "Qwen 2.5 72B Instruct Turbo", "AI/ML", "Qwen", 1
+    ),
+    LlmModel.AIML_API_LLAMA3_1_70B: ModelMetadata(
+        "aiml_api",
+        128000,
+        40000,
+        "Llama 3.1 Nemotron 70B Instruct",
+        "AI/ML",
+        "Nvidia",
+        1,
+    ),
+    LlmModel.AIML_API_LLAMA3_3_70B: ModelMetadata(
+        "aiml_api", 128000, None, "Llama 3.3 70B Instruct Turbo", "AI/ML", "Meta", 1
+    ),
+    LlmModel.AIML_API_META_LLAMA_3_1_70B: ModelMetadata(
+        "aiml_api", 131000, 2000, "Llama 3.1 70B Instruct Turbo", "AI/ML", "Meta", 1
+    ),
+    LlmModel.AIML_API_LLAMA_3_2_3B: ModelMetadata(
+        "aiml_api", 128000, None, "Llama 3.2 3B Instruct Turbo", "AI/ML", "Meta", 1
+    ),
+    # https://console.groq.com/docs/models
+    LlmModel.LLAMA3_3_70B: ModelMetadata(
+        "groq", 128000, 32768, "Llama 3.3 70B Versatile", "Groq", "Meta", 1
+    ),
+    LlmModel.LLAMA3_1_8B: ModelMetadata(
+        "groq", 128000, 8192, "Llama 3.1 8B Instant", "Groq", "Meta", 1
+    ),
+    # https://ollama.com/library
+    LlmModel.OLLAMA_LLAMA3_3: ModelMetadata(
+        "ollama", 8192, None, "Llama 3.3", "Ollama", "Meta", 1
+    ),
+    LlmModel.OLLAMA_LLAMA3_2: ModelMetadata(
+        "ollama", 8192, None, "Llama 3.2", "Ollama", "Meta", 1
+    ),
+    LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata(
+        "ollama", 8192, None, "Llama 3", "Ollama", "Meta", 1
+    ),
+    LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata(
+        "ollama", 8192, None, "Llama 3.1 405B", "Ollama", "Meta", 1
+    ),
+    LlmModel.OLLAMA_DOLPHIN: ModelMetadata(
+        "ollama", 32768, None, "Dolphin Mistral Latest", "Ollama", "Mistral AI", 1
+    ),
+    # https://openrouter.ai/models
+    LlmModel.GEMINI_2_5_PRO: ModelMetadata(
+        "open_router",
+        1050000,
+        8192,
+        "Gemini 2.5 Pro Preview 03.25",
+        "OpenRouter",
+        "Google",
+        2,
+    ),
+    LlmModel.GEMINI_3_PRO_PREVIEW: ModelMetadata(
+        "open_router", 1048576, 65535, "Gemini 3 Pro Preview", "OpenRouter", "Google", 2
+    ),
+    LlmModel.GEMINI_2_5_FLASH: ModelMetadata(
+        "open_router", 1048576, 65535, "Gemini 2.5 Flash", "OpenRouter", "Google", 1
+    ),
+    LlmModel.GEMINI_2_0_FLASH: ModelMetadata(
+        "open_router", 1048576, 8192, "Gemini 2.0 Flash 001", "OpenRouter", "Google", 1
+    ),
+    LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: ModelMetadata(
+        "open_router",
+        1048576,
+        65535,
+        "Gemini 2.5 Flash Lite Preview 06.17",
+        "OpenRouter",
+        "Google",
+        1,
+    ),
+    LlmModel.GEMINI_2_0_FLASH_LITE: ModelMetadata(
+        "open_router",
+        1048576,
+        8192,
+        "Gemini 2.0 Flash Lite 001",
+        "OpenRouter",
+        "Google",
+        1,
+    ),
+    LlmModel.MISTRAL_NEMO: ModelMetadata(
+        "open_router", 128000, 4096, "Mistral Nemo", "OpenRouter", "Mistral AI", 1
+    ),
+    LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata(
+        "open_router", 128000, 4096, "Command R 08.2024", "OpenRouter", "Cohere", 1
+    ),
+    LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata(
+        "open_router", 128000, 4096, "Command R Plus 08.2024", "OpenRouter", "Cohere", 2
+    ),
+    LlmModel.DEEPSEEK_CHAT: ModelMetadata(
+        "open_router", 64000, 2048, "DeepSeek Chat", "OpenRouter", "DeepSeek", 1
+    ),
+    LlmModel.DEEPSEEK_R1_0528: ModelMetadata(
+        "open_router", 163840, 163840, "DeepSeek R1 0528", "OpenRouter", "DeepSeek", 1
+    ),
+    LlmModel.PERPLEXITY_SONAR: ModelMetadata(
+        "open_router", 127000, 8000, "Sonar", "OpenRouter", "Perplexity", 1
+    ),
+    LlmModel.PERPLEXITY_SONAR_PRO: ModelMetadata(
+        "open_router", 200000, 8000, "Sonar Pro", "OpenRouter", "Perplexity", 2
     ),
-    LlmModel.GEMINI_2_0_FLASH_LITE: ModelMetadata("open_router", 1048576, 8192),
-    LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 128000, 4096),
-    LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 128000, 4096),
-    LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 128000, 4096),
-    LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 64000, 2048),
-    LlmModel.DEEPSEEK_R1_0528: ModelMetadata("open_router", 163840, 163840),
-    LlmModel.PERPLEXITY_SONAR: ModelMetadata("open_router", 127000, 8000),
-    LlmModel.PERPLEXITY_SONAR_PRO: ModelMetadata("open_router", 200000, 8000),
     LlmModel.PERPLEXITY_SONAR_DEEP_RESEARCH: ModelMetadata(
         "open_router",
         128000,
         16000,
+        "Sonar Deep Research",
+        "OpenRouter",
+        "Perplexity",
+        3,
     ),
     LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_405B: ModelMetadata(
-        "open_router", 131000, 4096
+        "open_router",
+        131000,
+        4096,
+        "Hermes 3 Llama 3.1 405B",
+        "OpenRouter",
+        "Nous Research",
+        1,
     ),
     LlmModel.NOUSRESEARCH_HERMES_3_LLAMA_3_1_70B: ModelMetadata(
-        "open_router", 12288, 12288
+        "open_router",
+        12288,
+        12288,
+        "Hermes 3 Llama 3.1 70B",
+        "OpenRouter",
+        "Nous Research",
+        1,
+    ),
+    LlmModel.OPENAI_GPT_OSS_120B: ModelMetadata(
+        "open_router", 131072, 131072, "GPT-OSS 120B", "OpenRouter", "OpenAI", 1
+    ),
+    LlmModel.OPENAI_GPT_OSS_20B: ModelMetadata(
+        "open_router", 131072, 32768, "GPT-OSS 20B", "OpenRouter", "OpenAI", 1
+    ),
+    LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata(
+        "open_router", 300000, 5120, "Nova Lite V1", "OpenRouter", "Amazon", 1
+    ),
+    LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata(
+        "open_router", 128000, 5120, "Nova Micro V1", "OpenRouter", "Amazon", 1
+    ),
+    LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata(
+        "open_router", 300000, 5120, "Nova Pro V1", "OpenRouter", "Amazon", 1
+    ),
+    LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata(
+        "open_router", 65536, 4096, "WizardLM 2 8x22B", "OpenRouter", "Microsoft", 1
+    ),
+    LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata(
+        "open_router", 4096, 4096, "MythoMax L2 13B", "OpenRouter", "Gryphe", 1
+    ),
+    LlmModel.META_LLAMA_4_SCOUT: ModelMetadata(
+        "open_router", 131072, 131072, "Llama 4 Scout", "OpenRouter", "Meta", 1
+    ),
+    LlmModel.META_LLAMA_4_MAVERICK: ModelMetadata(
+        "open_router", 1048576, 1000000, "Llama 4 Maverick", "OpenRouter", "Meta", 1
||||||
|
),
|
||||||
|
LlmModel.GROK_4: ModelMetadata(
|
||||||
|
"open_router", 256000, 256000, "Grok 4", "OpenRouter", "xAI", 3
|
||||||
|
),
|
||||||
|
LlmModel.GROK_4_FAST: ModelMetadata(
|
||||||
|
"open_router", 2000000, 30000, "Grok 4 Fast", "OpenRouter", "xAI", 1
|
||||||
|
),
|
||||||
|
LlmModel.GROK_4_1_FAST: ModelMetadata(
|
||||||
|
"open_router", 2000000, 30000, "Grok 4.1 Fast", "OpenRouter", "xAI", 1
|
||||||
|
),
|
||||||
|
LlmModel.GROK_CODE_FAST_1: ModelMetadata(
|
||||||
|
"open_router", 256000, 10000, "Grok Code Fast 1", "OpenRouter", "xAI", 1
|
||||||
|
),
|
||||||
|
LlmModel.KIMI_K2: ModelMetadata(
|
||||||
|
"open_router", 131000, 131000, "Kimi K2", "OpenRouter", "Moonshot AI", 1
|
||||||
|
),
|
||||||
|
LlmModel.QWEN3_235B_A22B_THINKING: ModelMetadata(
|
||||||
|
"open_router",
|
||||||
|
262144,
|
||||||
|
262144,
|
||||||
|
"Qwen 3 235B A22B Thinking 2507",
|
||||||
|
"OpenRouter",
|
||||||
|
"Qwen",
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
LlmModel.QWEN3_CODER: ModelMetadata(
|
||||||
|
"open_router", 262144, 262144, "Qwen 3 Coder", "OpenRouter", "Qwen", 3
|
||||||
),
|
),
|
||||||
LlmModel.OPENAI_GPT_OSS_120B: ModelMetadata("open_router", 131072, 131072),
|
|
||||||
LlmModel.OPENAI_GPT_OSS_20B: ModelMetadata("open_router", 131072, 32768),
|
|
||||||
LlmModel.AMAZON_NOVA_LITE_V1: ModelMetadata("open_router", 300000, 5120),
|
|
||||||
LlmModel.AMAZON_NOVA_MICRO_V1: ModelMetadata("open_router", 128000, 5120),
|
|
||||||
LlmModel.AMAZON_NOVA_PRO_V1: ModelMetadata("open_router", 300000, 5120),
|
|
||||||
LlmModel.MICROSOFT_WIZARDLM_2_8X22B: ModelMetadata("open_router", 65536, 4096),
|
|
||||||
LlmModel.GRYPHE_MYTHOMAX_L2_13B: ModelMetadata("open_router", 4096, 4096),
|
|
||||||
LlmModel.META_LLAMA_4_SCOUT: ModelMetadata("open_router", 131072, 131072),
|
|
||||||
LlmModel.META_LLAMA_4_MAVERICK: ModelMetadata("open_router", 1048576, 1000000),
|
|
||||||
LlmModel.GROK_4: ModelMetadata("open_router", 256000, 256000),
|
|
||||||
LlmModel.GROK_4_FAST: ModelMetadata("open_router", 2000000, 30000),
|
|
||||||
LlmModel.GROK_4_1_FAST: ModelMetadata("open_router", 2000000, 30000),
|
|
||||||
LlmModel.GROK_CODE_FAST_1: ModelMetadata("open_router", 256000, 10000),
|
|
||||||
LlmModel.KIMI_K2: ModelMetadata("open_router", 131000, 131000),
|
|
||||||
LlmModel.QWEN3_235B_A22B_THINKING: ModelMetadata("open_router", 262144, 262144),
|
|
||||||
LlmModel.QWEN3_CODER: ModelMetadata("open_router", 262144, 262144),
|
|
||||||
# Llama API models
|
# Llama API models
|
||||||
LlmModel.LLAMA_API_LLAMA_4_SCOUT: ModelMetadata("llama_api", 128000, 4028),
|
LlmModel.LLAMA_API_LLAMA_4_SCOUT: ModelMetadata(
|
||||||
LlmModel.LLAMA_API_LLAMA4_MAVERICK: ModelMetadata("llama_api", 128000, 4028),
|
"llama_api",
|
||||||
LlmModel.LLAMA_API_LLAMA3_3_8B: ModelMetadata("llama_api", 128000, 4028),
|
128000,
|
||||||
LlmModel.LLAMA_API_LLAMA3_3_70B: ModelMetadata("llama_api", 128000, 4028),
|
4028,
|
||||||
|
"Llama 4 Scout 17B 16E Instruct FP8",
|
||||||
|
"Llama API",
|
||||||
|
"Meta",
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
LlmModel.LLAMA_API_LLAMA4_MAVERICK: ModelMetadata(
|
||||||
|
"llama_api",
|
||||||
|
128000,
|
||||||
|
4028,
|
||||||
|
"Llama 4 Maverick 17B 128E Instruct FP8",
|
||||||
|
"Llama API",
|
||||||
|
"Meta",
|
||||||
|
1,
|
||||||
|
),
|
||||||
|
LlmModel.LLAMA_API_LLAMA3_3_8B: ModelMetadata(
|
||||||
|
"llama_api", 128000, 4028, "Llama 3.3 8B Instruct", "Llama API", "Meta", 1
|
||||||
|
),
|
||||||
|
LlmModel.LLAMA_API_LLAMA3_3_70B: ModelMetadata(
|
||||||
|
"llama_api", 128000, 4028, "Llama 3.3 70B Instruct", "Llama API", "Meta", 1
|
||||||
|
),
|
||||||
# v0 by Vercel models
|
# v0 by Vercel models
|
||||||
LlmModel.V0_1_5_MD: ModelMetadata("v0", 128000, 64000),
|
LlmModel.V0_1_5_MD: ModelMetadata("v0", 128000, 64000, "v0 1.5 MD", "V0", "V0", 1),
|
||||||
LlmModel.V0_1_5_LG: ModelMetadata("v0", 512000, 64000),
|
LlmModel.V0_1_5_LG: ModelMetadata("v0", 512000, 64000, "v0 1.5 LG", "V0", "V0", 1),
|
||||||
LlmModel.V0_1_0_MD: ModelMetadata("v0", 128000, 64000),
|
LlmModel.V0_1_0_MD: ModelMetadata("v0", 128000, 64000, "v0 1.0 MD", "V0", "V0", 1),
|
||||||
}
|
}
|
||||||
|
|
||||||
DEFAULT_LLM_MODEL = LlmModel.GPT5_2
|
DEFAULT_LLM_MODEL = LlmModel.GPT5_2
|
||||||
|
|||||||
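For orientation, a hedged sketch of what the seven positional arguments in the expanded entries appear to encode, based only on the values above (provider id, context window, max output tokens, display name, provider label, creator, and a small cost tier). The `ModelMetadataSketch` type below is illustrative only and is not the repository's `ModelMetadata` class; the real class may use different field names.

```python
from typing import NamedTuple, Optional


# Illustrative stand-in mirroring the positional order used above; the actual
# ModelMetadata class in the backend may name these fields differently.
class ModelMetadataSketch(NamedTuple):
    provider: str
    context_window: int
    max_output_tokens: Optional[int]
    display_name: str
    provider_label: str
    creator: str
    cost_tier: int


gemini_flash = ModelMetadataSketch(
    "open_router", 1048576, 65535, "Gemini 2.5 Flash", "OpenRouter", "Google", 1
)
assert gemini_flash.provider == "open_router"
assert gemini_flash.cost_tier == 1
```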
@@ -242,7 +242,7 @@ async def test_smart_decision_maker_tracks_llm_stats():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -343,7 +343,7 @@ async def test_smart_decision_maker_parameter_validation():
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -409,7 +409,7 @@ async def test_smart_decision_maker_parameter_validation():
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -471,7 +471,7 @@ async def test_smart_decision_maker_parameter_validation():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -535,7 +535,7 @@ async def test_smart_decision_maker_parameter_validation():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -658,7 +658,7 @@ async def test_smart_decision_maker_raw_response_conversion():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -730,7 +730,7 @@ async def test_smart_decision_maker_raw_response_conversion():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -786,7 +786,7 @@ async def test_smart_decision_maker_raw_response_conversion():
     outputs = {}
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -905,7 +905,7 @@ async def test_smart_decision_maker_agent_mode():
     # Create a mock execution context

     mock_execution_context = ExecutionContext(
-        safe_mode=False,
+        human_in_the_loop_safe_mode=False,
     )

     # Create a mock execution processor for agent mode tests

@@ -1027,7 +1027,7 @@ async def test_smart_decision_maker_traditional_mode_default():
     # Create execution context

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)

     # Create a mock execution processor for tests

@@ -386,7 +386,7 @@ async def test_output_yielding_with_dynamic_fields():
     outputs = {}
     from backend.data.execution import ExecutionContext

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(human_in_the_loop_safe_mode=False)
     mock_execution_processor = MagicMock()

     async for output_name, output_value in block.run(

@@ -609,7 +609,9 @@ async def test_validation_errors_dont_pollute_conversation():
     outputs = {}
     from backend.data.execution import ExecutionContext

-    mock_execution_context = ExecutionContext(safe_mode=False)
+    mock_execution_context = ExecutionContext(
+        human_in_the_loop_safe_mode=False
+    )

     # Create a proper mock execution processor for agent mode
     from collections import defaultdict
@@ -474,7 +474,7 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
         self.block_type = block_type
         self.webhook_config = webhook_config
         self.execution_stats: NodeExecutionStats = NodeExecutionStats()
-        self.requires_human_review: bool = False
+        self.is_sensitive_action: bool = False

         if self.webhook_config:
             if isinstance(self.webhook_config, BlockWebhookConfig):

@@ -637,8 +637,9 @@ class Block(ABC, Generic[BlockSchemaInputType, BlockSchemaOutputType]):
         - should_pause: True if execution should be paused for review
         - input_data_to_use: The input data to use (may be modified by reviewer)
         """
-        # Skip review if not required or safe mode is disabled
-        if not self.requires_human_review or not execution_context.safe_mode:
+        if not (
+            self.is_sensitive_action and execution_context.sensitive_action_safe_mode
+        ):
             return False, input_data

         from backend.blocks.helpers.review import HITLReviewHelper
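A standalone restatement of the new gating rule, to make the inverted condition easier to read: a block is only paused for review when the block opts in via `is_sensitive_action` and the execution context enables `sensitive_action_safe_mode`. This is an illustrative sketch, not the repository's own helper.

```python
# Mirrors the condition added to Block._check_for_review() above.
def should_pause_for_review(
    is_sensitive_action: bool, sensitive_action_safe_mode: bool
) -> bool:
    return is_sensitive_action and sensitive_action_safe_mode


assert should_pause_for_review(True, True) is True
assert should_pause_for_review(True, False) is False   # safe mode off -> run automatically
assert should_pause_for_review(False, True) is False   # block not sensitive -> no review
```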
@@ -99,10 +99,15 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.OPENAI_GPT_OSS_20B: 1,
     LlmModel.GEMINI_2_5_PRO: 4,
     LlmModel.GEMINI_3_PRO_PREVIEW: 5,
+    LlmModel.GEMINI_2_5_FLASH: 1,
+    LlmModel.GEMINI_2_0_FLASH: 1,
+    LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: 1,
+    LlmModel.GEMINI_2_0_FLASH_LITE: 1,
     LlmModel.MISTRAL_NEMO: 1,
     LlmModel.COHERE_COMMAND_R_08_2024: 1,
     LlmModel.COHERE_COMMAND_R_PLUS_08_2024: 3,
     LlmModel.DEEPSEEK_CHAT: 2,
+    LlmModel.DEEPSEEK_R1_0528: 1,
     LlmModel.PERPLEXITY_SONAR: 1,
     LlmModel.PERPLEXITY_SONAR_PRO: 5,
     LlmModel.PERPLEXITY_SONAR_DEEP_RESEARCH: 10,

@@ -126,11 +131,6 @@ MODEL_COST: dict[LlmModel, int] = {
     LlmModel.KIMI_K2: 1,
     LlmModel.QWEN3_235B_A22B_THINKING: 1,
     LlmModel.QWEN3_CODER: 9,
-    LlmModel.GEMINI_2_5_FLASH: 1,
-    LlmModel.GEMINI_2_0_FLASH: 1,
-    LlmModel.GEMINI_2_5_FLASH_LITE_PREVIEW: 1,
-    LlmModel.GEMINI_2_0_FLASH_LITE: 1,
-    LlmModel.DEEPSEEK_R1_0528: 1,
     # v0 by Vercel models
     LlmModel.V0_1_5_MD: 1,
     LlmModel.V0_1_5_LG: 2,
@@ -121,10 +121,14 @@ async def _raw_with_schema(
     Supports placeholders:
     - {schema_prefix}: Table/type prefix (e.g., "platform".)
     - {schema}: Raw schema name for application tables (e.g., platform)
-    - {pgvector_schema}: Schema where pgvector is installed (defaults to "public")
+
+    Note on pgvector types:
+        Use unqualified ::vector and <=> operator in queries. PostgreSQL resolves
+        these via search_path, which includes the schema where pgvector is installed
+        on all environments (local, CI, dev).

     Args:
-        query_template: SQL query with {schema_prefix}, {schema}, and/or {pgvector_schema} placeholders
+        query_template: SQL query with {schema_prefix} and/or {schema} placeholders
         *args: Query parameters
         execute: If False, executes SELECT query. If True, executes INSERT/UPDATE/DELETE.
         client: Optional Prisma client for transactions (only used when execute=True).

@@ -135,20 +139,16 @@ async def _raw_with_schema(
     Example with vector type:
         await execute_raw_with_schema(
-            'INSERT INTO {schema_prefix}"Embedding" (vec) VALUES ($1::{pgvector_schema}.vector)',
+            'INSERT INTO {schema_prefix}"Embedding" (vec) VALUES ($1::vector)',
             embedding_data
         )
     """
     schema = get_database_schema()
     schema_prefix = f'"{schema}".' if schema != "public" else ""
-    # pgvector extension is typically installed in "public" schema
-    # On Supabase it may be in "extensions" but "public" is the common default
-    pgvector_schema = "public"

     formatted_query = query_template.format(
         schema_prefix=schema_prefix,
         schema=schema,
-        pgvector_schema=pgvector_schema,
     )

     import prisma as prisma_module
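As a companion to the updated docstring, a hedged sketch of a similarity query written against the new convention: only `{schema_prefix}` is interpolated, and the vector cast and `<=>` operator are left unqualified so search_path resolves them. The table and column names are illustrative, and `execute_raw_with_schema` is assumed to be the public wrapper named in the docstring example; this would run inside an async function.

```python
# Illustrative similarity search; table/column names are examples only.
query_embedding = [0.0] * 1536  # placeholder embedding vector

rows = await execute_raw_with_schema(
    """
    SELECT "contentId", "embedding" <=> $1::vector AS distance
    FROM {schema_prefix}"UnifiedContentEmbedding"
    ORDER BY distance
    LIMIT 10
    """,
    query_embedding,
)
```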
@@ -103,8 +103,18 @@ class RedisEventBus(BaseRedisEventBus[M], ABC):
         return redis.get_redis()

     def publish_event(self, event: M, channel_key: str):
+        """
+        Publish an event to Redis. Gracefully handles connection failures
+        by logging the error instead of raising exceptions.
+        """
+        try:
             message, full_channel_name = self._serialize_message(event, channel_key)
             self.connection.publish(full_channel_name, message)
+        except Exception:
+            logger.exception(
+                f"Failed to publish event to Redis channel {channel_key}. "
+                "Event bus operation will continue without Redis connectivity."
+            )

     def listen_events(self, channel_key: str) -> Generator[M, None, None]:
         pubsub, full_channel_name = self._get_pubsub_channel(

@@ -128,9 +138,19 @@ class AsyncRedisEventBus(BaseRedisEventBus[M], ABC):
         return await redis.get_redis_async()

     async def publish_event(self, event: M, channel_key: str):
+        """
+        Publish an event to Redis. Gracefully handles connection failures
+        by logging the error instead of raising exceptions.
+        """
+        try:
             message, full_channel_name = self._serialize_message(event, channel_key)
             connection = await self.connection
             await connection.publish(full_channel_name, message)
+        except Exception:
+            logger.exception(
+                f"Failed to publish event to Redis channel {channel_key}. "
+                "Event bus operation will continue without Redis connectivity."
+            )

     async def listen_events(self, channel_key: str) -> AsyncGenerator[M, None]:
         pubsub, full_channel_name = self._get_pubsub_channel(
autogpt_platform/backend/backend/data/event_bus_test.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""
Tests for event_bus graceful degradation when Redis is unavailable.
"""

from unittest.mock import AsyncMock, patch

import pytest
from pydantic import BaseModel

from backend.data.event_bus import AsyncRedisEventBus


class TestEvent(BaseModel):
    """Test event model."""

    message: str


class TestNotificationBus(AsyncRedisEventBus[TestEvent]):
    """Test implementation of AsyncRedisEventBus."""

    Model = TestEvent

    @property
    def event_bus_name(self) -> str:
        return "test_event_bus"


@pytest.mark.asyncio
async def test_publish_event_handles_connection_failure_gracefully():
    """Test that publish_event logs exception instead of raising when Redis is unavailable."""
    bus = TestNotificationBus()
    event = TestEvent(message="test message")

    # Mock get_redis_async to raise connection error
    with patch(
        "backend.data.event_bus.redis.get_redis_async",
        side_effect=ConnectionError("Authentication required."),
    ):
        # Should not raise exception
        await bus.publish_event(event, "test_channel")


@pytest.mark.asyncio
async def test_publish_event_works_with_redis_available():
    """Test that publish_event works normally when Redis is available."""
    bus = TestNotificationBus()
    event = TestEvent(message="test message")

    # Mock successful Redis connection
    mock_redis = AsyncMock()
    mock_redis.publish = AsyncMock()

    with patch("backend.data.event_bus.redis.get_redis_async", return_value=mock_redis):
        await bus.publish_event(event, "test_channel")
        mock_redis.publish.assert_called_once()
@@ -81,7 +81,10 @@ class ExecutionContext(BaseModel):
     This includes information needed by blocks, sub-graphs, and execution management.
     """

-    safe_mode: bool = True
+    model_config = {"extra": "ignore"}
+
+    human_in_the_loop_safe_mode: bool = True
+    sensitive_action_safe_mode: bool = False
     user_timezone: str = "UTC"
     root_execution_id: Optional[str] = None
     parent_execution_id: Optional[str] = None
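A small hedged check of what `model_config = {"extra": "ignore"}` buys here: a previously serialized context that still carries the removed `safe_mode` key should validate without error and fall back to the new field defaults. The import path is the one used in the test hunks above; the payload shape is illustrative.

```python
from backend.data.execution import ExecutionContext

# Old payload shape (pre-rename) still containing "safe_mode".
legacy_payload = {"safe_mode": False, "user_timezone": "UTC"}

ctx = ExecutionContext.model_validate(legacy_payload)
# The unknown "safe_mode" key is ignored rather than raising a validation
# error; the renamed fields take their declared defaults.
assert ctx.human_in_the_loop_safe_mode is True
assert ctx.sensitive_action_safe_mode is False
```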
@@ -3,7 +3,7 @@ import logging
 import uuid
 from collections import defaultdict
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any, Literal, Optional, cast
+from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, cast

 from prisma.enums import SubmissionStatus
 from prisma.models import (

@@ -20,7 +20,7 @@ from prisma.types import (
     AgentNodeLinkCreateInput,
     StoreListingVersionWhereInput,
 )
-from pydantic import BaseModel, Field, create_model
+from pydantic import BaseModel, BeforeValidator, Field, create_model
 from pydantic.fields import computed_field

 from backend.blocks.agent import AgentExecutorBlock

@@ -62,7 +62,31 @@ logger = logging.getLogger(__name__)
 class GraphSettings(BaseModel):
-    human_in_the_loop_safe_mode: bool | None = None
+    # Use Annotated with BeforeValidator to coerce None to default values.
+    # This handles cases where the database has null values for these fields.
+    model_config = {"extra": "ignore"}
+
+    human_in_the_loop_safe_mode: Annotated[
+        bool, BeforeValidator(lambda v: v if v is not None else True)
+    ] = True
+    sensitive_action_safe_mode: Annotated[
+        bool, BeforeValidator(lambda v: v if v is not None else False)
+    ] = False
+
+    @classmethod
+    def from_graph(
+        cls,
+        graph: "GraphModel",
+        hitl_safe_mode: bool | None = None,
+        sensitive_action_safe_mode: bool = False,
+    ) -> "GraphSettings":
+        # Default to True if not explicitly set
+        if hitl_safe_mode is None:
+            hitl_safe_mode = True
+        return cls(
+            human_in_the_loop_safe_mode=hitl_safe_mode,
+            sensitive_action_safe_mode=sensitive_action_safe_mode,
+        )


 class Link(BaseDbModel):

@@ -244,10 +268,14 @@ class BaseGraph(BaseDbModel):
         return any(
             node.block_id
             for node in self.nodes
-            if (
-                node.block.block_type == BlockType.HUMAN_IN_THE_LOOP
-                or node.block.requires_human_review
-            )
+            if node.block.block_type == BlockType.HUMAN_IN_THE_LOOP
         )

+    @computed_field
+    @property
+    def has_sensitive_action(self) -> bool:
+        return any(
+            node.block_id for node in self.nodes if node.block.is_sensitive_action
+        )
+
     @property
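A quick hedged illustration of the `BeforeValidator` coercion added to `GraphSettings`: explicit `None` values coming from null database columns are coerced to the field defaults rather than failing validation. The import path is assumed from the module being changed here.

```python
from backend.data.graph import GraphSettings  # assumed import path

# Null columns arrive as None and are coerced by the BeforeValidator
# to the declared defaults instead of raising a validation error.
settings = GraphSettings.model_validate(
    {"human_in_the_loop_safe_mode": None, "sensitive_action_safe_mode": None}
)
assert settings.human_in_the_loop_safe_mode is True
assert settings.sensitive_action_safe_mode is False
```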
@@ -309,7 +309,7 @@ def ensure_embeddings_coverage():
     # Process in batches until no more missing embeddings
     while True:
-        result = db_client.backfill_missing_embeddings(batch_size=10)
+        result = db_client.backfill_missing_embeddings(batch_size=100)

         total_processed += result["processed"]
         total_success += result["success"]
@@ -873,11 +873,8 @@ async def add_graph_execution(
     settings = await gdb.get_graph_settings(user_id=user_id, graph_id=graph_id)

     execution_context = ExecutionContext(
-        safe_mode=(
-            settings.human_in_the_loop_safe_mode
-            if settings.human_in_the_loop_safe_mode is not None
-            else True
-        ),
+        human_in_the_loop_safe_mode=settings.human_in_the_loop_safe_mode,
+        sensitive_action_safe_mode=settings.sensitive_action_safe_mode,
         user_timezone=(
             user.timezone if user.timezone != USER_TIMEZONE_NOT_SET else "UTC"
         ),
@@ -386,6 +386,7 @@ async def test_add_graph_execution_is_repeatable(mocker: MockerFixture):
     mock_user.timezone = "UTC"
     mock_settings = mocker.MagicMock()
     mock_settings.human_in_the_loop_safe_mode = True
+    mock_settings.sensitive_action_safe_mode = False

     mock_udb.get_user_by_id = mocker.AsyncMock(return_value=mock_user)
     mock_gdb.get_graph_settings = mocker.AsyncMock(return_value=mock_settings)

@@ -651,6 +652,7 @@ async def test_add_graph_execution_with_nodes_to_skip(mocker: MockerFixture):
     mock_user.timezone = "UTC"
     mock_settings = mocker.MagicMock()
     mock_settings.human_in_the_loop_safe_mode = True
+    mock_settings.sensitive_action_safe_mode = False

     mock_udb.get_user_by_id = mocker.AsyncMock(return_value=mock_user)
     mock_gdb.get_graph_settings = mocker.AsyncMock(return_value=mock_settings)
@@ -1,9 +1,10 @@
 -- CreateExtension
 -- Supabase: pgvector must be enabled via Dashboard → Database → Extensions first
--- Create in public schema so vector type is available across all schemas
+-- Creates extension in current schema (determined by search_path from DATABASE_URL ?schema= param)
+-- This ensures vector type is in the same schema as tables, making ::vector work without explicit qualification
 DO $$
 BEGIN
-    CREATE EXTENSION IF NOT EXISTS "vector" WITH SCHEMA "public";
+    CREATE EXTENSION IF NOT EXISTS "vector";
 EXCEPTION WHEN OTHERS THEN
     RAISE NOTICE 'vector extension not available or already exists, skipping';
 END $$;

@@ -19,7 +20,7 @@ CREATE TABLE "UnifiedContentEmbedding" (
     "contentType" "ContentType" NOT NULL,
     "contentId" TEXT NOT NULL,
     "userId" TEXT,
-    "embedding" public.vector(1536) NOT NULL,
+    "embedding" vector(1536) NOT NULL,
     "searchableText" TEXT NOT NULL,
     "metadata" JSONB NOT NULL DEFAULT '{}',

@@ -45,4 +46,4 @@ CREATE UNIQUE INDEX "UnifiedContentEmbedding_contentType_contentId_userId_key" O
 -- Uses cosine distance operator (<=>), which matches the query in hybrid_search.py
 -- Note: Drop first in case Prisma created a btree index (Prisma doesn't support HNSW)
 DROP INDEX IF EXISTS "UnifiedContentEmbedding_embedding_idx";
-CREATE INDEX "UnifiedContentEmbedding_embedding_idx" ON "UnifiedContentEmbedding" USING hnsw ("embedding" public.vector_cosine_ops);
+CREATE INDEX "UnifiedContentEmbedding_embedding_idx" ON "UnifiedContentEmbedding" USING hnsw ("embedding" vector_cosine_ops);
@@ -366,12 +366,12 @@ def generate_block_markdown(
     lines.append("")

     # What it is (full description)
-    lines.append(f"### What it is")
+    lines.append("### What it is")
     lines.append(block.description or "No description available.")
     lines.append("")

     # How it works (manual section)
-    lines.append(f"### How it works")
+    lines.append("### How it works")
     how_it_works = manual_content.get(
         "how_it_works", "_Add technical explanation here._"
     )

@@ -383,7 +383,7 @@ def generate_block_markdown(
     # Inputs table (auto-generated)
     visible_inputs = [f for f in block.inputs if not f.hidden]
     if visible_inputs:
-        lines.append(f"### Inputs")
+        lines.append("### Inputs")
         lines.append("")
         lines.append("| Input | Description | Type | Required |")
         lines.append("|-------|-------------|------|----------|")

@@ -400,7 +400,7 @@ def generate_block_markdown(
     # Outputs table (auto-generated)
     visible_outputs = [f for f in block.outputs if not f.hidden]
     if visible_outputs:
-        lines.append(f"### Outputs")
+        lines.append("### Outputs")
         lines.append("")
         lines.append("| Output | Description | Type |")
         lines.append("|--------|-------------|------|")

@@ -414,7 +414,7 @@ def generate_block_markdown(
     lines.append("")

     # Possible use case (manual section)
-    lines.append(f"### Possible use case")
+    lines.append("### Possible use case")
     use_case = manual_content.get("use_case", "_Add practical use case examples here._")
     lines.append("<!-- MANUAL: use_case -->")
     lines.append(use_case)
@@ -11,6 +11,7 @@
   "forked_from_version": null,
   "has_external_trigger": false,
   "has_human_in_the_loop": false,
+  "has_sensitive_action": false,
   "id": "graph-123",
   "input_schema": {
     "properties": {},

@@ -11,6 +11,7 @@
   "forked_from_version": null,
   "has_external_trigger": false,
   "has_human_in_the_loop": false,
+  "has_sensitive_action": false,
   "id": "graph-123",
   "input_schema": {
     "properties": {},

@@ -27,6 +27,8 @@
     "properties": {}
   },
   "has_external_trigger": false,
+  "has_human_in_the_loop": false,
+  "has_sensitive_action": false,
   "trigger_setup_info": null,
   "new_output": false,
   "can_access_graph": true,
@@ -34,7 +36,8 @@
   "is_favorite": false,
   "recommended_schedule_cron": null,
   "settings": {
-    "human_in_the_loop_safe_mode": null
+    "human_in_the_loop_safe_mode": true,
+    "sensitive_action_safe_mode": false
   },
   "marketplace_listing": null
 },
@@ -65,6 +68,8 @@
     "properties": {}
   },
   "has_external_trigger": false,
+  "has_human_in_the_loop": false,
+  "has_sensitive_action": false,
   "trigger_setup_info": null,
   "new_output": false,
   "can_access_graph": false,
@@ -72,7 +77,8 @@
   "is_favorite": false,
   "recommended_schedule_cron": null,
   "settings": {
-    "human_in_the_loop_safe_mode": null
+    "human_in_the_loop_safe_mode": true,
+    "sensitive_action_safe_mode": false
   },
   "marketplace_listing": null
 }
@@ -175,6 +175,8 @@ While server components and actions are cool and cutting-edge, they introduce a
 - Prefer [React Query](https://tanstack.com/query/latest/docs/framework/react/overview) for server state, colocated near consumers (see [state colocation](https://kentcdodds.com/blog/state-colocation-will-make-your-react-app-faster))
 - Co-locate UI state inside components/hooks; keep global state minimal
+- Avoid `useMemo` and `useCallback` unless you have a measured performance issue
+- Do not abuse `useEffect`; prefer state colocation and derive values directly when possible

 ### Styling and components

@@ -549,9 +551,48 @@
Types:

- Prefer `interface` for object shapes
- Component props should be `interface Props { ... }` (not exported)
- Only use specific exported names (e.g., `export interface MyComponentProps`) when the interface needs to be used outside the component
- Keep type definitions inline with the component - do not create separate `types.ts` files unless types are shared across multiple files
- Use precise types; avoid `any` and unsafe casts

**Props naming examples:**

```tsx
// ✅ Good - internal props, not exported
interface Props {
  title: string;
  onClose: () => void;
}

export function Modal({ title, onClose }: Props) {
  // ...
}

// ✅ Good - exported when needed externally
export interface ModalProps {
  title: string;
  onClose: () => void;
}

export function Modal({ title, onClose }: ModalProps) {
  // ...
}

// ❌ Bad - unnecessarily specific name for internal use
interface ModalComponentProps {
  title: string;
  onClose: () => void;
}

// ❌ Bad - separate types.ts file for single component
// types.ts
export interface ModalProps { ... }

// Modal.tsx
import type { ModalProps } from './types';
```

Parameters:

- If more than one parameter is needed, pass a single `Args` object for clarity
@@ -16,6 +16,12 @@ export default defineConfig({
       client: "react-query",
       httpClient: "fetch",
       indexFiles: false,
+      mock: {
+        type: "msw",
+        baseUrl: "http://localhost:3000/api/proxy",
+        generateEachHttpStatus: true,
+        delay: 0,
+      },
       override: {
         mutator: {
           path: "./mutators/custom-mutator.ts",
@@ -15,6 +15,8 @@
     "types": "tsc --noEmit",
     "test": "NEXT_PUBLIC_PW_TEST=true next build --turbo && playwright test",
     "test-ui": "NEXT_PUBLIC_PW_TEST=true next build --turbo && playwright test --ui",
+    "test:unit": "vitest run",
+    "test:unit:watch": "vitest",
     "test:no-build": "playwright test",
     "gentests": "playwright codegen http://localhost:3000",
     "storybook": "storybook dev -p 6006",

@@ -118,6 +120,7 @@
   },
   "devDependencies": {
     "@chromatic-com/storybook": "4.1.2",
+    "happy-dom": "20.3.4",
     "@opentelemetry/instrumentation": "0.209.0",
     "@playwright/test": "1.56.1",
     "@storybook/addon-a11y": "9.1.5",

@@ -127,6 +130,8 @@
     "@storybook/nextjs": "9.1.5",
     "@tanstack/eslint-plugin-query": "5.91.2",
     "@tanstack/react-query-devtools": "5.90.2",
+    "@testing-library/dom": "10.4.1",
+    "@testing-library/react": "16.3.2",
     "@types/canvas-confetti": "1.9.0",
     "@types/lodash": "4.17.20",
     "@types/negotiator": "0.6.4",

@@ -135,6 +140,7 @@
     "@types/react-dom": "18.3.5",
     "@types/react-modal": "3.16.3",
     "@types/react-window": "1.8.8",
+    "@vitejs/plugin-react": "5.1.2",
     "axe-playwright": "2.2.2",
     "chromatic": "13.3.3",
     "concurrently": "9.2.1",

@@ -153,7 +159,9 @@
     "require-in-the-middle": "8.0.1",
     "storybook": "9.1.5",
     "tailwindcss": "3.4.17",
-    "typescript": "5.9.3"
+    "typescript": "5.9.3",
+    "vite-tsconfig-paths": "6.0.4",
+    "vitest": "4.0.17"
   },
   "msw": {
     "workerDirectory": [
1118  autogpt_platform/frontend/pnpm-lock.yaml (generated)
BIN   autogpt_platform/frontend/public/integrations/amazon.png (new file, 5.9 KiB)
BIN   autogpt_platform/frontend/public/integrations/… (new file, 19 KiB)
BIN   autogpt_platform/frontend/public/integrations/cohere.png (new file, 26 KiB)
BIN   autogpt_platform/frontend/public/integrations/deepseek.png (new file, 25 KiB)
BIN   autogpt_platform/frontend/public/integrations/gemini.png (new file, 72 KiB)
BIN   autogpt_platform/frontend/public/integrations/gryphe.png (new file, 21 KiB)
BIN   autogpt_platform/frontend/public/integrations/microsoft.webp (new file, 374 B)
BIN   autogpt_platform/frontend/public/integrations/mistral.png (new file, 663 B)
BIN   autogpt_platform/frontend/public/integrations/moonshot.png (new file, 40 KiB)
BIN   autogpt_platform/frontend/public/integrations/nousresearch.avif (new file, 4.1 KiB)
BIN   autogpt_platform/frontend/public/integrations/perplexity.webp (new file, 2.5 KiB)
BIN   autogpt_platform/frontend/public/integrations/qwen.png (new file, 52 KiB)
BIN   autogpt_platform/frontend/public/integrations/xai.webp (new file, 1.8 KiB)
@@ -0,0 +1,58 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { Text } from "@/components/atoms/Text/Text";
import { useToast } from "@/components/molecules/Toast/use-toast";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useRouter } from "next/navigation";
import { useEffect, useRef } from "react";

const LOGOUT_REDIRECT_DELAY_MS = 400;

function wait(ms: number): Promise<void> {
  return new Promise(function resolveAfterDelay(resolve) {
    setTimeout(resolve, ms);
  });
}

export default function LogoutPage() {
  const { logOut } = useSupabase();
  const { toast } = useToast();
  const router = useRouter();
  const hasStartedRef = useRef(false);

  useEffect(
    function handleLogoutEffect() {
      if (hasStartedRef.current) return;
      hasStartedRef.current = true;

      async function runLogout() {
        try {
          await logOut();
        } catch {
          toast({
            title: "Failed to log out. Redirecting to login.",
            variant: "destructive",
          });
        } finally {
          await wait(LOGOUT_REDIRECT_DELAY_MS);
          router.replace("/login");
        }
      }

      void runLogout();
    },
    [logOut, router, toast],
  );

  return (
    <div className="flex min-h-screen items-center justify-center px-4">
      <div className="flex flex-col items-center justify-center gap-4 py-8">
        <LoadingSpinner size="large" />
        <Text variant="body" className="text-center">
          Logging you out...
        </Text>
      </div>
    </div>
  );
}
@@ -9,7 +9,7 @@ export async function GET(request: Request) {
   const { searchParams, origin } = new URL(request.url);
   const code = searchParams.get("code");

-  let next = "/marketplace";
+  let next = "/";

   if (code) {
     const supabase = await getServerSupabase();
@@ -18,51 +18,47 @@ interface Props {
  fullWidth?: boolean;
}

interface SafeModeButtonProps {
  isEnabled: boolean;
  label: string;
  tooltipEnabled: string;
  tooltipDisabled: string;
  onToggle: () => void;
  isPending: boolean;
  fullWidth?: boolean;
}

function SafeModeButton({
  isEnabled,
  label,
  tooltipEnabled,
  tooltipDisabled,
  onToggle,
  isPending,
  fullWidth = false,
}: SafeModeButtonProps) {
  return (
    <Tooltip delayDuration={100}>
      <TooltipTrigger asChild>
        <Button
          variant={isEnabled ? "primary" : "outline"}
          size="small"
          onClick={onToggle}
          disabled={isPending}
          className={cn("justify-start", fullWidth ? "w-full" : "")}
        >
          {isEnabled ? (
            <>
              <ShieldCheckIcon weight="bold" size={16} />
              <Text variant="body" className="text-zinc-200">
                {label}: ON
              </Text>
            </>
          ) : (
            <>
              <ShieldIcon weight="bold" size={16} />
              <Text variant="body" className="text-zinc-600">
                {label}: OFF
              </Text>
            </>
          )}

@@ -71,16 +67,69 @@ export function FloatingSafeModeToggle({
        </Button>
      </TooltipTrigger>
      <TooltipContent>
        <div className="text-center">
          <div className="font-medium">
            {label}: {isEnabled ? "ON" : "OFF"}
          </div>
          <div className="mt-1 text-xs text-muted-foreground">
            {isEnabled ? tooltipEnabled : tooltipDisabled}
          </div>
        </div>
      </TooltipContent>
    </Tooltip>
  );
}

export function FloatingSafeModeToggle({
  graph,
  className,
  fullWidth = false,
}: Props) {
  const {
    currentHITLSafeMode,
    showHITLToggle,
    isHITLStateUndetermined,
    handleHITLToggle,
    currentSensitiveActionSafeMode,
    showSensitiveActionToggle,
    handleSensitiveActionToggle,
    isPending,
    shouldShowToggle,
  } = useAgentSafeMode(graph);

  if (!shouldShowToggle || isPending) {
    return null;
  }

  const showHITL = showHITLToggle && !isHITLStateUndetermined;
  const showSensitive = showSensitiveActionToggle;

  if (!showHITL && !showSensitive) {
    return null;
  }

  return (
    <div className={cn("fixed z-50 flex flex-col gap-2", className)}>
      {showHITL && (
        <SafeModeButton
          isEnabled={currentHITLSafeMode}
          label="Human in the loop block approval"
          tooltipEnabled="The agent will pause at human-in-the-loop blocks and wait for your approval"
          tooltipDisabled="Human in the loop blocks will proceed automatically"
          onToggle={handleHITLToggle}
          isPending={isPending}
          fullWidth={fullWidth}
        />
      )}
      {showSensitive && (
        <SafeModeButton
          isEnabled={currentSensitiveActionSafeMode}
          label="Sensitive actions blocks approval"
          tooltipEnabled="The agent will pause at sensitive action blocks and wait for your approval"
          tooltipDisabled="Sensitive action blocks will proceed automatically"
          onToggle={handleSensitiveActionToggle}
          isPending={isPending}
          fullWidth={fullWidth}
        />
      )}
    </div>
  );
}
@@ -1,134 +0,0 @@
"use client";

import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { cn } from "@/lib/utils";
import { List } from "@phosphor-icons/react";
import React, { useState } from "react";
import { ChatContainer } from "./components/ChatContainer/ChatContainer";
import { ChatErrorState } from "./components/ChatErrorState/ChatErrorState";
import { ChatLoadingState } from "./components/ChatLoadingState/ChatLoadingState";
import { SessionsDrawer } from "./components/SessionsDrawer/SessionsDrawer";
import { useChat } from "./useChat";

export interface ChatProps {
  className?: string;
  headerTitle?: React.ReactNode;
  showHeader?: boolean;
  showSessionInfo?: boolean;
  showNewChatButton?: boolean;
  onNewChat?: () => void;
  headerActions?: React.ReactNode;
}

export function Chat({
  className,
  headerTitle = "AutoGPT Copilot",
  showHeader = true,
  showSessionInfo = true,
  showNewChatButton = true,
  onNewChat,
  headerActions,
}: ChatProps) {
  const {
    messages,
    isLoading,
    isCreating,
    error,
    sessionId,
    createSession,
    clearSession,
    loadSession,
  } = useChat();

  const [isSessionsDrawerOpen, setIsSessionsDrawerOpen] = useState(false);

  const handleNewChat = () => {
    clearSession();
    onNewChat?.();
  };

  const handleSelectSession = async (sessionId: string) => {
    try {
      await loadSession(sessionId);
    } catch (err) {
      console.error("Failed to load session:", err);
    }
  };

  return (
    <div className={cn("flex h-full flex-col", className)}>
      {/* Header */}
      {showHeader && (
        <header className="shrink-0 border-t border-zinc-200 bg-white p-3">
          <div className="flex items-center justify-between">
            <div className="flex items-center gap-3">
              <button
                aria-label="View sessions"
                onClick={() => setIsSessionsDrawerOpen(true)}
                className="flex size-8 items-center justify-center rounded hover:bg-zinc-100"
              >
                <List width="1.25rem" height="1.25rem" />
              </button>
              {typeof headerTitle === "string" ? (
                <Text variant="h2" className="text-lg font-semibold">
                  {headerTitle}
                </Text>
              ) : (
                headerTitle
              )}
            </div>
            <div className="flex items-center gap-3">
              {showSessionInfo && sessionId && (
                <>
                  {showNewChatButton && (
                    <Button
                      variant="outline"
                      size="small"
                      onClick={handleNewChat}
                    >
                      New Chat
                    </Button>
                  )}
                </>
              )}
              {headerActions}
            </div>
          </div>
        </header>
      )}

      {/* Main Content */}
      <main className="flex min-h-0 flex-1 flex-col overflow-hidden">
        {/* Loading State - show when explicitly loading/creating OR when we don't have a session yet and no error */}
        {(isLoading || isCreating || (!sessionId && !error)) && (
          <ChatLoadingState
            message={isCreating ? "Creating session..." : "Loading..."}
          />
        )}

        {/* Error State */}
        {error && !isLoading && (
          <ChatErrorState error={error} onRetry={createSession} />
        )}

        {/* Session Content */}
        {sessionId && !isLoading && !error && (
          <ChatContainer
            sessionId={sessionId}
            initialMessages={messages}
            className="flex-1"
          />
        )}
      </main>

      {/* Sessions Drawer */}
      <SessionsDrawer
        isOpen={isSessionsDrawerOpen}
        onClose={() => setIsSessionsDrawerOpen(false)}
        onSelectSession={handleSelectSession}
        currentSessionId={sessionId}
      />
    </div>
  );
}
@@ -1,88 +0,0 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { cn } from "@/lib/utils";
import { useCallback } from "react";
import { usePageContext } from "../../usePageContext";
import { ChatInput } from "../ChatInput/ChatInput";
import { MessageList } from "../MessageList/MessageList";
import { QuickActionsWelcome } from "../QuickActionsWelcome/QuickActionsWelcome";
import { useChatContainer } from "./useChatContainer";

export interface ChatContainerProps {
  sessionId: string | null;
  initialMessages: SessionDetailResponse["messages"];
  className?: string;
}

export function ChatContainer({
  sessionId,
  initialMessages,
  className,
}: ChatContainerProps) {
  const { messages, streamingChunks, isStreaming, sendMessage } =
    useChatContainer({
      sessionId,
      initialMessages,
    });
  const { capturePageContext } = usePageContext();

  // Wrap sendMessage to automatically capture page context
  const sendMessageWithContext = useCallback(
    async (content: string, isUserMessage: boolean = true) => {
      const context = capturePageContext();
      await sendMessage(content, isUserMessage, context);
    },
    [sendMessage, capturePageContext],
  );

  const quickActions = [
    "Find agents for social media management",
    "Show me agents for content creation",
    "Help me automate my business",
    "What can you help me with?",
  ];

  return (
    <div
      className={cn("flex h-full min-h-0 flex-col", className)}
      style={{
        backgroundColor: "#ffffff",
        backgroundImage:
          "radial-gradient(#e5e5e5 0.5px, transparent 0.5px), radial-gradient(#e5e5e5 0.5px, #ffffff 0.5px)",
        backgroundSize: "20px 20px",
        backgroundPosition: "0 0, 10px 10px",
      }}
    >
      {/* Messages or Welcome Screen */}
      <div className="flex min-h-0 flex-1 flex-col overflow-hidden pb-24">
        {messages.length === 0 ? (
          <QuickActionsWelcome
            title="Welcome to AutoGPT Copilot"
            description="Start a conversation to discover and run AI agents."
            actions={quickActions}
            onActionClick={sendMessageWithContext}
            disabled={isStreaming || !sessionId}
          />
        ) : (
          <MessageList
            messages={messages}
            streamingChunks={streamingChunks}
            isStreaming={isStreaming}
            onSendMessage={sendMessageWithContext}
            className="flex-1"
          />
        )}
      </div>

      {/* Input - Always visible */}
      <div className="fixed bottom-0 left-0 right-0 z-50 border-t border-zinc-200 bg-white p-4">
        <ChatInput
          onSend={sendMessageWithContext}
          disabled={isStreaming || !sessionId}
          placeholder={
            sessionId ? "Type your message..." : "Creating session..."
          }
        />
      </div>
    </div>
  );
}
@@ -1,64 +0,0 @@
import { Input } from "@/components/atoms/Input/Input";
import { cn } from "@/lib/utils";
import { ArrowUpIcon } from "@phosphor-icons/react";
import { useChatInput } from "./useChatInput";

export interface ChatInputProps {
  onSend: (message: string) => void;
  disabled?: boolean;
  placeholder?: string;
  className?: string;
}

export function ChatInput({
  onSend,
  disabled = false,
  placeholder = "Type your message...",
  className,
}: ChatInputProps) {
  const inputId = "chat-input";
  const { value, setValue, handleKeyDown, handleSend } = useChatInput({
    onSend,
    disabled,
    maxRows: 5,
    inputId,
  });

  return (
    <div className={cn("relative flex-1", className)}>
      <Input
        id={inputId}
        label="Chat message input"
        hideLabel
        type="textarea"
        value={value}
        onChange={(e) => setValue(e.target.value)}
        onKeyDown={handleKeyDown}
        placeholder={placeholder}
        disabled={disabled}
        rows={1}
        wrapperClassName="mb-0 relative"
        className="pr-12"
      />
      <span id="chat-input-hint" className="sr-only">
        Press Enter to send, Shift+Enter for new line
      </span>

      <button
        onClick={handleSend}
        disabled={disabled || !value.trim()}
        className={cn(
          "absolute right-3 top-1/2 flex h-8 w-8 -translate-y-1/2 items-center justify-center rounded-full",
          "border border-zinc-800 bg-zinc-800 text-white",
          "hover:border-zinc-900 hover:bg-zinc-900",
          "disabled:border-zinc-200 disabled:bg-zinc-200 disabled:text-white disabled:opacity-50",
          "transition-colors focus-visible:outline-none focus-visible:ring-1 focus-visible:ring-neutral-950",
          "disabled:pointer-events-none",
        )}
        aria-label="Send message"
      >
        <ArrowUpIcon className="h-3 w-3" weight="bold" />
      </button>
    </div>
  );
}
@@ -1,60 +0,0 @@
import { KeyboardEvent, useCallback, useEffect, useState } from "react";

interface UseChatInputArgs {
  onSend: (message: string) => void;
  disabled?: boolean;
  maxRows?: number;
  inputId?: string;
}

export function useChatInput({
  onSend,
  disabled = false,
  maxRows = 5,
  inputId = "chat-input",
}: UseChatInputArgs) {
  const [value, setValue] = useState("");

  useEffect(() => {
    const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
    if (!textarea) return;
    textarea.style.height = "auto";
    const lineHeight = parseInt(
      window.getComputedStyle(textarea).lineHeight,
      10,
    );
    const maxHeight = lineHeight * maxRows;
    const newHeight = Math.min(textarea.scrollHeight, maxHeight);
    textarea.style.height = `${newHeight}px`;
    textarea.style.overflowY =
      textarea.scrollHeight > maxHeight ? "auto" : "hidden";
  }, [value, maxRows, inputId]);

  const handleSend = useCallback(() => {
    if (disabled || !value.trim()) return;
    onSend(value.trim());
    setValue("");
    const textarea = document.getElementById(inputId) as HTMLTextAreaElement;
    if (textarea) {
      textarea.style.height = "auto";
    }
  }, [value, onSend, disabled, inputId]);

  const handleKeyDown = useCallback(
    (event: KeyboardEvent<HTMLInputElement | HTMLTextAreaElement>) => {
      if (event.key === "Enter" && !event.shiftKey) {
        event.preventDefault();
        handleSend();
      }
      // Shift+Enter allows default behavior (new line) - no need to handle explicitly
    },
    [handleSend],
  );

  return {
    value,
    setValue,
    handleKeyDown,
    handleSend,
  };
}
@@ -1,121 +0,0 @@
"use client";

import { cn } from "@/lib/utils";
import { ChatMessage } from "../ChatMessage/ChatMessage";
import type { ChatMessageData } from "../ChatMessage/useChatMessage";
import { StreamingMessage } from "../StreamingMessage/StreamingMessage";
import { ThinkingMessage } from "../ThinkingMessage/ThinkingMessage";
import { useMessageList } from "./useMessageList";

export interface MessageListProps {
  messages: ChatMessageData[];
  streamingChunks?: string[];
  isStreaming?: boolean;
  className?: string;
  onStreamComplete?: () => void;
  onSendMessage?: (content: string) => void;
}

export function MessageList({
  messages,
  streamingChunks = [],
  isStreaming = false,
  className,
  onStreamComplete,
  onSendMessage,
}: MessageListProps) {
  const { messagesEndRef, messagesContainerRef } = useMessageList({
    messageCount: messages.length,
    isStreaming,
  });

  return (
    <div
      ref={messagesContainerRef}
      className={cn(
        "flex-1 overflow-y-auto",
        "scrollbar-thin scrollbar-track-transparent scrollbar-thumb-zinc-300",
        className,
      )}
    >
      <div className="mx-auto flex max-w-3xl flex-col py-4">
        {/* Render all persisted messages */}
        {messages.map((message, index) => {
          // Check if current message is an agent_output tool_response
          // and if previous message is an assistant message
          let agentOutput: ChatMessageData | undefined;

          if (message.type === "tool_response" && message.result) {
            let parsedResult: Record<string, unknown> | null = null;
            try {
              parsedResult =
                typeof message.result === "string"
                  ? JSON.parse(message.result)
                  : (message.result as Record<string, unknown>);
            } catch {
              parsedResult = null;
            }
            if (parsedResult?.type === "agent_output") {
              const prevMessage = messages[index - 1];
              if (
                prevMessage &&
                prevMessage.type === "message" &&
                prevMessage.role === "assistant"
              ) {
                // This agent output will be rendered inside the previous assistant message
                // Skip rendering this message separately
                return null;
              }
            }
          }

          // Check if next message is an agent_output tool_response to include in current assistant message
          if (message.type === "message" && message.role === "assistant") {
            const nextMessage = messages[index + 1];
            if (
              nextMessage &&
              nextMessage.type === "tool_response" &&
              nextMessage.result
            ) {
              let parsedResult: Record<string, unknown> | null = null;
              try {
                parsedResult =
                  typeof nextMessage.result === "string"
                    ? JSON.parse(nextMessage.result)
                    : (nextMessage.result as Record<string, unknown>);
              } catch {
                parsedResult = null;
              }
              if (parsedResult?.type === "agent_output") {
                agentOutput = nextMessage;
              }
            }
          }

          return (
            <ChatMessage
              key={index}
              message={message}
              onSendMessage={onSendMessage}
              agentOutput={agentOutput}
            />
          );
        })}

        {/* Render thinking message when streaming but no chunks yet */}
        {isStreaming && streamingChunks.length === 0 && <ThinkingMessage />}

        {/* Render streaming message if active */}
        {isStreaming && streamingChunks.length > 0 && (
          <StreamingMessage
            chunks={streamingChunks}
            onComplete={onStreamComplete}
          />
        )}

        {/* Invisible div to scroll to */}
        <div ref={messagesEndRef} />
      </div>
    </div>
  );
}
@@ -1,24 +0,0 @@
import { Text } from "@/components/atoms/Text/Text";
import { cn } from "@/lib/utils";
import { WrenchIcon } from "@phosphor-icons/react";
import { getToolActionPhrase } from "../../helpers";

export interface ToolCallMessageProps {
  toolName: string;
  className?: string;
}

export function ToolCallMessage({ toolName, className }: ToolCallMessageProps) {
  return (
    <div className={cn("flex items-center justify-center gap-2", className)}>
      <WrenchIcon
        size={14}
        weight="bold"
        className="flex-shrink-0 text-neutral-500"
      />
      <Text variant="small" className="text-neutral-500">
        {getToolActionPhrase(toolName)}...
      </Text>
    </div>
  );
}
@@ -1,260 +0,0 @@
import { Text } from "@/components/atoms/Text/Text";
import "@/components/contextual/OutputRenderers";
import {
  globalRegistry,
  OutputItem,
} from "@/components/contextual/OutputRenderers";
import { cn } from "@/lib/utils";
import type { ToolResult } from "@/types/chat";
import { WrenchIcon } from "@phosphor-icons/react";
import { getToolActionPhrase } from "../../helpers";

export interface ToolResponseMessageProps {
  toolName: string;
  result?: ToolResult;
  success?: boolean;
  className?: string;
}

export function ToolResponseMessage({
  toolName,
  result,
  success: _success = true,
  className,
}: ToolResponseMessageProps) {
  if (!result) {
    return (
      <div className={cn("flex items-center justify-center gap-2", className)}>
        <WrenchIcon
          size={14}
          weight="bold"
          className="flex-shrink-0 text-neutral-500"
        />
        <Text variant="small" className="text-neutral-500">
          {getToolActionPhrase(toolName)}...
        </Text>
      </div>
    );
  }

  let parsedResult: Record<string, unknown> | null = null;
  try {
    parsedResult =
      typeof result === "string"
        ? JSON.parse(result)
        : (result as Record<string, unknown>);
  } catch {
    parsedResult = null;
  }

  if (parsedResult && typeof parsedResult === "object") {
    const responseType = parsedResult.type as string | undefined;

    if (responseType === "agent_output") {
      const execution = parsedResult.execution as
        | {
            outputs?: Record<string, unknown[]>;
          }
        | null
        | undefined;
      const outputs = execution?.outputs || {};
      const message = parsedResult.message as string | undefined;

      return (
        <div className={cn("space-y-4 px-4 py-2", className)}>
          <div className="flex items-center gap-2">
            <WrenchIcon
              size={14}
              weight="bold"
              className="flex-shrink-0 text-neutral-500"
            />
            <Text variant="small" className="text-neutral-500">
              {getToolActionPhrase(toolName)}
            </Text>
          </div>
          {message && (
            <div className="rounded border p-4">
              <Text variant="small" className="text-neutral-600">
                {message}
              </Text>
            </div>
          )}
          {Object.keys(outputs).length > 0 && (
            <div className="space-y-4">
              {Object.entries(outputs).map(([outputName, values]) =>
                values.map((value, index) => {
                  const renderer = globalRegistry.getRenderer(value);
                  if (renderer) {
                    return (
                      <OutputItem
                        key={`${outputName}-${index}`}
                        value={value}
                        renderer={renderer}
                        label={outputName}
                      />
                    );
                  }
                  return (
                    <div
                      key={`${outputName}-${index}`}
                      className="rounded border p-4"
                    >
                      <Text variant="large-medium" className="mb-2 capitalize">
                        {outputName}
                      </Text>
                      <pre className="overflow-auto text-sm">
                        {JSON.stringify(value, null, 2)}
                      </pre>
                    </div>
                  );
                }),
              )}
            </div>
          )}
        </div>
      );
    }

    if (responseType === "block_output" && parsedResult.outputs) {
      const outputs = parsedResult.outputs as Record<string, unknown[]>;

      return (
        <div className={cn("space-y-4 px-4 py-2", className)}>
          <div className="flex items-center gap-2">
            <WrenchIcon
              size={14}
              weight="bold"
              className="flex-shrink-0 text-neutral-500"
            />
            <Text variant="small" className="text-neutral-500">
              {getToolActionPhrase(toolName)}
            </Text>
          </div>
          <div className="space-y-4">
            {Object.entries(outputs).map(([outputName, values]) =>
              values.map((value, index) => {
                const renderer = globalRegistry.getRenderer(value);
                if (renderer) {
                  return (
                    <OutputItem
                      key={`${outputName}-${index}`}
                      value={value}
                      renderer={renderer}
                      label={outputName}
                    />
                  );
                }
                return (
                  <div
                    key={`${outputName}-${index}`}
                    className="rounded border p-4"
                  >
                    <Text variant="large-medium" className="mb-2 capitalize">
                      {outputName}
                    </Text>
                    <pre className="overflow-auto text-sm">
                      {JSON.stringify(value, null, 2)}
                    </pre>
                  </div>
                );
              }),
            )}
          </div>
        </div>
      );
    }

    // Handle other response types with a message field (e.g., understanding_updated)
    if (parsedResult.message && typeof parsedResult.message === "string") {
      // Format tool name from snake_case to Title Case
      const formattedToolName = toolName
        .split("_")
        .map((word) => word.charAt(0).toUpperCase() + word.slice(1))
        .join(" ");

      // Clean up message - remove incomplete user_name references
      let cleanedMessage = parsedResult.message;
      // Remove "Updated understanding with: user_name" pattern if user_name is just a placeholder
      cleanedMessage = cleanedMessage.replace(
        /Updated understanding with:\s*user_name\.?\s*/gi,
        "",
      );
      // Remove standalone user_name references
      cleanedMessage = cleanedMessage.replace(/\buser_name\b\.?\s*/gi, "");
      cleanedMessage = cleanedMessage.trim();

      // Only show message if it has content after cleaning
      if (!cleanedMessage) {
        return (
          <div
            className={cn(
              "flex items-center justify-center gap-2 px-4 py-2",
              className,
            )}
          >
            <WrenchIcon
              size={14}
              weight="bold"
              className="flex-shrink-0 text-neutral-500"
            />
            <Text variant="small" className="text-neutral-500">
              {formattedToolName}
            </Text>
          </div>
        );
      }

      return (
        <div className={cn("space-y-2 px-4 py-2", className)}>
          <div className="flex items-center justify-center gap-2">
            <WrenchIcon
              size={14}
              weight="bold"
              className="flex-shrink-0 text-neutral-500"
            />
            <Text variant="small" className="text-neutral-500">
              {formattedToolName}
            </Text>
          </div>
          <div className="rounded border p-4">
            <Text variant="small" className="text-neutral-600">
              {cleanedMessage}
            </Text>
          </div>
        </div>
      );
    }
  }

  const renderer = globalRegistry.getRenderer(result);
  if (renderer) {
    return (
      <div className={cn("px-4 py-2", className)}>
        <div className="mb-2 flex items-center gap-2">
          <WrenchIcon
            size={14}
            weight="bold"
            className="flex-shrink-0 text-neutral-500"
          />
          <Text variant="small" className="text-neutral-500">
            {getToolActionPhrase(toolName)}
          </Text>
        </div>
        <OutputItem value={result} renderer={renderer} />
      </div>
    );
  }

  return (
    <div className={cn("flex items-center justify-center gap-2", className)}>
      <WrenchIcon
        size={14}
        weight="bold"
        className="flex-shrink-0 text-neutral-500"
      />
      <Text variant="small" className="text-neutral-500">
        {getToolActionPhrase(toolName)}...
      </Text>
    </div>
  );
}
@@ -1,66 +0,0 @@
/**
 * Maps internal tool names to user-friendly display names with emojis.
 * @deprecated Use getToolActionPhrase or getToolCompletionPhrase for status messages
 *
 * @param toolName - The internal tool name from the backend
 * @returns A user-friendly display name with an emoji prefix
 */
export function getToolDisplayName(toolName: string): string {
  const toolDisplayNames: Record<string, string> = {
    find_agent: "🔍 Search Marketplace",
    get_agent_details: "📋 Get Agent Details",
    check_credentials: "🔑 Check Credentials",
    setup_agent: "⚙️ Setup Agent",
    run_agent: "▶️ Run Agent",
    get_required_setup_info: "📝 Get Setup Requirements",
  };
  return toolDisplayNames[toolName] || toolName;
}

/**
 * Maps internal tool names to human-friendly action phrases (present continuous).
 * Used for tool call messages to indicate what action is currently happening.
 *
 * @param toolName - The internal tool name from the backend
 * @returns A human-friendly action phrase in present continuous tense
 */
export function getToolActionPhrase(toolName: string): string {
  const toolActionPhrases: Record<string, string> = {
    find_agent: "Looking for agents in the marketplace",
    agent_carousel: "Looking for agents in the marketplace",
    get_agent_details: "Learning about the agent",
    check_credentials: "Checking your credentials",
    setup_agent: "Setting up the agent",
    execution_started: "Running the agent",
    run_agent: "Running the agent",
    get_required_setup_info: "Getting setup requirements",
    schedule_agent: "Scheduling the agent to run",
  };

  // Return mapped phrase or generate human-friendly fallback
  return toolActionPhrases[toolName] || toolName;
}

/**
 * Maps internal tool names to human-friendly completion phrases (past tense).
 * Used for tool response messages to indicate what action was completed.
 *
 * @param toolName - The internal tool name from the backend
 * @returns A human-friendly completion phrase in past tense
 */
export function getToolCompletionPhrase(toolName: string): string {
  const toolCompletionPhrases: Record<string, string> = {
    find_agent: "Finished searching the marketplace",
    get_agent_details: "Got agent details",
    check_credentials: "Checked credentials",
    setup_agent: "Agent setup complete",
    run_agent: "Agent execution started",
    get_required_setup_info: "Got setup requirements",
  };

  // Return mapped phrase or generate human-friendly fallback
  return (
    toolCompletionPhrases[toolName] ||
    `Finished ${toolName.replace(/_/g, " ").replace("...", "")}`
  );
}
@@ -1,271 +0,0 @@
import {
  getGetV2GetSessionQueryKey,
  getGetV2GetSessionQueryOptions,
  postV2CreateSession,
  useGetV2GetSession,
  usePatchV2SessionAssignUser,
  usePostV2CreateSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import { okData } from "@/app/api/helpers";
import { isValidUUID } from "@/lib/utils";
import { Key, storage } from "@/services/storage/local-storage";
import { useQueryClient } from "@tanstack/react-query";
import { useCallback, useEffect, useMemo, useRef, useState } from "react";
import { toast } from "sonner";

interface UseChatSessionArgs {
  urlSessionId?: string | null;
  autoCreate?: boolean;
}

export function useChatSession({
  urlSessionId,
  autoCreate = false,
}: UseChatSessionArgs = {}) {
  const queryClient = useQueryClient();
  const [sessionId, setSessionId] = useState<string | null>(null);
  const [error, setError] = useState<Error | null>(null);
  const justCreatedSessionIdRef = useRef<string | null>(null);

  useEffect(() => {
    if (urlSessionId) {
      if (!isValidUUID(urlSessionId)) {
        console.error("Invalid session ID format:", urlSessionId);
        toast.error("Invalid session ID", {
          description:
            "The session ID in the URL is not valid. Starting a new session...",
        });
        setSessionId(null);
        storage.clean(Key.CHAT_SESSION_ID);
        return;
      }
      setSessionId(urlSessionId);
      storage.set(Key.CHAT_SESSION_ID, urlSessionId);
    } else {
      const storedSessionId = storage.get(Key.CHAT_SESSION_ID);
      if (storedSessionId) {
        if (!isValidUUID(storedSessionId)) {
          console.error("Invalid stored session ID:", storedSessionId);
          storage.clean(Key.CHAT_SESSION_ID);
          setSessionId(null);
        } else {
          setSessionId(storedSessionId);
        }
      } else if (autoCreate) {
        setSessionId(null);
      }
    }
  }, [urlSessionId, autoCreate]);

  const {
    mutateAsync: createSessionMutation,
    isPending: isCreating,
    error: createError,
  } = usePostV2CreateSession();

  const {
    data: sessionData,
    isLoading: isLoadingSession,
    error: loadError,
    refetch,
  } = useGetV2GetSession(sessionId || "", {
    query: {
      enabled: !!sessionId,
      select: okData,
      staleTime: Infinity, // Never mark as stale
      refetchOnMount: false, // Don't refetch on component mount
      refetchOnWindowFocus: false, // Don't refetch when window regains focus
      refetchOnReconnect: false, // Don't refetch when network reconnects
      retry: 1,
    },
  });

  const { mutateAsync: claimSessionMutation } = usePatchV2SessionAssignUser();

  const session = useMemo(() => {
    if (sessionData) return sessionData;

    if (sessionId && justCreatedSessionIdRef.current === sessionId) {
      return {
        id: sessionId,
        user_id: null,
        messages: [],
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
      } as SessionDetailResponse;
    }
    return null;
  }, [sessionData, sessionId]);

  const messages = session?.messages || [];
  const isLoading = isCreating || isLoadingSession;

  useEffect(() => {
    if (createError) {
      setError(
        createError instanceof Error
          ? createError
          : new Error("Failed to create session"),
      );
    } else if (loadError) {
      setError(
        loadError instanceof Error
          ? loadError
          : new Error("Failed to load session"),
      );
    } else {
      setError(null);
    }
  }, [createError, loadError]);

  const createSession = useCallback(
    async function createSession() {
      try {
        setError(null);
        const response = await postV2CreateSession({
          body: JSON.stringify({}),
        });
        if (response.status !== 200) {
          throw new Error("Failed to create session");
        }
        const newSessionId = response.data.id;
        setSessionId(newSessionId);
        storage.set(Key.CHAT_SESSION_ID, newSessionId);
        justCreatedSessionIdRef.current = newSessionId;
        setTimeout(() => {
          if (justCreatedSessionIdRef.current === newSessionId) {
            justCreatedSessionIdRef.current = null;
          }
        }, 10000);
        return newSessionId;
      } catch (err) {
        const error =
          err instanceof Error ? err : new Error("Failed to create session");
        setError(error);
        toast.error("Failed to create chat session", {
          description: error.message,
        });
        throw error;
      }
    },
    [createSessionMutation],
  );

  const loadSession = useCallback(
    async function loadSession(id: string) {
      try {
        setError(null);
        // Invalidate the query cache for this session to force a fresh fetch
        await queryClient.invalidateQueries({
          queryKey: getGetV2GetSessionQueryKey(id),
        });
        // Set sessionId after invalidation to ensure the hook refetches
        setSessionId(id);
        storage.set(Key.CHAT_SESSION_ID, id);
        // Force fetch with fresh data (bypass cache)
        const queryOptions = getGetV2GetSessionQueryOptions(id, {
          query: {
            staleTime: 0, // Force fresh fetch
            retry: 1,
          },
        });
        const result = await queryClient.fetchQuery(queryOptions);
        if (!result || ("status" in result && result.status !== 200)) {
          console.warn("Session not found on server, clearing local state");
          storage.clean(Key.CHAT_SESSION_ID);
          setSessionId(null);
          throw new Error("Session not found");
        }
      } catch (err) {
        const error =
          err instanceof Error ? err : new Error("Failed to load session");
        setError(error);
        throw error;
      }
    },
    [queryClient],
  );

  const refreshSession = useCallback(
    async function refreshSession() {
      if (!sessionId) {
        console.log("[refreshSession] Skipping - no session ID");
        return;
      }
      try {
        setError(null);
        await refetch();
      } catch (err) {
        const error =
          err instanceof Error ? err : new Error("Failed to refresh session");
        setError(error);
        throw error;
      }
    },
    [sessionId, refetch],
  );

  const claimSession = useCallback(
    async function claimSession(id: string) {
      try {
        setError(null);
        await claimSessionMutation({ sessionId: id });
        if (justCreatedSessionIdRef.current === id) {
          justCreatedSessionIdRef.current = null;
        }
        await queryClient.invalidateQueries({
          queryKey: getGetV2GetSessionQueryKey(id),
        });
        await refetch();
        toast.success("Session claimed successfully", {
          description: "Your chat history has been saved to your account",
        });
      } catch (err: unknown) {
        const error =
          err instanceof Error ? err : new Error("Failed to claim session");
        const is404 =
          (typeof err === "object" &&
            err !== null &&
            "status" in err &&
            err.status === 404) ||
          (typeof err === "object" &&
            err !== null &&
            "response" in err &&
            typeof err.response === "object" &&
            err.response !== null &&
            "status" in err.response &&
            err.response.status === 404);
        if (!is404) {
          setError(error);
          toast.error("Failed to claim session", {
            description: error.message || "Unable to claim session",
          });
        }
        throw error;
      }
    },
    [claimSessionMutation, queryClient, refetch],
  );

  const clearSession = useCallback(function clearSession() {
    setSessionId(null);
    setError(null);
    storage.clean(Key.CHAT_SESSION_ID);
    justCreatedSessionIdRef.current = null;
  }, []);

  return {
    session,
    sessionId,
    messages,
    isLoading,
    isCreating,
    error,
    createSession,
    loadSession,
    refreshSession,
    claimSession,
    clearSession,
  };
}
@@ -1,27 +0,0 @@
"use client";

import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
import { useRouter } from "next/navigation";
import { useEffect } from "react";
import { Chat } from "./components/Chat/Chat";

export default function ChatPage() {
  const isChatEnabled = useGetFlag(Flag.CHAT);
  const router = useRouter();

  useEffect(() => {
    if (isChatEnabled === false) {
      router.push("/marketplace");
    }
  }, [isChatEnabled, router]);

  if (isChatEnabled === null || isChatEnabled === false) {
    return null;
  }

  return (
    <div className="flex h-full flex-col">
      <Chat className="flex-1" />
    </div>
  );
}
@@ -0,0 +1,88 @@
"use client";

import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { NAVBAR_HEIGHT_PX } from "@/lib/constants";
import type { ReactNode } from "react";
import { DesktopSidebar } from "./components/DesktopSidebar/DesktopSidebar";
import { LoadingState } from "./components/LoadingState/LoadingState";
import { MobileDrawer } from "./components/MobileDrawer/MobileDrawer";
import { MobileHeader } from "./components/MobileHeader/MobileHeader";
import { useCopilotShell } from "./useCopilotShell";

interface Props {
  children: ReactNode;
}

export function CopilotShell({ children }: Props) {
  const {
    isMobile,
    isDrawerOpen,
    isLoading,
    isLoggedIn,
    hasActiveSession,
    sessions,
    currentSessionId,
    handleSelectSession,
    handleOpenDrawer,
    handleCloseDrawer,
    handleDrawerOpenChange,
    handleNewChat,
    hasNextPage,
    isFetchingNextPage,
    fetchNextPage,
    isReadyToShowContent,
  } = useCopilotShell();

  if (!isLoggedIn) {
    return (
      <div className="flex h-full items-center justify-center">
        <LoadingSpinner size="large" />
      </div>
    );
  }

  return (
    <div
      className="flex overflow-hidden bg-[#EFEFF0]"
      style={{ height: `calc(100vh - ${NAVBAR_HEIGHT_PX}px)` }}
    >
      {!isMobile && (
        <DesktopSidebar
          sessions={sessions}
          currentSessionId={currentSessionId}
          isLoading={isLoading}
          hasNextPage={hasNextPage}
          isFetchingNextPage={isFetchingNextPage}
          onSelectSession={handleSelectSession}
          onFetchNextPage={fetchNextPage}
          onNewChat={handleNewChat}
          hasActiveSession={Boolean(hasActiveSession)}
        />
      )}

      <div className="relative flex min-h-0 flex-1 flex-col">
        {isMobile && <MobileHeader onOpenDrawer={handleOpenDrawer} />}
        <div className="flex min-h-0 flex-1 flex-col">
          {isReadyToShowContent ? children : <LoadingState />}
        </div>
      </div>

      {isMobile && (
        <MobileDrawer
          isOpen={isDrawerOpen}
          sessions={sessions}
          currentSessionId={currentSessionId}
          isLoading={isLoading}
          hasNextPage={hasNextPage}
          isFetchingNextPage={isFetchingNextPage}
          onSelectSession={handleSelectSession}
          onFetchNextPage={fetchNextPage}
          onNewChat={handleNewChat}
          onClose={handleCloseDrawer}
          onOpenChange={handleDrawerOpenChange}
          hasActiveSession={Boolean(hasActiveSession)}
        />
      )}
    </div>
  );
}
@@ -0,0 +1,70 @@
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { Button } from "@/components/atoms/Button/Button";
import { Text } from "@/components/atoms/Text/Text";
import { scrollbarStyles } from "@/components/styles/scrollbars";
import { cn } from "@/lib/utils";
import { Plus } from "@phosphor-icons/react";
import { SessionsList } from "../SessionsList/SessionsList";

interface Props {
  sessions: SessionSummaryResponse[];
  currentSessionId: string | null;
  isLoading: boolean;
  hasNextPage: boolean;
  isFetchingNextPage: boolean;
  onSelectSession: (sessionId: string) => void;
  onFetchNextPage: () => void;
  onNewChat: () => void;
  hasActiveSession: boolean;
}

export function DesktopSidebar({
  sessions,
  currentSessionId,
  isLoading,
  hasNextPage,
  isFetchingNextPage,
  onSelectSession,
  onFetchNextPage,
  onNewChat,
  hasActiveSession,
}: Props) {
  return (
    <aside className="flex h-full w-80 flex-col border-r border-zinc-100 bg-zinc-50">
      <div className="shrink-0 px-6 py-4">
        <Text variant="h3" size="body-medium">
          Your chats
        </Text>
      </div>
      <div
        className={cn(
          "flex min-h-0 flex-1 flex-col overflow-y-auto px-3 py-3",
          scrollbarStyles,
        )}
      >
        <SessionsList
          sessions={sessions}
          currentSessionId={currentSessionId}
          isLoading={isLoading}
          hasNextPage={hasNextPage}
          isFetchingNextPage={isFetchingNextPage}
          onSelectSession={onSelectSession}
          onFetchNextPage={onFetchNextPage}
        />
      </div>
      {hasActiveSession && (
        <div className="shrink-0 bg-zinc-50 p-3 shadow-[0_-4px_6px_-1px_rgba(0,0,0,0.05)]">
          <Button
            variant="primary"
            size="small"
            onClick={onNewChat}
            className="w-full"
            leftIcon={<Plus width="1rem" height="1rem" />}
          >
            New Chat
          </Button>
        </div>
      )}
    </aside>
  );
}
@@ -0,0 +1,15 @@
import { Text } from "@/components/atoms/Text/Text";
import { ChatLoader } from "@/components/contextual/Chat/components/ChatLoader/ChatLoader";

export function LoadingState() {
  return (
    <div className="flex flex-1 items-center justify-center">
      <div className="flex flex-col items-center gap-4">
        <ChatLoader />
        <Text variant="body" className="text-zinc-500">
          Loading your chats...
        </Text>
      </div>
    </div>
  );
}
@@ -0,0 +1,91 @@
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { Button } from "@/components/atoms/Button/Button";
import { scrollbarStyles } from "@/components/styles/scrollbars";
import { cn } from "@/lib/utils";
import { PlusIcon, X } from "@phosphor-icons/react";
import { Drawer } from "vaul";
import { SessionsList } from "../SessionsList/SessionsList";

interface Props {
  isOpen: boolean;
  sessions: SessionSummaryResponse[];
  currentSessionId: string | null;
  isLoading: boolean;
  hasNextPage: boolean;
  isFetchingNextPage: boolean;
  onSelectSession: (sessionId: string) => void;
  onFetchNextPage: () => void;
  onNewChat: () => void;
  onClose: () => void;
  onOpenChange: (open: boolean) => void;
  hasActiveSession: boolean;
}

export function MobileDrawer({
  isOpen,
  sessions,
  currentSessionId,
  isLoading,
  hasNextPage,
  isFetchingNextPage,
  onSelectSession,
  onFetchNextPage,
  onNewChat,
  onClose,
  onOpenChange,
  hasActiveSession,
}: Props) {
  return (
    <Drawer.Root open={isOpen} onOpenChange={onOpenChange} direction="left">
      <Drawer.Portal>
        <Drawer.Overlay className="fixed inset-0 z-[60] bg-black/10 backdrop-blur-sm" />
        <Drawer.Content className="fixed left-0 top-0 z-[70] flex h-full w-80 flex-col border-r border-zinc-200 bg-zinc-50">
          <div className="shrink-0 border-b border-zinc-200 p-4">
            <div className="flex items-center justify-between">
              <Drawer.Title className="text-lg font-semibold text-zinc-800">
                Your chats
              </Drawer.Title>
              <Button
                variant="icon"
                size="icon"
                aria-label="Close sessions"
                onClick={onClose}
              >
                <X width="1.25rem" height="1.25rem" />
              </Button>
            </div>
          </div>
          <div
            className={cn(
              "flex min-h-0 flex-1 flex-col overflow-y-auto px-3 py-3",
              scrollbarStyles,
            )}
          >
            <SessionsList
              sessions={sessions}
              currentSessionId={currentSessionId}
              isLoading={isLoading}
              hasNextPage={hasNextPage}
              isFetchingNextPage={isFetchingNextPage}
              onSelectSession={onSelectSession}
              onFetchNextPage={onFetchNextPage}
            />
          </div>
          {hasActiveSession && (
            <div className="shrink-0 bg-white p-3 shadow-[0_-4px_6px_-1px_rgba(0,0,0,0.05)]">
              <Button
                variant="primary"
                size="small"
                onClick={onNewChat}
                className="w-full"
                leftIcon={<PlusIcon width="1rem" height="1rem" />}
              >
                New Chat
              </Button>
            </div>
          )}
        </Drawer.Content>
      </Drawer.Portal>
    </Drawer.Root>
  );
}
@@ -0,0 +1,24 @@
import { useState } from "react";

export function useMobileDrawer() {
  const [isDrawerOpen, setIsDrawerOpen] = useState(false);

  function handleOpenDrawer() {
    setIsDrawerOpen(true);
  }

  function handleCloseDrawer() {
    setIsDrawerOpen(false);
  }

  function handleDrawerOpenChange(open: boolean) {
    setIsDrawerOpen(open);
  }

  return {
    isDrawerOpen,
    handleOpenDrawer,
    handleCloseDrawer,
    handleDrawerOpenChange,
  };
}
@@ -0,0 +1,22 @@
import { Button } from "@/components/atoms/Button/Button";
import { NAVBAR_HEIGHT_PX } from "@/lib/constants";
import { ListIcon } from "@phosphor-icons/react";

interface Props {
  onOpenDrawer: () => void;
}

export function MobileHeader({ onOpenDrawer }: Props) {
  return (
    <Button
      variant="icon"
      size="icon"
      aria-label="Open sessions"
      onClick={onOpenDrawer}
      className="fixed z-50 bg-white shadow-md"
      style={{ left: "1rem", top: `${NAVBAR_HEIGHT_PX + 20}px` }}
    >
      <ListIcon width="1.25rem" height="1.25rem" />
    </Button>
  );
}
@@ -0,0 +1,80 @@
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Text } from "@/components/atoms/Text/Text";
import { InfiniteList } from "@/components/molecules/InfiniteList/InfiniteList";
import { cn } from "@/lib/utils";
import { getSessionTitle } from "../../helpers";

interface Props {
  sessions: SessionSummaryResponse[];
  currentSessionId: string | null;
  isLoading: boolean;
  hasNextPage: boolean;
  isFetchingNextPage: boolean;
  onSelectSession: (sessionId: string) => void;
  onFetchNextPage: () => void;
}

export function SessionsList({
  sessions,
  currentSessionId,
  isLoading,
  hasNextPage,
  isFetchingNextPage,
  onSelectSession,
  onFetchNextPage,
}: Props) {
  if (isLoading) {
    return (
      <div className="space-y-1">
        {Array.from({ length: 5 }).map((_, i) => (
          <div key={i} className="rounded-lg px-3 py-2.5">
            <Skeleton className="h-5 w-full" />
          </div>
        ))}
      </div>
    );
  }

  if (sessions.length === 0) {
    return (
      <div className="flex h-full items-center justify-center">
        <Text variant="body" className="text-zinc-500">
          You don't have previous chats
        </Text>
      </div>
    );
  }

  return (
    <InfiniteList
      items={sessions}
      hasMore={hasNextPage}
      isFetchingMore={isFetchingNextPage}
      onEndReached={onFetchNextPage}
      className="space-y-1"
      renderItem={(session) => {
        const isActive = session.id === currentSessionId;
        return (
          <button
            onClick={() => onSelectSession(session.id)}
            className={cn(
              "w-full rounded-lg px-3 py-2.5 text-left transition-colors",
              isActive ? "bg-zinc-100" : "hover:bg-zinc-50",
            )}
          >
            <Text
              variant="body"
              className={cn(
                "font-normal",
                isActive ? "text-zinc-600" : "text-zinc-800",
              )}
            >
              {getSessionTitle(session)}
            </Text>
          </button>
        );
      }}
    />
  );
}
@@ -0,0 +1,92 @@
import { useGetV2ListSessions } from "@/app/api/__generated__/endpoints/chat/chat";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { okData } from "@/app/api/helpers";
import { useEffect, useMemo, useState } from "react";

const PAGE_SIZE = 50;

export interface UseSessionsPaginationArgs {
  enabled: boolean;
}

export function useSessionsPagination({ enabled }: UseSessionsPaginationArgs) {
  const [offset, setOffset] = useState(0);
  const [accumulatedSessions, setAccumulatedSessions] = useState<
    SessionSummaryResponse[]
  >([]);
  const [totalCount, setTotalCount] = useState<number | null>(null);

  const { data, isLoading, isFetching, isError } = useGetV2ListSessions(
    { limit: PAGE_SIZE, offset },
    {
      query: {
        enabled: enabled && offset >= 0,
      },
    },
  );

  useEffect(() => {
    const responseData = okData(data);
    if (responseData) {
      const newSessions = responseData.sessions;
      const total = responseData.total;
      setTotalCount(total);

      if (offset === 0) {
        setAccumulatedSessions(newSessions);
      } else {
        setAccumulatedSessions((prev) => [...prev, ...newSessions]);
      }
    } else if (!enabled) {
      setAccumulatedSessions([]);
      setTotalCount(null);
    }
  }, [data, offset, enabled]);

  const hasNextPage = useMemo(() => {
    if (totalCount === null) return false;
    return accumulatedSessions.length < totalCount;
  }, [accumulatedSessions.length, totalCount]);

  const areAllSessionsLoaded = useMemo(() => {
    if (totalCount === null) return false;
    return (
      accumulatedSessions.length >= totalCount && !isFetching && !isLoading
    );
  }, [accumulatedSessions.length, totalCount, isFetching, isLoading]);

  useEffect(() => {
    if (
      hasNextPage &&
      !isFetching &&
      !isLoading &&
      !isError &&
      totalCount !== null
    ) {
      setOffset((prev) => prev + PAGE_SIZE);
    }
  }, [hasNextPage, isFetching, isLoading, isError, totalCount]);

  function fetchNextPage() {
    if (hasNextPage && !isFetching) {
      setOffset((prev) => prev + PAGE_SIZE);
    }
  }

  function reset() {
    setOffset(0);
    setAccumulatedSessions([]);
    setTotalCount(null);
  }

  return {
    sessions: accumulatedSessions,
    isLoading,
    isFetching,
    hasNextPage,
    areAllSessionsLoaded,
    totalCount,
    fetchNextPage,
    reset,
  };
}
@@ -0,0 +1,165 @@
import type { SessionDetailResponse } from "@/app/api/__generated__/models/sessionDetailResponse";
import type { SessionSummaryResponse } from "@/app/api/__generated__/models/sessionSummaryResponse";
import { format, formatDistanceToNow, isToday } from "date-fns";

export function convertSessionDetailToSummary(
  session: SessionDetailResponse,
): SessionSummaryResponse {
  return {
    id: session.id,
    created_at: session.created_at,
    updated_at: session.updated_at,
    title: undefined,
  };
}

export function filterVisibleSessions(
  sessions: SessionSummaryResponse[],
): SessionSummaryResponse[] {
  return sessions.filter(
    (session) => session.updated_at !== session.created_at,
  );
}

export function getSessionTitle(session: SessionSummaryResponse): string {
  if (session.title) return session.title;
  const isNewSession = session.updated_at === session.created_at;
  if (isNewSession) {
    const createdDate = new Date(session.created_at);
    if (isToday(createdDate)) {
      return "Today";
    }
    return format(createdDate, "MMM d, yyyy");
  }
  return "Untitled Chat";
}

export function getSessionUpdatedLabel(
  session: SessionSummaryResponse,
): string {
  if (!session.updated_at) return "";
  return formatDistanceToNow(new Date(session.updated_at), { addSuffix: true });
}

export function mergeCurrentSessionIntoList(
  accumulatedSessions: SessionSummaryResponse[],
  currentSessionId: string | null,
  currentSessionData: SessionDetailResponse | null | undefined,
): SessionSummaryResponse[] {
  const filteredSessions: SessionSummaryResponse[] = [];

  if (accumulatedSessions.length > 0) {
    const visibleSessions = filterVisibleSessions(accumulatedSessions);

    if (currentSessionId) {
      const currentInAll = accumulatedSessions.find(
        (s) => s.id === currentSessionId,
      );
      if (currentInAll) {
        const isInVisible = visibleSessions.some(
          (s) => s.id === currentSessionId,
        );
        if (!isInVisible) {
          filteredSessions.push(currentInAll);
        }
      }
    }

    filteredSessions.push(...visibleSessions);
  }

  if (currentSessionId && currentSessionData) {
    const isCurrentInList = filteredSessions.some(
      (s) => s.id === currentSessionId,
    );
    if (!isCurrentInList) {
      const summarySession = convertSessionDetailToSummary(currentSessionData);
      filteredSessions.unshift(summarySession);
    }
  }

  return filteredSessions;
}

export function getCurrentSessionId(
  searchParams: URLSearchParams,
): string | null {
  return searchParams.get("sessionId");
}

export function shouldAutoSelectSession(
  areAllSessionsLoaded: boolean,
  hasAutoSelectedSession: boolean,
  paramSessionId: string | null,
  visibleSessions: SessionSummaryResponse[],
  accumulatedSessions: SessionSummaryResponse[],
  isLoading: boolean,
  totalCount: number | null,
): {
  shouldSelect: boolean;
  sessionIdToSelect: string | null;
  shouldCreate: boolean;
} {
  if (!areAllSessionsLoaded || hasAutoSelectedSession) {
    return {
      shouldSelect: false,
      sessionIdToSelect: null,
      shouldCreate: false,
    };
  }

  if (paramSessionId) {
    return {
      shouldSelect: false,
      sessionIdToSelect: null,
      shouldCreate: false,
    };
  }

  if (visibleSessions.length > 0) {
    return {
      shouldSelect: true,
      sessionIdToSelect: visibleSessions[0].id,
      shouldCreate: false,
    };
  }

  if (accumulatedSessions.length === 0 && !isLoading && totalCount === 0) {
    return { shouldSelect: false, sessionIdToSelect: null, shouldCreate: true };
  }

  if (totalCount === 0) {
    return {
      shouldSelect: false,
      sessionIdToSelect: null,
      shouldCreate: false,
    };
  }

  return { shouldSelect: false, sessionIdToSelect: null, shouldCreate: false };
}

export function checkReadyToShowContent(
  areAllSessionsLoaded: boolean,
  paramSessionId: string | null,
  accumulatedSessions: SessionSummaryResponse[],
  isCurrentSessionLoading: boolean,
  currentSessionData: SessionDetailResponse | null | undefined,
  hasAutoSelectedSession: boolean,
): boolean {
  if (!areAllSessionsLoaded) return false;

  if (paramSessionId) {
    const sessionFound = accumulatedSessions.some(
      (s) => s.id === paramSessionId,
    );
    return (
      sessionFound ||
      (!isCurrentSessionLoading &&
        currentSessionData !== undefined &&
        currentSessionData !== null)
    );
  }

  return hasAutoSelectedSession;
}
@@ -0,0 +1,170 @@
"use client";

import {
  getGetV2ListSessionsQueryKey,
  useGetV2GetSession,
} from "@/app/api/__generated__/endpoints/chat/chat";
import { okData } from "@/app/api/helpers";
import { useBreakpoint } from "@/lib/hooks/useBreakpoint";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import { useQueryClient } from "@tanstack/react-query";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { useEffect, useRef, useState } from "react";
import { useMobileDrawer } from "./components/MobileDrawer/useMobileDrawer";
import { useSessionsPagination } from "./components/SessionsList/useSessionsPagination";
import {
  checkReadyToShowContent,
  filterVisibleSessions,
  getCurrentSessionId,
  mergeCurrentSessionIntoList,
} from "./helpers";

export function useCopilotShell() {
  const router = useRouter();
  const pathname = usePathname();
  const searchParams = useSearchParams();
  const queryClient = useQueryClient();
  const breakpoint = useBreakpoint();
  const { isLoggedIn } = useSupabase();
  const isMobile =
    breakpoint === "base" || breakpoint === "sm" || breakpoint === "md";

  const isOnHomepage = pathname === "/copilot";
  const paramSessionId = searchParams.get("sessionId");

  const {
    isDrawerOpen,
    handleOpenDrawer,
    handleCloseDrawer,
    handleDrawerOpenChange,
  } = useMobileDrawer();

  const paginationEnabled = !isMobile || isDrawerOpen || !!paramSessionId;

  const {
    sessions: accumulatedSessions,
    isLoading: isSessionsLoading,
    isFetching: isSessionsFetching,
    hasNextPage,
    areAllSessionsLoaded,
    fetchNextPage,
    reset: resetPagination,
  } = useSessionsPagination({
    enabled: paginationEnabled,
  });

  const currentSessionId = getCurrentSessionId(searchParams);

  const { data: currentSessionData, isLoading: isCurrentSessionLoading } =
    useGetV2GetSession(currentSessionId || "", {
      query: {
        enabled: !!currentSessionId,
        select: okData,
      },
    });

  const [hasAutoSelectedSession, setHasAutoSelectedSession] = useState(false);
  const hasAutoSelectedRef = useRef(false);

  // Mark as auto-selected when sessionId is in URL
  useEffect(() => {
    if (paramSessionId && !hasAutoSelectedRef.current) {
      hasAutoSelectedRef.current = true;
      setHasAutoSelectedSession(true);
    }
  }, [paramSessionId]);

  // On homepage without sessionId, mark as ready immediately
  useEffect(() => {
    if (isOnHomepage && !paramSessionId && !hasAutoSelectedRef.current) {
      hasAutoSelectedRef.current = true;
      setHasAutoSelectedSession(true);
    }
  }, [isOnHomepage, paramSessionId]);

  // Invalidate sessions list when navigating to homepage (to show newly created sessions)
  useEffect(() => {
    if (isOnHomepage && !paramSessionId) {
      queryClient.invalidateQueries({
        queryKey: getGetV2ListSessionsQueryKey(),
      });
    }
  }, [isOnHomepage, paramSessionId, queryClient]);

  // Reset pagination when query becomes disabled
  const prevPaginationEnabledRef = useRef(paginationEnabled);
  useEffect(() => {
    if (prevPaginationEnabledRef.current && !paginationEnabled) {
      resetPagination();
      resetAutoSelect();
    }
    prevPaginationEnabledRef.current = paginationEnabled;
  }, [paginationEnabled, resetPagination]);

  const sessions = mergeCurrentSessionIntoList(
    accumulatedSessions,
    currentSessionId,
    currentSessionData,
  );

  const visibleSessions = filterVisibleSessions(sessions);

  const sidebarSelectedSessionId =
    isOnHomepage && !paramSessionId ? null : currentSessionId;

  const isReadyToShowContent = isOnHomepage
    ? true
    : checkReadyToShowContent(
        areAllSessionsLoaded,
        paramSessionId,
        accumulatedSessions,
        isCurrentSessionLoading,
        currentSessionData,
        hasAutoSelectedSession,
      );

  function handleSelectSession(sessionId: string) {
    // Navigate using replaceState to avoid full page reload
    window.history.replaceState(null, "", `/copilot?sessionId=${sessionId}`);
    // Force a re-render by updating the URL through router
    router.replace(`/copilot?sessionId=${sessionId}`);
    if (isMobile) handleCloseDrawer();
  }

  function handleNewChat() {
    resetAutoSelect();
    resetPagination();
    // Invalidate and refetch sessions list to ensure newly created sessions appear
    queryClient.invalidateQueries({
      queryKey: getGetV2ListSessionsQueryKey(),
    });
    window.history.replaceState(null, "", "/copilot");
    router.replace("/copilot");
    if (isMobile) handleCloseDrawer();
  }

  function resetAutoSelect() {
    hasAutoSelectedRef.current = false;
    setHasAutoSelectedSession(false);
  }

  return {
    isMobile,
    isDrawerOpen,
    isLoggedIn,
    hasActiveSession:
      Boolean(currentSessionId) && (!isOnHomepage || Boolean(paramSessionId)),
    isLoading: isSessionsLoading || !areAllSessionsLoaded,
    sessions: visibleSessions,
    currentSessionId: sidebarSelectedSessionId,
    handleSelectSession,
    handleOpenDrawer,
    handleCloseDrawer,
    handleDrawerOpenChange,
    handleNewChat,
    hasNextPage,
    isFetchingNextPage: isSessionsFetching,
    fetchNextPage,
    isReadyToShowContent,
  };
}
@@ -0,0 +1,33 @@
import type { User } from "@supabase/supabase-js";

export function getGreetingName(user?: User | null): string {
  if (!user) return "there";
  const metadata = user.user_metadata as Record<string, unknown> | undefined;
  const fullName = metadata?.full_name;
  const name = metadata?.name;
  if (typeof fullName === "string" && fullName.trim()) {
    return fullName.split(" ")[0];
  }
  if (typeof name === "string" && name.trim()) {
    return name.split(" ")[0];
  }
  if (user.email) {
    return user.email.split("@")[0];
  }
  return "there";
}

export function buildCopilotChatUrl(prompt: string): string {
  const trimmed = prompt.trim();
  if (!trimmed) return "/copilot/chat";
  const encoded = encodeURIComponent(trimmed);
  return `/copilot/chat?prompt=${encoded}`;
}

export function getQuickActions(): string[] {
  return [
    "Show me what I can automate",
    "Design a custom workflow",
    "Help me with content creation",
  ];
}
@@ -0,0 +1,6 @@
import type { ReactNode } from "react";
import { CopilotShell } from "./components/CopilotShell/CopilotShell";

export default function CopilotLayout({ children }: { children: ReactNode }) {
  return <CopilotShell>{children}</CopilotShell>;
}
autogpt_platform/frontend/src/app/(platform)/copilot/page.tsx
@@ -0,0 +1,228 @@
"use client";

import { postV2CreateSession } from "@/app/api/__generated__/endpoints/chat/chat";
import { Skeleton } from "@/components/__legacy__/ui/skeleton";
import { Button } from "@/components/atoms/Button/Button";
import { LoadingSpinner } from "@/components/atoms/LoadingSpinner/LoadingSpinner";
import { Text } from "@/components/atoms/Text/Text";
import { Chat } from "@/components/contextual/Chat/Chat";
import { ChatInput } from "@/components/contextual/Chat/components/ChatInput/ChatInput";
import { getHomepageRoute } from "@/lib/constants";
import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
import {
  Flag,
  type FlagValues,
  useGetFlag,
} from "@/services/feature-flags/use-get-flag";
import { useFlags } from "launchdarkly-react-client-sdk";
import { useRouter, useSearchParams } from "next/navigation";
import { useEffect, useMemo, useRef, useState } from "react";
import { getGreetingName, getQuickActions } from "./helpers";

type PageState =
  | { type: "welcome" }
  | { type: "creating"; prompt: string }
  | { type: "chat"; sessionId: string; initialPrompt?: string };

export default function CopilotPage() {
  const router = useRouter();
  const searchParams = useSearchParams();
  const { user, isLoggedIn, isUserLoading } = useSupabase();

  const isChatEnabled = useGetFlag(Flag.CHAT);
  const flags = useFlags<FlagValues>();
  const homepageRoute = getHomepageRoute(isChatEnabled);
  const envEnabled = process.env.NEXT_PUBLIC_LAUNCHDARKLY_ENABLED === "true";
  const clientId = process.env.NEXT_PUBLIC_LAUNCHDARKLY_CLIENT_ID;
  const isLaunchDarklyConfigured = envEnabled && Boolean(clientId);
  const isFlagReady =
    !isLaunchDarklyConfigured || flags[Flag.CHAT] !== undefined;

  const [pageState, setPageState] = useState<PageState>({ type: "welcome" });
  const initialPromptRef = useRef<Map<string, string>>(new Map());

  const urlSessionId = searchParams.get("sessionId");

  // Sync with URL sessionId (preserve initialPrompt from ref)
  useEffect(
    function syncSessionFromUrl() {
      if (urlSessionId) {
        // If we're already in chat state with this sessionId, don't overwrite
        if (pageState.type === "chat" && pageState.sessionId === urlSessionId) {
          return;
        }
        // Get initialPrompt from ref or current state
        const storedInitialPrompt = initialPromptRef.current.get(urlSessionId);
        const currentInitialPrompt =
          storedInitialPrompt ||
          (pageState.type === "creating"
            ? pageState.prompt
            : pageState.type === "chat"
              ? pageState.initialPrompt
              : undefined);
        if (currentInitialPrompt) {
          initialPromptRef.current.set(urlSessionId, currentInitialPrompt);
        }
        setPageState({
          type: "chat",
          sessionId: urlSessionId,
          initialPrompt: currentInitialPrompt,
        });
      } else if (pageState.type === "chat") {
        setPageState({ type: "welcome" });
      }
    },
    [urlSessionId],
  );

  useEffect(
    function ensureAccess() {
      if (!isFlagReady) return;
      if (isChatEnabled === false) {
        router.replace(homepageRoute);
      }
    },
    [homepageRoute, isChatEnabled, isFlagReady, router],
  );

  const greetingName = useMemo(
    function getName() {
      return getGreetingName(user);
    },
    [user],
  );

  const quickActions = useMemo(function getActions() {
    return getQuickActions();
  }, []);

  async function startChatWithPrompt(prompt: string) {
    if (!prompt?.trim()) return;
    if (pageState.type === "creating") return;

    const trimmedPrompt = prompt.trim();
    setPageState({ type: "creating", prompt: trimmedPrompt });

    try {
      // Create session
      const sessionResponse = await postV2CreateSession({
        body: JSON.stringify({}),
      });

      if (sessionResponse.status !== 200 || !sessionResponse.data?.id) {
        throw new Error("Failed to create session");
      }

      const sessionId = sessionResponse.data.id;

      // Store initialPrompt in ref so it persists across re-renders
      initialPromptRef.current.set(sessionId, trimmedPrompt);

      // Update URL and show Chat with initial prompt
      // Chat will handle sending the message and streaming
      window.history.replaceState(null, "", `/copilot?sessionId=${sessionId}`);
      setPageState({ type: "chat", sessionId, initialPrompt: trimmedPrompt });
    } catch (error) {
      console.error("[CopilotPage] Failed to start chat:", error);
      setPageState({ type: "welcome" });
    }
  }

  function handleQuickAction(action: string) {
    startChatWithPrompt(action);
  }

  function handleSessionNotFound() {
    router.replace("/copilot");
  }

  if (!isFlagReady || isChatEnabled === false || !isLoggedIn) {
    return null;
  }

  // Show Chat when we have an active session
  if (pageState.type === "chat") {
    return (
      <div className="flex h-full flex-col">
        <Chat
          key={pageState.sessionId ?? "welcome"}
          className="flex-1"
          urlSessionId={pageState.sessionId}
          initialPrompt={pageState.initialPrompt}
          onSessionNotFound={handleSessionNotFound}
        />
      </div>
    );
  }

  // Show loading state while creating session and sending first message
  if (pageState.type === "creating") {
    return (
      <div className="flex h-full flex-1 flex-col items-center justify-center bg-[#f8f8f9] px-6 py-10">
        <LoadingSpinner size="large" />
        <Text variant="body" className="mt-4 text-zinc-500">
          Starting your chat...
        </Text>
      </div>
    );
  }

  // Show Welcome screen
  const isLoading = isUserLoading;

  return (
    <div className="flex h-full flex-1 items-center justify-center overflow-y-auto bg-[#f8f8f9] px-6 py-10">
      <div className="w-full text-center">
        {isLoading ? (
          <div className="mx-auto max-w-2xl">
            <Skeleton className="mx-auto mb-3 h-8 w-64" />
            <Skeleton className="mx-auto mb-8 h-6 w-80" />
            <div className="mb-8">
              <Skeleton className="mx-auto h-14 w-full rounded-lg" />
            </div>
            <div className="flex flex-wrap items-center justify-center gap-3">
              {Array.from({ length: 4 }).map((_, i) => (
                <Skeleton key={i} className="h-9 w-48 rounded-md" />
              ))}
            </div>
          </div>
        ) : (
          <>
            <div className="mx-auto max-w-2xl">
              <Text
                variant="h3"
                className="mb-3 !text-[1.375rem] text-zinc-700"
              >
                Hey, <span className="text-violet-600">{greetingName}</span>
              </Text>
              <Text variant="h3" className="mb-8 !font-normal">
                What do you want to automate?
              </Text>

              <div className="mb-6">
                <ChatInput
                  onSend={startChatWithPrompt}
                  placeholder='You can search or just ask - e.g. "create a blog post outline"'
                />
              </div>
            </div>
            <div className="flex flex-nowrap items-center justify-center gap-3 overflow-x-auto [-ms-overflow-style:none] [scrollbar-width:none] [&::-webkit-scrollbar]:hidden">
              {quickActions.map((action) => (
                <Button
                  key={action}
                  type="button"
                  variant="outline"
                  size="small"
                  onClick={() => handleQuickAction(action)}
                  className="h-auto shrink-0 border-zinc-600 !px-4 !py-2 text-[1rem] text-zinc-600"
                >
                  {action}
                </Button>
              ))}
            </div>
          </>
        )}
      </div>
    </div>
  );
}
@@ -1,6 +1,8 @@
 "use client";

 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
+import { getHomepageRoute } from "@/lib/constants";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { useSearchParams } from "next/navigation";
 import { Suspense } from "react";
 import { getErrorDetails } from "./helpers";
@@ -9,6 +11,8 @@ function ErrorPageContent() {
   const searchParams = useSearchParams();
   const errorMessage = searchParams.get("message");
   const errorDetails = getErrorDetails(errorMessage);
+  const isChatEnabled = useGetFlag(Flag.CHAT);
+  const homepageRoute = getHomepageRoute(isChatEnabled);

   function handleRetry() {
     // Auth-related errors should redirect to login
@@ -25,8 +29,8 @@ function ErrorPageContent() {
         window.location.reload();
       }, 2000);
     } else {
-      // For server/network errors, go to marketplace
-      window.location.href = "/marketplace";
+      // For server/network errors, go to home
+      window.location.href = homepageRoute;
     }
   }
@@ -31,10 +31,18 @@ export function AgentSettingsModal({
     }
   }

-  const { currentSafeMode, isPending, hasHITLBlocks, handleToggle } =
-    useAgentSafeMode(agent);
+  const {
+    currentHITLSafeMode,
+    showHITLToggle,
+    handleHITLToggle,
+    currentSensitiveActionSafeMode,
+    showSensitiveActionToggle,
+    handleSensitiveActionToggle,
+    isPending,
+    shouldShowToggle,
+  } = useAgentSafeMode(agent);

-  if (!hasHITLBlocks) return null;
+  if (!shouldShowToggle) return null;

   return (
     <Dialog
@@ -57,23 +65,48 @@ export function AgentSettingsModal({
       )}
       <Dialog.Content>
         <div className="space-y-6">
+          {showHITLToggle && (
           <div className="flex w-full flex-col items-start gap-4 rounded-xl border border-zinc-100 bg-white p-6">
             <div className="flex w-full items-start justify-between gap-4">
               <div className="flex-1">
-                <Text variant="large-semibold">Require human approval</Text>
+                <Text variant="large-semibold">
+                  Human-in-the-loop approval
+                </Text>
                 <Text variant="large" className="mt-1 text-zinc-900">
-                  The agent will pause and wait for your review before
-                  continuing
+                  The agent will pause at human-in-the-loop blocks and wait
+                  for your review before continuing
                 </Text>
               </div>
               <Switch
-                checked={currentSafeMode || false}
-                onCheckedChange={handleToggle}
+                checked={currentHITLSafeMode || false}
+                onCheckedChange={handleHITLToggle}
                 disabled={isPending}
                 className="mt-1"
               />
             </div>
           </div>
+          )}
+          {showSensitiveActionToggle && (
+            <div className="flex w-full flex-col items-start gap-4 rounded-xl border border-zinc-100 bg-white p-6">
+              <div className="flex w-full items-start justify-between gap-4">
+                <div className="flex-1">
+                  <Text variant="large-semibold">
+                    Sensitive action approval
+                  </Text>
+                  <Text variant="large" className="mt-1 text-zinc-900">
+                    The agent will pause at sensitive action blocks and wait for
+                    your review before continuing
+                  </Text>
+                </div>
+                <Switch
+                  checked={currentSensitiveActionSafeMode}
+                  onCheckedChange={handleSensitiveActionToggle}
+                  disabled={isPending}
+                  className="mt-1"
+                />
+              </div>
+            </div>
+          )}
         </div>
       </Dialog.Content>
     </Dialog>
@@ -180,7 +180,7 @@ export function RunAgentModal({

       {/* Content */}
       {hasAnySetupFields ? (
-        <div className="mt-10 pb-32">
+        <div className="mt-4 pb-10">
           <RunAgentModalContextProvider
             value={{
               agent,
@@ -29,7 +29,7 @@ export function ModalHeader({ agent }: ModalHeaderProps) {
         <ShowMoreText
           previewLimit={400}
           variant="small"
-          className="mt-4 !text-zinc-700"
+          className="mb-2 mt-4 !text-zinc-700"
         >
           {agent.description}
         </ShowMoreText>
@@ -40,6 +40,8 @@ export function ModalHeader({ agent }: ModalHeaderProps) {
             <Text variant="lead-semibold" className="text-blue-600">
               Tip
             </Text>
+            <div className="h-px w-full bg-blue-100" />
+
             <Text variant="body">
               For best results, run this agent{" "}
               {humanizeCronExpression(
@@ -50,7 +52,7 @@ export function ModalHeader({ agent }: ModalHeaderProps) {
         ) : null}

         {agent.instructions ? (
-          <div className="flex flex-col gap-4 rounded-medium border border-purple-100 bg-[#F1EBFE/5] p-4">
+          <div className="mt-4 flex flex-col gap-4 rounded-medium border border-purple-100 bg-[#f1ebfe80] p-4">
             <Text variant="lead-semibold" className="text-purple-600">
               Instructions
             </Text>
@@ -5,48 +5,112 @@ import { Graph } from "@/lib/autogpt-server-api/types";
 import { cn } from "@/lib/utils";
 import { ShieldCheckIcon, ShieldIcon } from "@phosphor-icons/react";
 import { useAgentSafeMode } from "@/hooks/useAgentSafeMode";
+import {
+  Tooltip,
+  TooltipContent,
+  TooltipTrigger,
+} from "@/components/atoms/Tooltip/BaseTooltip";

 interface Props {
   graph: GraphModel | LibraryAgent | Graph;
   className?: string;
-  fullWidth?: boolean;
 }

-export function SafeModeToggle({ graph }: Props) {
+interface SafeModeIconButtonProps {
+  isEnabled: boolean;
+  label: string;
+  tooltipEnabled: string;
+  tooltipDisabled: string;
+  onToggle: () => void;
+  isPending: boolean;
+}
+
+function SafeModeIconButton({
+  isEnabled,
+  label,
+  tooltipEnabled,
+  tooltipDisabled,
+  onToggle,
+  isPending,
+}: SafeModeIconButtonProps) {
+  return (
+    <Tooltip delayDuration={100}>
+      <TooltipTrigger asChild>
+        <Button
+          variant="icon"
+          size="icon"
+          aria-label={`${label}: ${isEnabled ? "ON" : "OFF"}. ${isEnabled ? tooltipEnabled : tooltipDisabled}`}
+          onClick={onToggle}
+          disabled={isPending}
+          className={cn(isPending ? "opacity-0" : "opacity-100")}
+        >
+          {isEnabled ? (
+            <ShieldCheckIcon weight="bold" size={16} />
+          ) : (
+            <ShieldIcon weight="bold" size={16} />
+          )}
+        </Button>
+      </TooltipTrigger>
+      <TooltipContent>
+        <div className="text-center">
+          <div className="font-medium">
+            {label}: {isEnabled ? "ON" : "OFF"}
+          </div>
+          <div className="mt-1 text-xs text-muted-foreground">
+            {isEnabled ? tooltipEnabled : tooltipDisabled}
+          </div>
+        </div>
+      </TooltipContent>
+    </Tooltip>
+  );
+}
+
+export function SafeModeToggle({ graph, className }: Props) {
   const {
-    currentSafeMode,
+    currentHITLSafeMode,
+    showHITLToggle,
+    isHITLStateUndetermined,
+    handleHITLToggle,
+    currentSensitiveActionSafeMode,
+    showSensitiveActionToggle,
+    handleSensitiveActionToggle,
     isPending,
     shouldShowToggle,
-    isStateUndetermined,
-    handleToggle,
   } = useAgentSafeMode(graph);

-  if (!shouldShowToggle || isStateUndetermined) {
+  if (!shouldShowToggle || isHITLStateUndetermined) {
+    return null;
+  }
+
+  const showHITL = showHITLToggle && !isHITLStateUndetermined;
+  const showSensitive = showSensitiveActionToggle;
+
+  if (!showHITL && !showSensitive) {
     return null;
   }

   return (
-    <Button
-      variant="icon"
-      key={graph.id}
-      size="icon"
-      aria-label={
-        currentSafeMode!
-          ? "Safe Mode: ON. Human in the loop blocks require manual review"
-          : "Safe Mode: OFF. Human in the loop blocks proceed automatically"
-      }
-      onClick={handleToggle}
-      className={cn(isPending ? "opacity-0" : "opacity-100")}
-    >
-      {currentSafeMode! ? (
-        <>
-          <ShieldCheckIcon weight="bold" size={16} />
-        </>
-      ) : (
-        <>
-          <ShieldIcon weight="bold" size={16} />
-        </>
-      )}
-    </Button>
+    <div className={cn("flex gap-1", className)}>
+      {showHITL && (
+        <SafeModeIconButton
+          isEnabled={currentHITLSafeMode}
+          label="Human-in-the-loop"
+          tooltipEnabled="The agent will pause at human-in-the-loop blocks and wait for your approval"
+          tooltipDisabled="Human-in-the-loop blocks will proceed automatically"
+          onToggle={handleHITLToggle}
+          isPending={isPending}
+        />
+      )}
+      {showSensitive && (
+        <SafeModeIconButton
+          isEnabled={currentSensitiveActionSafeMode}
+          label="Sensitive actions"
+          tooltipEnabled="The agent will pause at sensitive action blocks and wait for your approval"
+          tooltipDisabled="Sensitive action blocks will proceed automatically"
+          onToggle={handleSensitiveActionToggle}
+          isPending={isPending}
+        />
+      )}
+    </div>
   );
 }
@@ -13,8 +13,16 @@ interface Props {
 }

 export function SelectedSettingsView({ agent, onClearSelectedRun }: Props) {
-  const { currentSafeMode, isPending, hasHITLBlocks, handleToggle } =
-    useAgentSafeMode(agent);
+  const {
+    currentHITLSafeMode,
+    showHITLToggle,
+    handleHITLToggle,
+    currentSensitiveActionSafeMode,
+    showSensitiveActionToggle,
+    handleSensitiveActionToggle,
+    isPending,
+    shouldShowToggle,
+  } = useAgentSafeMode(agent);

   return (
     <SelectedViewLayout agent={agent}>
@@ -34,24 +42,51 @@ export function SelectedSettingsView({ agent, onClearSelectedRun }: Props) {
       </div>

       <div className={`${AGENT_LIBRARY_SECTION_PADDING_X} space-y-6`}>
-        {hasHITLBlocks ? (
+        {shouldShowToggle ? (
+          <>
+            {showHITLToggle && (
           <div className="flex w-full max-w-2xl flex-col items-start gap-4 rounded-xl border border-zinc-100 bg-white p-6">
             <div className="flex w-full items-start justify-between gap-4">
               <div className="flex-1">
-                <Text variant="large-semibold">Require human approval</Text>
+                <Text variant="large-semibold">
+                  Human-in-the-loop approval
+                </Text>
                 <Text variant="large" className="mt-1 text-zinc-900">
-                  The agent will pause and wait for your review before
-                  continuing
+                  The agent will pause at human-in-the-loop blocks and
+                  wait for your review before continuing
                 </Text>
               </div>
               <Switch
-                checked={currentSafeMode || false}
-                onCheckedChange={handleToggle}
+                checked={currentHITLSafeMode || false}
+                onCheckedChange={handleHITLToggle}
                 disabled={isPending}
                 className="mt-1"
               />
             </div>
           </div>
+            )}
+            {showSensitiveActionToggle && (
+              <div className="flex w-full max-w-2xl flex-col items-start gap-4 rounded-xl border border-zinc-100 bg-white p-6">
+                <div className="flex w-full items-start justify-between gap-4">
+                  <div className="flex-1">
+                    <Text variant="large-semibold">
+                      Sensitive action approval
+                    </Text>
+                    <Text variant="large" className="mt-1 text-zinc-900">
+                      The agent will pause at sensitive action blocks and wait
+                      for your review before continuing
+                    </Text>
+                  </div>
+                  <Switch
+                    checked={currentSensitiveActionSafeMode}
+                    onCheckedChange={handleSensitiveActionToggle}
+                    disabled={isPending}
+                    className="mt-1"
+                  />
+                </div>
+              </div>
+            )}
+          </>
         ) : (
           <div className="rounded-xl border border-zinc-100 bg-white p-6">
             <Text variant="body" className="text-muted-foreground">
@@ -1,8 +1,15 @@
 "use client";
-import React, { useCallback, useEffect, useMemo, useState } from "react";
+import React, {
+  useCallback,
+  useContext,
+  useEffect,
+  useMemo,
+  useState,
+} from "react";

 import {
   CredentialsMetaInput,
+  CredentialsType,
   GraphExecutionID,
   GraphMeta,
   LibraryAgentPreset,
@@ -29,7 +36,11 @@ import {
 } from "@/components/__legacy__/ui/icons";
 import { Input } from "@/components/__legacy__/ui/input";
 import { Button } from "@/components/atoms/Button/Button";
-import { CredentialsInput } from "@/components/contextual/CredentialsInput/CredentialsInput";
+import { CredentialsGroupedView } from "@/components/contextual/CredentialsInput/components/CredentialsGroupedView/CredentialsGroupedView";
+import {
+  findSavedCredentialByProviderAndType,
+  findSavedUserCredentialByProviderAndType,
+} from "@/components/contextual/CredentialsInput/components/CredentialsGroupedView/helpers";
 import { InformationTooltip } from "@/components/molecules/InformationTooltip/InformationTooltip";
 import {
   useToast,
@@ -37,6 +48,7 @@ import {
 } from "@/components/molecules/Toast/use-toast";
 import { humanizeCronExpression } from "@/lib/cron-expression-utils";
 import { cn, isEmpty } from "@/lib/utils";
+import { CredentialsProvidersContext } from "@/providers/agent-credentials/credentials-provider";
 import { ClockIcon, CopyIcon, InfoIcon } from "@phosphor-icons/react";
 import { CalendarClockIcon, Trash2Icon } from "lucide-react";

@@ -90,6 +102,7 @@ export function AgentRunDraftView({
   const api = useBackendAPI();
   const { toast } = useToast();
   const toastOnFail = useToastOnFail();
+  const allProviders = useContext(CredentialsProvidersContext);

   const [inputValues, setInputValues] = useState<Record<string, any>>({});
   const [inputCredentials, setInputCredentials] = useState<
@@ -128,6 +141,77 @@ export function AgentRunDraftView({
     () => graph.credentials_input_schema.properties,
     [graph],
   );
+  const credentialFields = useMemo(
+    function getCredentialFields() {
+      return Object.entries(agentCredentialsInputFields);
+    },
+    [agentCredentialsInputFields],
+  );
+  const requiredCredentials = useMemo(
+    function getRequiredCredentials() {
+      return new Set(
+        (graph.credentials_input_schema?.required as string[]) || [],
+      );
+    },
+    [graph.credentials_input_schema?.required],
+  );
+
+  useEffect(
+    function initializeDefaultCredentials() {
+      if (!allProviders) return;
+      if (!graph.credentials_input_schema?.properties) return;
+      if (requiredCredentials.size === 0) return;
+
+      setInputCredentials(function updateCredentials(currentCreds) {
+        const next = { ...currentCreds };
+        let didAdd = false;
+
+        for (const key of requiredCredentials) {
+          if (next[key]) continue;
+          const schema = graph.credentials_input_schema.properties[key];
+          if (!schema) continue;
+
+          const providerNames = schema.credentials_provider || [];
+          const credentialTypes = schema.credentials_types || [];
+          const requiredScopes = schema.credentials_scopes;
+
+          const userCredential = findSavedUserCredentialByProviderAndType(
+            providerNames,
+            credentialTypes,
+            requiredScopes,
+            allProviders,
+          );
+
+          const savedCredential =
+            userCredential ||
+            findSavedCredentialByProviderAndType(
+              providerNames,
+              credentialTypes,
+              requiredScopes,
+              allProviders,
+            );
+
+          if (!savedCredential) continue;
+
+          next[key] = {
+            id: savedCredential.id,
+            provider: savedCredential.provider,
+            type: savedCredential.type as CredentialsType,
+            title: savedCredential.title,
+          };
+          didAdd = true;
+        }
+
+        if (!didAdd) return currentCreds;
+        return next;
+      });
+    },
+    [
+      allProviders,
+      graph.credentials_input_schema?.properties,
+      requiredCredentials,
+    ],
+  );
+
   const [allRequiredInputsAreSet, missingInputs] = useMemo(() => {
     const nonEmptyInputs = new Set(
@@ -145,18 +229,35 @@ export function AgentRunDraftView({
     );
     return [isSuperset, difference];
   }, [agentInputSchema.required, inputValues]);
-  const [allCredentialsAreSet, missingCredentials] = useMemo(() => {
-    const availableCredentials = new Set(Object.keys(inputCredentials));
-    const allCredentials = new Set(Object.keys(agentCredentialsInputFields));
-    // Backwards-compatible implementation of isSupersetOf and difference
-    const isSuperset = Array.from(allCredentials).every((item) =>
-      availableCredentials.has(item),
-    );
-    const difference = Array.from(allCredentials).filter(
-      (item) => !availableCredentials.has(item),
-    );
-    return [isSuperset, difference];
-  }, [agentCredentialsInputFields, inputCredentials]);
+  const [allCredentialsAreSet, missingCredentials] = useMemo(
+    function getCredentialStatus() {
+      const missing = Array.from(requiredCredentials).filter((key) => {
+        const cred = inputCredentials[key];
+        return !cred || !cred.id;
+      });
+      return [missing.length === 0, missing];
+    },
+    [requiredCredentials, inputCredentials],
+  );
+
+  function addChangedCredentials(prev: Set<keyof LibraryAgentPresetUpdatable>) {
+    const next = new Set(prev);
+    next.add("credentials");
+    return next;
+  }
+
+  function handleCredentialChange(key: string, value?: CredentialsMetaInput) {
+    setInputCredentials(function updateInputCredentials(currentCreds) {
+      const next = { ...currentCreds };
+      if (value === undefined) {
+        delete next[key];
+        return next;
+      }
+      next[key] = value;
+      return next;
+    });
+    setChangedPresetAttributes(addChangedCredentials);
+  }
+
   const notifyMissingInputs = useCallback(
     (needPresetName: boolean = true) => {
       const allMissingFields = (
@@ -649,35 +750,6 @@ export function AgentRunDraftView({
             </>
           )}
-
-          {/* Credentials inputs */}
-          {Object.entries(agentCredentialsInputFields).map(
-            ([key, inputSubSchema]) => (
-              <CredentialsInput
-                key={key}
-                schema={{ ...inputSubSchema, discriminator: undefined }}
-                selectedCredentials={
-                  inputCredentials[key] ?? inputSubSchema.default
-                }
-                onSelectCredentials={(value) => {
-                  setInputCredentials((obj) => {
-                    const newObj = { ...obj };
-                    if (value === undefined) {
-                      delete newObj[key];
-                      return newObj;
-                    }
-                    return {
-                      ...obj,
-                      [key]: value,
-                    };
-                  });
-                  setChangedPresetAttributes((prev) =>
-                    prev.add("credentials"),
-                  );
-                }}
-              />
-            ),
-          )}
-
           {/* Regular inputs */}
           {Object.entries(agentInputFields).map(([key, inputSubSchema]) => (
             <RunAgentInputs
@@ -695,6 +767,17 @@ export function AgentRunDraftView({
               data-testid={`agent-input-${key}`}
             />
           ))}
+
+          {/* Credentials inputs */}
+          {credentialFields.length > 0 && (
+            <CredentialsGroupedView
+              credentialFields={credentialFields}
+              requiredCredentials={requiredCredentials}
+              inputCredentials={inputCredentials}
+              inputValues={inputValues}
+              onCredentialChange={handleCredentialChange}
+            />
+          )}
         </CardContent>
       </Card>
     </div>
@@ -8,6 +8,8 @@ import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/
 import { LibraryAgent } from "@/app/api/__generated__/models/libraryAgent";
 import { okData } from "@/app/api/helpers";
 import { useToast } from "@/components/molecules/Toast/use-toast";
+import { isLogoutInProgress } from "@/lib/autogpt-server-api/helpers";
+import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { updateFavoriteInQueries } from "./helpers";

 interface Props {
@@ -23,10 +25,14 @@ export function useLibraryAgentCard({ agent }: Props) {
   const { toast } = useToast();
   const queryClient = getQueryClient();
   const { mutateAsync: updateLibraryAgent } = usePatchV2UpdateLibraryAgent();
+  const { user, isLoggedIn } = useSupabase();
+  const logoutInProgress = isLogoutInProgress();

   const { data: profile } = useGetV2GetUserProfile({
     query: {
       select: okData,
+      enabled: isLoggedIn && !!user && !logoutInProgress,
+      queryKey: ["/api/store/profile", user?.id],
     },
   });
@@ -1,6 +1,8 @@
 import { useToast } from "@/components/molecules/Toast/use-toast";
+import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { environment } from "@/services/environment";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { loginFormSchema, LoginProvider } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
 import { useRouter, useSearchParams } from "next/navigation";
@@ -20,15 +22,17 @@ export function useLoginPage() {
   const [isGoogleLoading, setIsGoogleLoading] = useState(false);
   const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
   const isCloudEnv = environment.isCloud();
+  const isChatEnabled = useGetFlag(Flag.CHAT);
+  const homepageRoute = getHomepageRoute(isChatEnabled);

   // Get redirect destination from 'next' query parameter
   const nextUrl = searchParams.get("next");

   useEffect(() => {
     if (isLoggedIn && !isLoggingIn) {
-      router.push(nextUrl || "/marketplace");
+      router.push(nextUrl || homepageRoute);
     }
-  }, [isLoggedIn, isLoggingIn, nextUrl, router]);
+  }, [homepageRoute, isLoggedIn, isLoggingIn, nextUrl, router]);

   const form = useForm<z.infer<typeof loginFormSchema>>({
     resolver: zodResolver(loginFormSchema),
@@ -98,7 +102,7 @@ export function useLoginPage() {
       } else if (result.onboarding) {
         router.replace("/onboarding");
       } else {
-        router.replace("/marketplace");
+        router.replace(homepageRoute);
       }
     } catch (error) {
       toast({
@@ -5,6 +5,8 @@ import {
   CarouselContent,
   CarouselItem,
 } from "@/components/__legacy__/ui/carousel";
+import { FadeIn } from "@/components/molecules/FadeIn/FadeIn";
+import { StaggeredList } from "@/components/molecules/StaggeredList/StaggeredList";
 import { useAgentsSection } from "./useAgentsSection";
 import { StoreAgent } from "@/app/api/__generated__/models/storeAgent";
 import { StoreCard } from "../StoreCard/StoreCard";
@@ -41,12 +43,14 @@ export const AgentsSection = ({
   return (
     <div className="flex flex-col items-center justify-center">
       <div className="w-full max-w-[1360px]">
+        <FadeIn direction="left" duration={0.5}>
         <h2
           style={{ marginBottom: margin }}
-          className="font-poppins text-lg font-semibold text-[#282828] dark:text-neutral-200"
+          className="font-poppins text-lg font-semibold text-neutral-800 dark:text-neutral-200"
         >
           {sectionTitle}
         </h2>
+        </FadeIn>
         {!displayedAgents || displayedAgents.length === 0 ? (
           <div className="text-center text-gray-500 dark:text-gray-400">
             No agents found
@@ -54,8 +58,8 @@ export const AgentsSection = ({
         ) : (
           <>
             {/* Mobile Carousel View */}
+            <FadeIn direction="up" className="md:hidden">
             <Carousel
-              className="md:hidden"
               opts={{
                 loop: true,
               }}
@@ -78,8 +82,14 @@ export const AgentsSection = ({
               ))}
             </CarouselContent>
           </Carousel>
+            </FadeIn>
+
-            <div className="hidden grid-cols-1 place-items-center gap-6 md:grid md:grid-cols-2 lg:grid-cols-3 2xl:grid-cols-4">
+            {/* Desktop Grid View with Staggered Animation */}
+            <StaggeredList
+              direction="up"
+              staggerDelay={0.08}
+              className="hidden grid-cols-1 place-items-center gap-6 md:grid md:grid-cols-2 lg:grid-cols-3 2xl:grid-cols-4"
+            >
             {displayedAgents.map((agent, index) => (
               <StoreCard
                 key={index}
@@ -94,7 +104,7 @@ export const AgentsSection = ({
                 onClick={() => handleCardClick(agent.creator, agent.slug)}
               />
             ))}
-            </div>
+            </StaggeredList>
           </>
         )}
       </div>
|||||||
@@ -38,7 +38,7 @@ export function BecomeACreator({
|
|||||||
|
|
||||||
<PublishAgentModal
|
<PublishAgentModal
|
||||||
trigger={
|
trigger={
|
||||||
<button className="inline-flex h-[48px] cursor-pointer items-center justify-center rounded-[38px] bg-neutral-800 px-8 py-3 transition-colors hover:bg-neutral-700 dark:bg-neutral-700 dark:hover:bg-neutral-600 md:h-[56px] md:px-10 md:py-4 lg:h-[68px] lg:px-12 lg:py-5">
|
<button className="inline-flex h-[48px] cursor-pointer items-center justify-center rounded-[38px] bg-neutral-800 px-8 py-3 transition-colors hover:bg-neutral-700 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-neutral-950 focus-visible:ring-offset-2 dark:bg-neutral-700 dark:hover:bg-neutral-600 dark:focus-visible:ring-neutral-50 md:h-[56px] md:px-10 md:py-4 lg:h-[68px] lg:px-12 lg:py-5">
|
||||||
<span className="whitespace-nowrap font-poppins text-base font-medium leading-normal text-neutral-50 md:text-lg md:leading-relaxed lg:text-xl lg:leading-7">
|
<span className="whitespace-nowrap font-poppins text-base font-medium leading-normal text-neutral-50 md:text-lg md:leading-relaxed lg:text-xl lg:leading-7">
|
||||||
{buttonText}
|
{buttonText}
|
||||||
</span>
|
</span>
|
||||||
|
|||||||
@@ -20,9 +20,18 @@ export const CreatorCard = ({
|
|||||||
}: CreatorCardProps) => {
|
}: CreatorCardProps) => {
|
||||||
return (
|
return (
|
||||||
<div
|
<div
|
||||||
className={`h-[264px] w-full px-[18px] pb-5 pt-6 ${backgroundColor(index)} inline-flex cursor-pointer flex-col items-start justify-start gap-3.5 rounded-[26px] transition-all duration-200 hover:brightness-95`}
|
className={`h-[264px] w-full px-[18px] pb-5 pt-6 ${backgroundColor(index)} inline-flex cursor-pointer flex-col items-start justify-start gap-3.5 rounded-[26px] transition-[filter] duration-200 hover:brightness-95 focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-neutral-950 focus-visible:ring-offset-2 dark:focus-visible:ring-neutral-50`}
|
||||||
onClick={onClick}
|
onClick={onClick}
|
||||||
|
onKeyDown={(e) => {
|
||||||
|
if (e.key === "Enter" || e.key === " ") {
|
||||||
|
e.preventDefault();
|
||||||
|
onClick();
|
||||||
|
}
|
||||||
|
}}
|
||||||
data-testid="creator-card"
|
data-testid="creator-card"
|
||||||
|
role="button"
|
||||||
|
tabIndex={0}
|
||||||
|
aria-label={`View ${creatorName}'s profile - ${agentsUploaded} agents`}
|
||||||
>
|
>
|
||||||
<div className="relative h-[64px] w-[64px]">
|
<div className="relative h-[64px] w-[64px]">
|
||||||
<div className="absolute inset-0 overflow-hidden rounded-full">
|
<div className="absolute inset-0 overflow-hidden rounded-full">
|
||||||
|
|||||||
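For reference, the keyboard handling this hunk adds to `CreatorCard` is the usual accessible clickable-card recipe. A minimal standalone sketch of that pattern; the `ClickableCard` name and props are illustrative only and not part of this changeset:

```tsx
import { type ReactNode } from "react";

interface Props {
  label: string;
  onClick: () => void;
  children: ReactNode;
}

// A div that behaves like a button: focusable, labelled, and activatable with
// Enter or Space, mirroring the attributes added to CreatorCard above.
export function ClickableCard({ label, onClick, children }: Props) {
  return (
    <div
      role="button"
      tabIndex={0}
      aria-label={label}
      onClick={onClick}
      onKeyDown={(e) => {
        if (e.key === "Enter" || e.key === " ") {
          e.preventDefault(); // keep Space from scrolling the page
          onClick();
        }
      }}
      className="cursor-pointer focus-visible:outline-none focus-visible:ring-2"
    >
      {children}
    </div>
  );
}
```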
@@ -1,5 +1,7 @@
 "use client";

+import { FadeIn } from "@/components/molecules/FadeIn/FadeIn";
+import { StaggeredList } from "@/components/molecules/StaggeredList/StaggeredList";
 import { CreatorCard } from "../CreatorCard/CreatorCard";
 import { useFeaturedCreators } from "./useFeaturedCreators";
 import { Creator } from "@/app/api/__generated__/models/creator";

@@ -19,11 +21,17 @@ export const FeaturedCreators = ({
 return (
 <div className="flex w-full flex-col items-center justify-center">
 <div className="w-full max-w-[1360px]">
+<FadeIn direction="left" duration={0.5}>
 <h2 className="mb-9 font-poppins text-lg font-semibold text-neutral-800 dark:text-neutral-200">
 {title}
 </h2>
+</FadeIn>

-<div className="grid grid-cols-1 gap-6 md:grid-cols-2 lg:grid-cols-4">
+<StaggeredList
+direction="up"
+staggerDelay={0.1}
+className="grid grid-cols-1 gap-6 md:grid-cols-2 lg:grid-cols-4"
+>
 {displayedCreators.map((creator, index) => (
 <CreatorCard
 key={index}

@@ -35,7 +43,7 @@ export const FeaturedCreators = ({
 index={index}
 />
 ))}
-</div>
+</StaggeredList>
 </div>
 </div>
 );
@@ -8,6 +8,7 @@ import {
 CarouselNext,
 CarouselIndicator,
 } from "@/components/__legacy__/ui/carousel";
+import { FadeIn } from "@/components/molecules/FadeIn/FadeIn";
 import Link from "next/link";
 import { useFeaturedSection } from "./useFeaturedSection";
 import { StoreAgent } from "@/app/api/__generated__/models/storeAgent";

@@ -25,10 +26,13 @@ export const FeaturedSection = ({ featuredAgents }: FeaturedSectionProps) => {

 return (
 <section className="w-full">
+<FadeIn direction="left" duration={0.5}>
 <h2 className="mb-8 font-poppins text-2xl font-semibold leading-7 text-neutral-800 dark:text-neutral-200">
 Featured agents
 </h2>
+</FadeIn>

+<FadeIn direction="up" duration={0.6} delay={0.1}>
 <Carousel
 opts={{
 align: "center",

@@ -59,6 +63,7 @@ export const FeaturedSection = ({ featuredAgents }: FeaturedSectionProps) => {
 <CarouselNext afterClick={handleNextSlide} />
 </div>
 </Carousel>
+</FadeIn>
 </section>
 );
 };
@@ -1,6 +1,6 @@
 "use client";

-import { Badge } from "@/components/__legacy__/ui/badge";
+import { FilterChip } from "@/components/atoms/FilterChip/FilterChip";
 import { useFilterChips } from "./useFilterChips";

 interface FilterChipsProps {

@@ -9,8 +9,6 @@ interface FilterChipsProps {
 multiSelect?: boolean;
 }

-// Some flaws in its logic
-// FRONTEND-TODO : This needs to be fixed
 export const FilterChips = ({
 badges,
 onFilterChange,

@@ -22,18 +20,20 @@ export const FilterChips = ({
 });

 return (
-<div className="flex h-auto min-h-8 flex-wrap items-center justify-center gap-3 lg:min-h-14 lg:justify-start lg:gap-5">
-{badges.map((badge) => (
-<Badge
-key={badge}
-variant={selectedFilters.includes(badge) ? "secondary" : "outline"}
-className="mb-2 flex cursor-pointer items-center justify-center gap-2 rounded-full border border-black/50 px-3 py-1 dark:border-white/50 lg:mb-3 lg:gap-2.5 lg:px-6 lg:py-2"
-onClick={() => handleBadgeClick(badge)}
+<div
+className="flex h-auto min-h-8 flex-wrap items-center justify-center gap-3 lg:min-h-14 lg:justify-start lg:gap-5"
+role="group"
+aria-label="Filter options"
 >
-<div className="text-sm font-light tracking-tight text-[#474747] dark:text-[#e0e0e0] lg:text-xl lg:font-medium lg:leading-9">
-{badge}
-</div>
-</Badge>
+{badges.map((badge) => (
+<FilterChip
+key={badge}
+label={badge}
+selected={selectedFilters.includes(badge)}
+onClick={() => handleBadgeClick(badge)}
+size="lg"
+className="mb-2 lg:mb-3"
+/>
 ))}
 </div>
 );
@@ -1,5 +1,6 @@
 "use client";

+import { FadeIn } from "@/components/molecules/FadeIn/FadeIn";
 import { FilterChips } from "../FilterChips/FilterChips";
 import { SearchBar } from "../SearchBar/SearchBar";
 import { useHeroSection } from "./useHeroSection";

@@ -9,6 +10,7 @@ export const HeroSection = () => {
 return (
 <div className="mb-2 mt-8 flex flex-col items-center justify-center px-4 sm:mb-4 sm:mt-12 sm:px-6 md:mb-6 md:mt-16 lg:my-24 lg:px-8 xl:my-16">
 <div className="w-full max-w-3xl lg:max-w-4xl xl:max-w-5xl">
+<FadeIn direction="down" duration={0.6} delay={0}>
 <div className="mb-4 text-center md:mb-8">
 <h1 className="text-center">
 <span className="font-poppins text-[48px] font-semibold leading-[54px] text-neutral-950 dark:text-neutral-50">

@@ -26,13 +28,18 @@ export const HeroSection = () => {
 </span>
 </h1>
 </div>
+</FadeIn>
+<FadeIn direction="up" duration={0.6} delay={0.15}>
 <h3 className="mb:text-2xl mb-6 text-center font-sans text-xl font-normal leading-loose text-neutral-700 dark:text-neutral-300 md:mb-12">
 Bringing you AI agents designed by thinkers from around the world
 </h3>
+</FadeIn>
+<FadeIn direction="up" duration={0.5} delay={0.3}>
 <div className="mb-4 flex justify-center sm:mb-5">
 <SearchBar height="h-[74px]" />
 </div>
-<div>
+</FadeIn>
+<FadeIn direction="up" duration={0.5} delay={0.4}>
 <div className="flex justify-center">
 <FilterChips
 badges={searchTerms}

@@ -40,7 +47,7 @@ export const HeroSection = () => {
 multiSelect={false}
 />
 </div>
-</div>
+</FadeIn>
 </div>
 </div>
 );
@@ -1,5 +1,6 @@
 "use client";
-import { Separator } from "@/components/__legacy__/ui/separator";
+import { Separator } from "@/components/atoms/Separator/Separator";
+import { FadeIn } from "@/components/molecules/FadeIn/FadeIn";
 import { FeaturedSection } from "../FeaturedSection/FeaturedSection";
 import { BecomeACreator } from "../BecomeACreator/BecomeACreator";
 import { HeroSection } from "../HeroSection/HeroSection";

@@ -54,11 +55,13 @@ export const MainMarkeplacePage = () => {
 <FeaturedCreators featuredCreators={featuredCreators.creators} />
 )}
 <Separator className="mb-[25px] mt-[60px]" />
+<FadeIn direction="up" duration={0.6}>
 <BecomeACreator
 title="Become a Creator"
 description="Join our ever-growing community of hackers and tinkerers"
 buttonText="Become a Creator"
 />
+</FadeIn>
 </main>
 </div>
 );
@@ -16,9 +16,9 @@ interface SearchBarProps {
 export const SearchBar = ({
 placeholder = 'Search for tasks like "optimise SEO"',
 backgroundColor = "bg-neutral-100 dark:bg-neutral-800",
-iconColor = "text-[#646464] dark:text-neutral-400",
-textColor = "text-[#707070] dark:text-neutral-200",
-placeholderColor = "text-[#707070] dark:text-neutral-400",
+iconColor = "text-neutral-500 dark:text-neutral-400",
+textColor = "text-neutral-500 dark:text-neutral-200",
+placeholderColor = "text-neutral-500 dark:text-neutral-400",
 width = "w-9/10 lg:w-[56.25rem]",
 height = "h-[60px]",
 }: SearchBarProps) => {

@@ -32,10 +32,13 @@ export const SearchBar = ({
 >
 <MagnifyingGlassIcon className={`h-5 w-5 md:h-7 md:w-7 ${iconColor}`} />
 <input
-type="text"
+type="search"
+name="search"
+autoComplete="off"
 value={searchQuery}
 onChange={(e) => setSearchQuery(e.target.value)}
 placeholder={placeholder}
+aria-label="Search for AI agents"
 className={`flex-grow border-none bg-transparent ${textColor} font-sans text-lg font-normal leading-[2.25rem] tracking-tight md:text-xl placeholder:${placeholderColor} focus:outline-none`}
 data-testid="store-search-input"
 />
@@ -1,10 +1,25 @@
 import Image from "next/image";
-import { StarRatingIcons } from "@/components/__legacy__/ui/icons";
+import { Star } from "@phosphor-icons/react";
 import Avatar, {
 AvatarFallback,
 AvatarImage,
 } from "@/components/atoms/Avatar/Avatar";

+function StarRating({ rating }: { rating: number }) {
+const stars = [];
+const clampedRating = Math.max(0, Math.min(5, rating));
+for (let i = 1; i <= 5; i++) {
+stars.push(
+<Star
+key={i}
+weight={i <= clampedRating ? "fill" : "regular"}
+className="h-4 w-4 text-neutral-900 dark:text-yellow-500"
+/>,
+);
+}
+return <>{stars}</>;
+}
+
 interface StoreCardProps {
 agentName: string;
 agentImage: string;

@@ -34,7 +49,7 @@ export const StoreCard: React.FC<StoreCardProps> = ({

 return (
 <div
-className="flex h-[27rem] w-full max-w-md cursor-pointer flex-col items-start rounded-3xl bg-background transition-all duration-300 hover:shadow-lg dark:hover:shadow-gray-700"
+className="flex h-[27rem] w-full max-w-md cursor-pointer flex-col items-start rounded-3xl bg-background transition-shadow duration-300 hover:shadow-lg focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-neutral-950 focus-visible:ring-offset-2 dark:hover:shadow-gray-700 dark:focus-visible:ring-neutral-50"
 onClick={handleClick}
 data-testid="store-card"
 role="button"

@@ -76,7 +91,7 @@ export const StoreCard: React.FC<StoreCardProps> = ({
 <div className="mt-3 flex w-full flex-1 flex-col px-4">
 {/* Second Section: Agent Name and Creator Name */}
 <div className="flex w-full flex-col">
-<h3 className="line-clamp-2 font-poppins text-2xl font-semibold text-[#272727] dark:text-neutral-100">
+<h3 className="line-clamp-2 font-poppins text-2xl font-semibold text-neutral-800 dark:text-neutral-100">
 {agentName}
 </h3>
 {!hideAvatar && creatorName && (

@@ -107,11 +122,11 @@ export const StoreCard: React.FC<StoreCardProps> = ({
 {rating.toFixed(1)}
 </span>
 <div
-className="inline-flex items-center"
+className="inline-flex items-center gap-0.5"
 role="img"
 aria-label={`Rating: ${rating.toFixed(1)} out of 5 stars`}
 >
-{StarRatingIcons(rating)}
+<StarRating rating={rating} />
 </div>
 </div>
 </div>
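A note on the `StarRating` helper introduced above: the rating is clamped to [0, 5] and a star is filled only when `i <= clampedRating`, so fractional ratings effectively round down to whole filled stars. A tiny sketch of the same rule in isolation, illustrative only:

```ts
// Mirrors the fill rule in the StarRating helper above (illustrative only).
function filledStars(rating: number): number {
  const clamped = Math.max(0, Math.min(5, rating));
  return Math.floor(clamped); // i <= clamped holds for i = 1..floor(clamped)
}

// filledStars(4.3) === 4, filledStars(6) === 5, filledStars(-1) === 0
```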
@@ -3,12 +3,14 @@
 import { useGetV2GetUserProfile } from "@/app/api/__generated__/endpoints/store/store";
 import { ProfileInfoForm } from "@/components/__legacy__/ProfileInfoForm";
 import { ErrorCard } from "@/components/molecules/ErrorCard/ErrorCard";
+import { isLogoutInProgress } from "@/lib/autogpt-server-api/helpers";
 import { ProfileDetails } from "@/lib/autogpt-server-api/types";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { ProfileLoading } from "./ProfileLoading";

 export default function UserProfilePage() {
 const { user } = useSupabase();
+const logoutInProgress = isLogoutInProgress();

 const {
 data: profile,

@@ -18,7 +20,7 @@ export default function UserProfilePage() {
 refetch,
 } = useGetV2GetUserProfile<ProfileDetails | null>({
 query: {
-enabled: !!user,
+enabled: !!user && !logoutInProgress,
 select: (res) => {
 if (res.status === 200) {
 return {
@@ -1,5 +1,6 @@
 "use server";

+import { getHomepageRoute } from "@/lib/constants";
 import { getServerSupabase } from "@/lib/supabase/server/getServerSupabase";
 import { signupFormSchema } from "@/types/auth";
 import * as Sentry from "@sentry/nextjs";

@@ -11,6 +12,7 @@ export async function signup(
 password: string,
 confirmPassword: string,
 agreeToTerms: boolean,
+isChatEnabled: boolean,
 ) {
 try {
 const parsed = signupFormSchema.safeParse({

@@ -58,7 +60,9 @@ export async function signup(
 }

 const isOnboardingEnabled = await shouldShowOnboarding();
-const next = isOnboardingEnabled ? "/onboarding" : "/";
+const next = isOnboardingEnabled
+? "/onboarding"
+: getHomepageRoute(isChatEnabled);

 return { success: true, next };
 } catch (err) {
@@ -1,6 +1,8 @@
 import { useToast } from "@/components/molecules/Toast/use-toast";
+import { getHomepageRoute } from "@/lib/constants";
 import { useSupabase } from "@/lib/supabase/hooks/useSupabase";
 import { environment } from "@/services/environment";
+import { Flag, useGetFlag } from "@/services/feature-flags/use-get-flag";
 import { LoginProvider, signupFormSchema } from "@/types/auth";
 import { zodResolver } from "@hookform/resolvers/zod";
 import { useRouter, useSearchParams } from "next/navigation";

@@ -20,15 +22,17 @@ export function useSignupPage() {
 const [isGoogleLoading, setIsGoogleLoading] = useState(false);
 const [showNotAllowedModal, setShowNotAllowedModal] = useState(false);
 const isCloudEnv = environment.isCloud();
+const isChatEnabled = useGetFlag(Flag.CHAT);
+const homepageRoute = getHomepageRoute(isChatEnabled);

 // Get redirect destination from 'next' query parameter
 const nextUrl = searchParams.get("next");

 useEffect(() => {
 if (isLoggedIn && !isSigningUp) {
-router.push(nextUrl || "/marketplace");
+router.push(nextUrl || homepageRoute);
 }
-}, [isLoggedIn, isSigningUp, nextUrl, router]);
+}, [homepageRoute, isLoggedIn, isSigningUp, nextUrl, router]);

 const form = useForm<z.infer<typeof signupFormSchema>>({
 resolver: zodResolver(signupFormSchema),

@@ -104,6 +108,7 @@ export function useSignupPage() {
 data.password,
 data.confirmPassword,
 data.agreeToTerms,
+isChatEnabled === true,
 );

 setIsLoading(false);

@@ -129,7 +134,7 @@ export function useSignupPage() {
 }

 // Prefer the URL's next parameter, then result.next (for onboarding), then default
-const redirectTo = nextUrl || result.next || "/";
+const redirectTo = nextUrl || result.next || homepageRoute;
 router.replace(redirectTo);
 } catch (error) {
 setIsLoading(false);
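Both the signup action and `useSignupPage` now defer the default landing page to `getHomepageRoute(isChatEnabled)` from `@/lib/constants`, which is not shown in this diff. A hedged sketch of the contract these call sites assume; the concrete route strings are assumptions (the previous hard-coded fallbacks were `"/"` and `"/marketplace"`):

```ts
// Hypothetical shape of getHomepageRoute — the real helper lives in
// @/lib/constants and is not part of this diff. Route strings are assumptions.
export function getHomepageRoute(isChatEnabled: boolean): string {
  return isChatEnabled ? "/chat" : "/marketplace";
}
```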
@@ -4,12 +4,12 @@ import {
 getServerAuthToken,
 } from "@/lib/autogpt-server-api/helpers";

-import { transformDates } from "./date-transformer";
-import { environment } from "@/services/environment";
 import {
 IMPERSONATION_HEADER_NAME,
 IMPERSONATION_STORAGE_KEY,
 } from "@/lib/constants";
+import { environment } from "@/services/environment";
+import { transformDates } from "./date-transformer";

 const FRONTEND_BASE_URL =
 process.env.NEXT_PUBLIC_FRONTEND_BASE_URL || "http://localhost:3000";
@@ -1022,7 +1022,7 @@
 "get": {
 "tags": ["v2", "chat", "chat"],
 "summary": "Get Session",
-"description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session; raises NotFoundError if not found.",
+"description": "Retrieve the details of a specific chat session.\n\nLooks up a chat session by ID for the given user (if authenticated) and returns all session data including messages.\n\nArgs:\n session_id: The unique identifier for the desired chat session.\n user_id: The optional authenticated user ID, or None for anonymous access.\n\nReturns:\n SessionDetailResponse: Details for the requested session, or None if not found.",
 "operationId": "getV2GetSession",
 "security": [{ "HTTPBearerJWT": [] }],
 "parameters": [

@@ -6383,6 +6383,11 @@
 "title": "Has Human In The Loop",
 "readOnly": true
 },
+"has_sensitive_action": {
+"type": "boolean",
+"title": "Has Sensitive Action",
+"readOnly": true
+},
 "trigger_setup_info": {
 "anyOf": [
 { "$ref": "#/components/schemas/GraphTriggerInfo" },

@@ -6399,6 +6404,7 @@
 "output_schema",
 "has_external_trigger",
 "has_human_in_the_loop",
+"has_sensitive_action",
 "trigger_setup_info"
 ],
 "title": "BaseGraph"

@@ -7629,6 +7635,11 @@
 "title": "Has Human In The Loop",
 "readOnly": true
 },
+"has_sensitive_action": {
+"type": "boolean",
+"title": "Has Sensitive Action",
+"readOnly": true
+},
 "trigger_setup_info": {
 "anyOf": [
 { "$ref": "#/components/schemas/GraphTriggerInfo" },

@@ -7652,6 +7663,7 @@
 "output_schema",
 "has_external_trigger",
 "has_human_in_the_loop",
+"has_sensitive_action",
 "trigger_setup_info",
 "credentials_input_schema"
 ],

@@ -7730,6 +7742,11 @@
 "title": "Has Human In The Loop",
 "readOnly": true
 },
+"has_sensitive_action": {
+"type": "boolean",
+"title": "Has Sensitive Action",
+"readOnly": true
+},
 "trigger_setup_info": {
 "anyOf": [
 { "$ref": "#/components/schemas/GraphTriggerInfo" },

@@ -7754,6 +7771,7 @@
 "output_schema",
 "has_external_trigger",
 "has_human_in_the_loop",
+"has_sensitive_action",
 "trigger_setup_info",
 "credentials_input_schema"
 ],

@@ -7762,8 +7780,14 @@
 "GraphSettings": {
 "properties": {
 "human_in_the_loop_safe_mode": {
-"anyOf": [{ "type": "boolean" }, { "type": "null" }],
-"title": "Human In The Loop Safe Mode"
+"type": "boolean",
+"title": "Human In The Loop Safe Mode",
+"default": true
+},
+"sensitive_action_safe_mode": {
+"type": "boolean",
+"title": "Sensitive Action Safe Mode",
+"default": false
 }
 },
 "type": "object",

@@ -7921,6 +7945,16 @@
 "title": "Has External Trigger",
 "description": "Whether the agent has an external trigger (e.g. webhook) node"
 },
+"has_human_in_the_loop": {
+"type": "boolean",
+"title": "Has Human In The Loop",
+"description": "Whether the agent has human-in-the-loop blocks"
+},
+"has_sensitive_action": {
+"type": "boolean",
+"title": "Has Sensitive Action",
+"description": "Whether the agent has sensitive action blocks"
+},
 "trigger_setup_info": {
 "anyOf": [
 { "$ref": "#/components/schemas/GraphTriggerInfo" },

@@ -7967,6 +8001,8 @@
 "output_schema",
 "credentials_input_schema",
 "has_external_trigger",
+"has_human_in_the_loop",
+"has_sensitive_action",
 "new_output",
 "can_access_graph",
 "is_latest_version",
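After these OpenAPI changes, regenerating the client with `pnpm generate:api` should produce models along these lines. The field names and defaults come from the schema above, but the exact generated TypeScript depends on the generator, so treat this as a sketch:

```ts
// Approximate shape of the regenerated models (illustrative only).
export interface GraphSettings {
  /** Was `boolean | null` with no default; now a plain boolean defaulting to true. */
  human_in_the_loop_safe_mode?: boolean;
  /** New setting, defaults to false. */
  sensitive_action_safe_mode?: boolean;
}

// The graph schemas in this diff additionally gain a required, read-only
// `has_sensitive_action: boolean` flag (and, where it was missing,
// `has_human_in_the_loop: boolean`).
```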
@@ -141,52 +141,6 @@
 }
 }

-@keyframes shimmer {
-0% {
-background-position: -200% 0;
-}
-100% {
-background-position: 200% 0;
-}
-}
-
-@keyframes l3 {
-25% {
-background-position:
-0 0,
-100% 100%,
-100% calc(100% - 5px);
-}
-50% {
-background-position:
-0 100%,
-100% 100%,
-0 calc(100% - 5px);
-}
-75% {
-background-position:
-0 100%,
-100% 0,
-100% 5px;
-}
-}
-
-.loader {
-width: 80px;
-height: 70px;
-border: 5px solid rgb(241 245 249);
-padding: 0 8px;
-box-sizing: border-box;
-background:
-linear-gradient(rgb(15 23 42) 0 0) 0 0/8px 20px,
-linear-gradient(rgb(15 23 42) 0 0) 100% 0/8px 20px,
-radial-gradient(farthest-side, rgb(15 23 42) 90%, #0000) 0 5px/8px 8px
-content-box,
-transparent;
-background-repeat: no-repeat;
-animation: l3 2s infinite linear;
-}
-
 input[type="number"]::-webkit-outer-spin-button,
 input[type="number"]::-webkit-inner-spin-button {
 -webkit-appearance: none;