Mirror of https://github.com/All-Hands-AI/OpenHands.git (synced 2026-01-10 07:18:10 -05:00)

Merge branch 'main' into fix/cleanup-orphaned-localstorage-on-conversation-delete
@@ -150,9 +150,9 @@ Each integration follows a consistent pattern with service classes, storage mode

**Important Notes:**

- Enterprise code is licensed under Polyform Free Trial License (30-day limit)
-- The enterprise server extends the OSS server through dynamic imports
+- The enterprise server extends the OpenHands server through dynamic imports
- Database changes require careful migration planning in `enterprise/migrations/`
-- Always test changes in both OSS and enterprise contexts
+- Always test changes in both OpenHands and enterprise contexts
- Use the enterprise-specific Makefile commands for development

**Enterprise Testing Best Practices:**
@@ -166,7 +166,7 @@ Each integration follows a consistent pattern with service classes, storage mode

**Import Patterns:**

- Use relative imports without `enterprise.` prefix in enterprise code
- Example: `from storage.database import session_maker` not `from enterprise.storage.database import session_maker`
-- This ensures code works in both OSS and enterprise contexts
+- This ensures code works in both OpenHands and enterprise contexts

**Test Structure:**

- Place tests in `enterprise/tests/unit/` following the same structure as the source code
@@ -10,6 +10,15 @@ repos:
        args: ["--allow-multiple-documents"]
      - id: debug-statements

+  - repo: local
+    hooks:
+      - id: warn-appmode-oss
+        name: "Warn on AppMode.OSS in backend (use AppMode.OPENHANDS)"
+        language: system
+        entry: bash -lc 'if rg -n "\\bAppMode\\.OSS\\b" openhands tests/unit; then echo "Found AppMode.OSS usage. Prefer AppMode.OPENHANDS."; exit 1; fi'
+        pass_filenames: false
+
  - repo: https://github.com/tox-dev/pyproject-fmt
    rev: v2.5.1
    hooks:
@@ -10,13 +10,13 @@ This directory contains the enterprise server used by [OpenHands Cloud](https://

You may also want to check out the MIT-licensed [OpenHands](https://github.com/OpenHands/OpenHands)

-## Extension of OpenHands (OSS)
+## Extension of OpenHands

-The code in `/enterprise` directory builds on top of open source (OSS) code, extending its functionality. The enterprise code is entangled with the OSS code in two ways
+The code in `/enterprise` builds on top of OpenHands (MIT-licensed), extending its functionality. The enterprise code is entangled with OpenHands in two ways:

-- Enterprise stacks on top of OSS. For example, the middleware in enterprise is stacked right on top of the middlewares in OSS. In `SAAS`, the middleware from BOTH repos will be present and running (which can sometimes cause conflicts)
+- Enterprise stacks on top of OpenHands. For example, the middleware in enterprise is stacked right on top of the middlewares in OpenHands. In `SAAS`, the middleware from BOTH repos will be present and running (which can sometimes cause conflicts)

-- Enterprise overrides the implementation in OSS (only one is present at a time). For example, the server config SaasServerConfig which overrides [`ServerConfig`](https://github.com/OpenHands/OpenHands/blob/main/openhands/server/config/server_config.py#L8) on OSS. This is done through dynamic imports ([see here](https://github.com/OpenHands/OpenHands/blob/main/openhands/server/config/server_config.py#L37-#L45))
+- Enterprise overrides the implementation in OpenHands (only one is present at a time). For example, the server config SaasServerConfig overrides [`ServerConfig`](https://github.com/OpenHands/OpenHands/blob/main/openhands/server/config/server_config.py#L8) in OpenHands. This is done through dynamic imports ([see here](https://github.com/OpenHands/OpenHands/blob/main/openhands/server/config/server_config.py#L37-#L45))

Key areas that change on `SAAS` are
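To make the override mechanism concrete, here is a minimal sketch of how a dynamic-import override can work. The helper below is illustrative rather than the exact OpenHands implementation; the `OPENHANDS_CONFIG_CLS` variable does appear in the debug configurations later in this document.

```python
import importlib
import os


def get_impl(default_path: str, env_var: str) -> type:
    """Load the class named by env_var if set, falling back to default_path."""
    dotted_path = os.getenv(env_var, default_path)
    module_name, _, class_name = dotted_path.rpartition('.')
    module = importlib.import_module(module_name)
    return getattr(module, class_name)


# Setting OPENHANDS_CONFIG_CLS=server.config.SaaSServerConfig swaps in the
# enterprise server config without modifying the OpenHands code itself.
ServerConfig = get_impl(
    'openhands.server.config.server_config.ServerConfig', 'OPENHANDS_CONFIG_CLS'
)
```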
@@ -26,11 +26,11 @@ Key areas that change on `SAAS` are

### Authentication

-| Aspect | OSS | Enterprise |
+| Aspect | OpenHands | Enterprise |
| ------------------------- | ------------------------------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------- |
-| **Authentication Method** | User adds a personal access token (PAT) through the UI | User performs OAuth through the UI. The Github app provides a short-lived access token and refresh token |
+| **Authentication Method** | User adds a personal access token (PAT) through the UI | User performs OAuth through the UI. The GitHub app provides a short-lived access token and refresh token |
| **Token Storage** | PAT is stored in **Settings** | Token is stored in **GithubTokenManager** (a file store in our backend) |
-| **Authenticated status** | We simply check if token exists in `Settings` | We issue a signed cookie with `github_user_id` during oauth, so subsequent requests with the cookie can be considered authenticated |
+| **Authenticated status** | We simply check if token exists in `Settings` | We issue a signed cookie with `github_user_id` during OAuth, so subsequent requests with the cookie can be considered authenticated |

Note that in the future, authentication will happen via Keycloak. All modifications for authentication will happen in enterprise.
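As a rough illustration of the signed-cookie check described in the table, here is a minimal sketch using `itsdangerous`. The secret and the cookie payload layout are assumptions, not the actual enterprise implementation.

```python
from itsdangerous import BadSignature, URLSafeTimedSerializer

serializer = URLSafeTimedSerializer('COOKIE_SIGNING_SECRET')  # assumed secret


def issue_cookie(github_user_id: str) -> str:
    # Issued once during OAuth; the cookie carries the user id, not the token.
    return serializer.dumps({'github_user_id': github_user_id})


def authenticated_user_id(cookie_value: str, max_age_seconds: int = 7 * 24 * 3600) -> str | None:
    # Any request presenting a validly signed, unexpired cookie is authenticated.
    try:
        payload = serializer.loads(cookie_value, max_age=max_age_seconds)
        return payload['github_user_id']
    except BadSignature:
        return None
```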
@@ -38,7 +38,7 @@ Note that in the future, authentication will happen via keycloak. All modificati

The GitHub service is responsible for interacting with GitHub APIs; consequently, it uses the user's token and refreshes it when needed.

-| Aspect | OSS | Enterprise |
+| Aspect | OpenHands | Enterprise |
| ------------------------- | -------------------------------------- | ---------------------------------------------- |
| **Class used** | `GitHubService` | `SaaSGitHubService` |
| **Token used** | User's PAT fetched from `Settings` | User's token fetched from `GitHubTokenManager` |
@@ -50,7 +50,7 @@ NOTE: in the future we will simply replace the `GithubTokenManager` with keycloa

## User ID vs User Token

-- On OSS, the entire APP revolves around the Github token the user sets. `openhands/server` uses `request.state.github_token` for the entire app
+- In OpenHands, the entire app revolves around the GitHub token the user sets. `openhands/server` uses `request.state.github_token` for the entire app
- On Enterprise, the entire app revolves around the GitHub User ID. This is because the cookie sets it, so `openhands/server` AND `enterprise/server` depend on it and completely ignore `request.state.github_token` (the token is fetched from `GithubTokenManager` instead)

-Note that introducing Github User ID on OSS, for instance, will cause large breakages.
+Note that introducing GitHub User ID in OpenHands, for instance, will cause large breakages.
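To make the contrast concrete, here is a minimal sketch of the two lookups. The class and field names are hypothetical stand-ins; the real `GithubTokenManager` lives in the enterprise backend.

```python
from dataclasses import dataclass


@dataclass
class RequestState:
    github_token: str | None = None    # set in OpenHands
    github_user_id: str | None = None  # set from the signed cookie in enterprise


class GithubTokenManager:
    """Hypothetical stand-in for the enterprise file-store token manager."""

    def __init__(self) -> None:
        self._tokens: dict[str, str] = {}

    def store(self, user_id: str, token: str) -> None:
        self._tokens[user_id] = token

    def load(self, user_id: str) -> str | None:
        return self._tokens.get(user_id)


def resolve_token(state: RequestState, manager: GithubTokenManager) -> str | None:
    # OpenHands path: the request carries the PAT directly.
    if state.github_token:
        return state.github_token
    # Enterprise path: only the user id travels with the request; the
    # short-lived token is fetched (and refreshed) server-side.
    if state.github_user_id:
        return manager.load(state.github_user_id)
    return None
```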
@@ -2,7 +2,7 @@

You have a few options here, which are expanded on below:

-- A simple local development setup, with live reloading for both OSS and this repo
+- A simple local development setup, with live reloading for both OpenHands and this repo
- A more complex setup that includes Redis
- An even more complex setup that includes GitHub events
@@ -26,7 +26,7 @@ Before starting, make sure you have the following tools installed:

## Option 1: Simple local development

-This option will allow you to modify the both the OSS code and the code in this repo,
+This option will allow you to modify both the OpenHands code and the code in this repo,
and see the changes in real-time.

This option works best for most scenarios. The only thing it's missing is
@@ -105,7 +105,7 @@ export REDIS_PORT=6379

(see above)

-### 2. Build OSS Openhands
+### 2. Build OpenHands

Develop on [OpenHands](https://github.com/All-Hands-AI/OpenHands) locally. When ready, run the following inside the OpenHands repo (not the Deploy repo)
@@ -155,7 +155,7 @@ Visit the tunnel domain found in Step 4 to run the app (`https://bc71-2603-7000-

### Local Debugging with VSCode

-Local Development necessitates running a version of OpenHands that is as similar as possible to the version running in the SAAS Environment. Before running these steps, it is assumed you have a local development version of the OSS OpenHands project running.
+Local Development necessitates running a version of OpenHands that is as similar as possible to the version running in the SAAS Environment. Before running these steps, it is assumed you have a local development version of OpenHands running.

#### Redis
@@ -201,8 +201,8 @@ And then invoking `printenv`. NOTE: _DO NOT DO THIS WITH PROD!!!_ (Hopefully by
    "DEBUG": "1",
    "FILE_STORE": "local",
    "REDIS_HOST": "localhost:6379",
-   "OPENHANDS": "<YOUR LOCAL OSS OPENHANDS DIR>",
-   "FRONTEND_DIRECTORY": "<YOUR LOCAL OSS OPENHANDS DIR>/frontend/build",
+   "OPENHANDS": "<YOUR LOCAL OPENHANDS DIR>",
+   "FRONTEND_DIRECTORY": "<YOUR LOCAL OPENHANDS DIR>/frontend/build",
    "SANDBOX_RUNTIME_CONTAINER_IMAGE": "ghcr.io/openhands/runtime:main-nikolaik",
    "FILE_STORE_PATH": "<YOUR HOME DIRECTORY>/.openhands-state",
    "OPENHANDS_CONFIG_CLS": "server.config.SaaSServerConfig",
@@ -235,8 +235,8 @@ And then invoking `printenv`. NOTE: _DO NOT DO THIS WITH PROD!!!_ (Hopefully by
    "DEBUG": "1",
    "FILE_STORE": "local",
    "REDIS_HOST": "localhost:6379",
-   "OPENHANDS": "<YOUR LOCAL OSS OPENHANDS DIR>",
-   "FRONTEND_DIRECTORY": "<YOUR LOCAL OSS OPENHANDS DIR>/frontend/build",
+   "OPENHANDS": "<YOUR LOCAL OPENHANDS DIR>",
+   "FRONTEND_DIRECTORY": "<YOUR LOCAL OPENHANDS DIR>/frontend/build",
    "SANDBOX_RUNTIME_CONTAINER_IMAGE": "ghcr.io/openhands/runtime:main-nikolaik",
    "FILE_STORE_PATH": "<YOUR HOME DIRECTORY>/.openhands-state",
    "OPENHANDS_CONFIG_CLS": "server.config.SaaSServerConfig",
@@ -21,6 +21,7 @@ from integrations.utils import (
    CONVERSATION_URL,
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
+   get_session_expired_message,
)
from integrations.v1_utils import get_saas_user_auth
from jinja2 import Environment, FileSystemLoader
@@ -31,7 +32,11 @@ from server.utils.conversation_callback_utils import register_callback_processor

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.provider import ProviderToken, ProviderType
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.storage.data_models.secrets import Secrets
from openhands.utils.async_utils import call_sync_from_async
@@ -305,7 +310,7 @@ class GithubManager(Manager):
            f'[GitHub] Created conversation {conversation_id} for user {user_info.username}'
        )

-       if not github_view.v1:
+       if not github_view.v1_enabled:
            # Create a GithubCallbackProcessor
            processor = GithubCallbackProcessor(
                github_view=github_view,
@@ -342,6 +347,13 @@ class GithubManager(Manager):

            msg_info = f'@{user_info.username} please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(
+               f'[GitHub] Session expired for user {user_info.username}: {str(e)}'
+           )
+
+           msg_info = get_session_expired_message(user_info.username)
+
        msg = self.create_outgoing_message(msg_info)
        await self.send_message(msg, github_view)
@@ -1,11 +1,12 @@
import logging
import os
from typing import Any
from uuid import UUID

import httpx
from github import Auth, Github, GithubIntegration
+from integrations.utils import CONVERSATION_URL, get_summary_instruction
from pydantic import Field
+from server.auth.constants import GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY

from openhands.agent_server.models import AskAgentRequest, AskAgentResponse
from openhands.app_server.event_callback.event_callback_models import (
@@ -20,8 +21,6 @@ from openhands.app_server.event_callback.util import (
    ensure_conversation_found,
    ensure_running_sandbox,
    get_agent_server_url_from_sandbox,
-   get_conversation_url,
-   get_prompt_template,
)
from openhands.sdk import Event
from openhands.sdk.event import ConversationStateUpdateEvent
@@ -34,7 +33,6 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):

    github_view_data: dict[str, Any] = Field(default_factory=dict)
    should_request_summary: bool = Field(default=True)
-   should_extract: bool = Field(default=True)
    inline_pr_comment: bool = Field(default=False)

    async def __call__(
@@ -64,7 +62,12 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
            self.should_request_summary = False

            try:
+               _logger.info(f'[GitHub V1] Requesting summary {conversation_id}')
                summary = await self._request_summary(conversation_id)
+               _logger.info(
+                   f'[GitHub V1] Posting summary {conversation_id}',
+                   extra={'summary': summary},
+               )
                await self._post_summary_to_github(summary)

                return EventCallbackResult(
@@ -82,12 +85,12 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
            # Check if we have installation ID and credentials before posting
            if (
                self.github_view_data.get('installation_id')
-               and os.getenv('GITHUB_APP_CLIENT_ID')
-               and os.getenv('GITHUB_APP_PRIVATE_KEY')
+               and GITHUB_APP_CLIENT_ID
+               and GITHUB_APP_PRIVATE_KEY
            ):
                await self._post_summary_to_github(
                    f'OpenHands encountered an error: **{str(e)}**.\n\n'
-                   f'[See the conversation]({get_conversation_url().format(conversation_id)})'
+                   f'[See the conversation]({CONVERSATION_URL.format(conversation_id)})'
                    'for more information.'
                )
        except Exception as post_error:
@@ -115,16 +118,11 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
                f'Missing installation ID for GitHub payload: {self.github_view_data}'
            )

-       github_app_client_id = os.getenv('GITHUB_APP_CLIENT_ID', '').strip()
-       github_app_private_key = os.getenv('GITHUB_APP_PRIVATE_KEY', '').replace(
-           '\\n', '\n'
-       )
-
-       if not github_app_client_id or not github_app_private_key:
+       if not GITHUB_APP_CLIENT_ID or not GITHUB_APP_PRIVATE_KEY:
            raise ValueError('GitHub App credentials are not configured')

        github_integration = GithubIntegration(
-           auth=Auth.AppAuth(github_app_client_id, github_app_private_key),
+           auth=Auth.AppAuth(GITHUB_APP_CLIENT_ID, GITHUB_APP_PRIVATE_KEY),
        )
        token_data = github_integration.get_access_token(installation_id)
        return token_data.token
@@ -274,16 +272,16 @@ class GithubV1CallbackProcessor(EventCallbackProcessor):
            app_conversation_info.sandbox_id,
        )

-       assert (
-           sandbox.session_api_key is not None
-       ), f'No session API key for sandbox: {sandbox.id}'
+       assert sandbox.session_api_key is not None, (
+           f'No session API key for sandbox: {sandbox.id}'
+       )

        # 3. URL + instruction
        agent_server_url = get_agent_server_url_from_sandbox(sandbox)

        # Prepare message based on agent state
-       message_content = get_prompt_template('summary_prompt.j2')
+       message_content = get_summary_instruction()

        # Ask the agent and return the response text
        return await self._ask_question(
@@ -140,7 +140,10 @@ class GithubIssue(ResolverViewInterface):
    title: str
    description: str
    previous_comments: list[Comment]
-   v1: bool
+   v1_enabled: bool
+
+   def _get_branch_name(self) -> str | None:
+       return getattr(self, 'branch_name', None)

    async def _load_resolver_context(self):
        github_service = GithubServiceImpl(
@@ -188,23 +191,27 @@ class GithubIssue(ResolverViewInterface):
    async def initialize_new_conversation(self) -> ConversationMetadata:
        # FIXME: Handle if initialize_conversation returns None

-       v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id)
+       self.v1_enabled = await get_user_v1_enabled_setting(
+           self.user_info.keycloak_user_id
+       )
        logger.info(
-           f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}'
+           f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {self.v1_enabled}'
        )
-       if v1_enabled:
+       if self.v1_enabled:
            # Create dummy conversation metadata
            # Don't save to conversation store
            # V1 conversations are stored in a separate table
+           self.conversation_id = uuid4().hex
            return ConversationMetadata(
-               conversation_id=uuid4().hex, selected_repository=self.full_repo_name
+               conversation_id=self.conversation_id,
+               selected_repository=self.full_repo_name,
            )

        conversation_metadata: ConversationMetadata = await initialize_conversation(  # type: ignore[assignment]
            user_id=self.user_info.keycloak_user_id,
            conversation_id=None,
            selected_repository=self.full_repo_name,
-           selected_branch=None,
+           selected_branch=self._get_branch_name(),
            conversation_trigger=ConversationTrigger.RESOLVER,
            git_provider=ProviderType.GITHUB,
        )
@@ -218,25 +225,18 @@ class GithubIssue(ResolverViewInterface):
        conversation_metadata: ConversationMetadata,
        saas_user_auth: UserAuth,
    ):
-       v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id)
        logger.info(
-           f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}'
+           f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {self.v1_enabled}'
        )
-       if v1_enabled:
-           try:
-               # Use V1 app conversation service
-               await self._create_v1_conversation(
-                   jinja_env, saas_user_auth, conversation_metadata
-               )
-               return
-
-           except Exception as e:
-               logger.warning(f'Error checking V1 settings, falling back to V0: {e}')
-
-       # Use existing V0 conversation service
-       await self._create_v0_conversation(
-           jinja_env, git_provider_tokens, conversation_metadata
-       )
+       if self.v1_enabled:
+           # Use V1 app conversation service
+           await self._create_v1_conversation(
+               jinja_env, saas_user_auth, conversation_metadata
+           )
+       else:
+           await self._create_v0_conversation(
+               jinja_env, git_provider_tokens, conversation_metadata
+           )

    async def _create_v0_conversation(
        self,
@@ -294,6 +294,7 @@ class GithubIssue(ResolverViewInterface):
            system_message_suffix=conversation_instructions,
            initial_message=initial_message,
            selected_repository=self.full_repo_name,
+           selected_branch=self._get_branch_name(),
            git_provider=ProviderType.GITHUB,
            title=f'GitHub Issue #{self.issue_number}: {self.title}',
            trigger=ConversationTrigger.RESOLVER,
@@ -318,11 +319,9 @@ class GithubIssue(ResolverViewInterface):
                    f'Failed to start V1 conversation: {task.detail}'
                )

-       self.v1 = True
-
    def _create_github_v1_callback_processor(self):
        """Create a V1 callback processor for GitHub integration."""
-       from openhands.app_server.event_callback.github_v1_callback_processor import (
+       from integrations.github.github_v1_callback_processor import (
            GithubV1CallbackProcessor,
        )
@@ -390,31 +389,6 @@ class GithubPRComment(GithubIssueComment):

        return user_instructions, conversation_instructions

-   async def initialize_new_conversation(self) -> ConversationMetadata:
-       v1_enabled = await get_user_v1_enabled_setting(self.user_info.keycloak_user_id)
-       logger.info(
-           f'[GitHub V1]: User flag found for {self.user_info.keycloak_user_id} is {v1_enabled}'
-       )
-       if v1_enabled:
-           # Create dummy conversation metadata
-           # Don't save to conversation store
-           # V1 conversations are stored in a separate table
-           return ConversationMetadata(
-               conversation_id=uuid4().hex, selected_repository=self.full_repo_name
-           )
-
-       conversation_metadata: ConversationMetadata = await initialize_conversation(  # type: ignore[assignment]
-           user_id=self.user_info.keycloak_user_id,
-           conversation_id=None,
-           selected_repository=self.full_repo_name,
-           selected_branch=self.branch_name,
-           conversation_trigger=ConversationTrigger.RESOLVER,
-           git_provider=ProviderType.GITHUB,
-       )
-
-       self.conversation_id = conversation_metadata.conversation_id
-       return conversation_metadata

@dataclass
class GithubInlinePRComment(GithubPRComment):
@@ -830,7 +804,7 @@ class GithubFactory:
                title='',
                description='',
                previous_comments=[],
-               v1=False,
+               v1_enabled=False,
            )

        elif GithubFactory.is_issue_comment(message):
@@ -856,7 +830,7 @@ class GithubFactory:
                title='',
                description='',
                previous_comments=[],
-               v1=False,
+               v1_enabled=False,
            )

        elif GithubFactory.is_pr_comment(message):
@@ -898,7 +872,7 @@ class GithubFactory:
                title='',
                description='',
                previous_comments=[],
-               v1=False,
+               v1_enabled=False,
            )

        elif GithubFactory.is_inline_pr_comment(message):
@@ -932,7 +906,7 @@ class GithubFactory:
                title='',
                description='',
                previous_comments=[],
-               v1=False,
+               v1_enabled=False,
            )

        else:
@@ -15,6 +15,7 @@ from integrations.utils import (
    CONVERSATION_URL,
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
+   get_session_expired_message,
)
from jinja2 import Environment, FileSystemLoader
from pydantic import SecretStr
@@ -24,7 +25,11 @@ from server.utils.conversation_callback_utils import register_callback_processor

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
from openhands.integrations.provider import ProviderToken, ProviderType
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.storage.data_models.secrets import Secrets
@@ -249,6 +254,13 @@ class GitlabManager(Manager):

            msg_info = f'@{user_info.username} please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(
+               f'[GitLab] Session expired for user {user_info.username}: {str(e)}'
+           )
+
+           msg_info = get_session_expired_message(user_info.username)
+
        # Send the acknowledgment message
        msg = self.create_outgoing_message(msg_info)
        await self.send_message(msg, gitlab_view)
@@ -80,22 +80,52 @@ class SaaSGitLabService(GitLabService):
            logger.warning('external_auth_token and user_id not set!')
        return gitlab_token

-   async def get_owned_groups(self) -> list[dict]:
+   async def get_owned_groups(self, min_access_level: int = 40) -> list[dict]:
        """
-       Get all groups for which the current user is the owner.
+       Get all top-level groups where the current user has admin access.
+
+       This method supports pagination and fetches all groups where the user has
+       at least the specified access level.
+
+       Args:
+           min_access_level: Minimum access level required (default: 40 for Maintainer or Owner)
+               - 40: Maintainer or Owner
+               - 50: Owner only

        Returns:
-           list[dict]: A list of groups owned by the current user.
+           list[dict]: A list of groups where the user has the specified access level or higher.
        """
-       url = f'{self.BASE_URL}/groups'
-       params = {'owned': 'true', 'per_page': 100, 'top_level_only': 'true'}
+       groups_with_admin_access = []
+       page = 1
+       per_page = 100

-       try:
-           response, headers = await self._make_request(url, params)
-           return response
-       except Exception:
-           logger.warning('Error fetching owned groups', exc_info=True)
-           return []
+       while True:
+           try:
+               url = f'{self.BASE_URL}/groups'
+               params = {
+                   'page': str(page),
+                   'per_page': str(per_page),
+                   'min_access_level': min_access_level,
+                   'top_level_only': 'true',
+               }
+               response, headers = await self._make_request(url, params)
+
+               if not response:
+                   break
+
+               groups_with_admin_access.extend(response)
+               page += 1
+
+               # Check if we've reached the last page
+               link_header = headers.get('Link', '')
+               if 'rel="next"' not in link_header:
+                   break
+
+           except Exception:
+               logger.warning(f'Error fetching groups on page {page}', exc_info=True)
+               break
+
+       return groups_with_admin_access

    async def add_owned_projects_and_groups_to_db(self, owned_personal_projects):
        """
@@ -527,3 +557,55 @@ class SaaSGitLabService(GitLabService):
            await self._make_request(url=url, params=params, method=RequestMethod.POST)
        except Exception as e:
            logger.exception(f'[GitLab]: Reply to MR failed {e}')

+   async def get_user_resources_with_admin_access(
+       self,
+   ) -> tuple[list[dict], list[dict]]:
+       """
+       Get all projects and groups where the current user has admin access (maintainer or owner).
+
+       Returns:
+           tuple[list[dict], list[dict]]: A tuple containing:
+               - list of projects where the user has admin access
+               - list of groups where the user has admin access
+       """
+       projects_with_admin_access = []
+       groups_with_admin_access = []
+
+       # Fetch all projects the user is a member of
+       page = 1
+       per_page = 100
+       while True:
+           try:
+               url = f'{self.BASE_URL}/projects'
+               params = {
+                   'page': str(page),
+                   'per_page': str(per_page),
+                   'membership': 1,
+                   'min_access_level': 40,  # Maintainer or Owner
+               }
+               response, headers = await self._make_request(url, params)
+
+               if not response:
+                   break
+
+               projects_with_admin_access.extend(response)
+               page += 1
+
+               # Check if we've reached the last page
+               link_header = headers.get('Link', '')
+               if 'rel="next"' not in link_header:
+                   break
+
+           except Exception:
+               logger.warning(f'Error fetching projects on page {page}', exc_info=True)
+               break
+
+       # Fetch all groups where the user is owner or maintainer
+       groups_with_admin_access = await self.get_owned_groups(min_access_level=40)
+
+       logger.info(
+           f'Found {len(projects_with_admin_access)} projects and {len(groups_with_admin_access)} groups with admin access'
+       )
+
+       return projects_with_admin_access, groups_with_admin_access
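A quick usage sketch for the new method; how an authenticated `SaaSGitLabService` instance is obtained varies by call site and is assumed here. Both returned lists hold raw GitLab REST API dicts, so the key names below follow the GitLab API.

```python
from integrations.gitlab.gitlab_service import SaaSGitLabService


async def list_admin_resources(service: SaaSGitLabService) -> None:
    # Paginates through /projects and /groups with min_access_level=40.
    projects, groups = await service.get_user_resources_with_admin_access()
    print(f'{len(projects)} projects, {len(groups)} groups with admin access')
    for group in groups:
        print(group['id'], group.get('full_path'))
```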
enterprise/integrations/gitlab/webhook_installation.py (new file, 199 lines)
@@ -0,0 +1,199 @@
"""Shared utilities for GitLab webhook installation.

This module contains reusable functions and classes for installing GitLab webhooks
that can be used by both the cron job and API routes.
"""

from typing import cast
from uuid import uuid4

from integrations.types import GitLabResourceType
from integrations.utils import GITLAB_WEBHOOK_URL
from storage.gitlab_webhook import GitlabWebhook, WebhookStatus
from storage.gitlab_webhook_store import GitlabWebhookStore

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.service_types import GitService

# Webhook configuration constants
WEBHOOK_NAME = 'OpenHands Resolver'
SCOPES: list[str] = [
    'note_events',
    'merge_requests_events',
    'confidential_issues_events',
    'issues_events',
    'confidential_note_events',
    'job_events',
    'pipeline_events',
]


class BreakLoopException(Exception):
    """Exception raised when webhook installation conditions are not met or rate limited."""

    pass


async def verify_webhook_conditions(
    gitlab_service: type[GitService],
    resource_type: GitLabResourceType,
    resource_id: str,
    webhook_store: GitlabWebhookStore,
    webhook: GitlabWebhook,
) -> None:
    """
    Verify all conditions are met for webhook installation.
    Raises BreakLoopException if any condition fails or rate limited.

    Args:
        gitlab_service: GitLab service instance
        resource_type: Type of resource (PROJECT or GROUP)
        resource_id: ID of the resource
        webhook_store: Webhook store instance
        webhook: Webhook object to verify
    """
    from integrations.gitlab.gitlab_service import SaaSGitLabService

    gitlab_service = cast(type[SaaSGitLabService], gitlab_service)

    # Check if resource exists
    does_resource_exist, status = await gitlab_service.check_resource_exists(
        resource_type, resource_id
    )

    logger.info(
        'Does resource exist',
        extra={
            'does_resource_exist': does_resource_exist,
            'status': status,
            'resource_id': resource_id,
            'resource_type': resource_type,
        },
    )

    if status == WebhookStatus.RATE_LIMITED:
        raise BreakLoopException()
    if not does_resource_exist and status != WebhookStatus.RATE_LIMITED:
        await webhook_store.delete_webhook(webhook)
        raise BreakLoopException()

    # Check if user has admin access
    (
        is_user_admin_of_resource,
        status,
    ) = await gitlab_service.check_user_has_admin_access_to_resource(
        resource_type, resource_id
    )

    logger.info(
        'Is user admin',
        extra={
            'is_user_admin': is_user_admin_of_resource,
            'status': status,
            'resource_id': resource_id,
            'resource_type': resource_type,
        },
    )

    if status == WebhookStatus.RATE_LIMITED:
        raise BreakLoopException()
    if not is_user_admin_of_resource:
        await webhook_store.delete_webhook(webhook)
        raise BreakLoopException()

    # Check if webhook already exists
    (
        does_webhook_exist_on_resource,
        status,
    ) = await gitlab_service.check_webhook_exists_on_resource(
        resource_type, resource_id, GITLAB_WEBHOOK_URL
    )

    logger.info(
        'Does webhook already exist',
        extra={
            'does_webhook_exist_on_resource': does_webhook_exist_on_resource,
            'status': status,
            'resource_id': resource_id,
            'resource_type': resource_type,
        },
    )

    if status == WebhookStatus.RATE_LIMITED:
        raise BreakLoopException()
    if does_webhook_exist_on_resource != webhook.webhook_exists:
        await webhook_store.update_webhook(
            webhook, {'webhook_exists': does_webhook_exist_on_resource}
        )

    if does_webhook_exist_on_resource:
        raise BreakLoopException()


async def install_webhook_on_resource(
    gitlab_service: type[GitService],
    resource_type: GitLabResourceType,
    resource_id: str,
    webhook_store: GitlabWebhookStore,
    webhook: GitlabWebhook,
) -> tuple[str | None, WebhookStatus | None]:
    """
    Install webhook on a GitLab resource.

    Args:
        gitlab_service: GitLab service instance
        resource_type: Type of resource (PROJECT or GROUP)
        resource_id: ID of the resource
        webhook_store: Webhook store instance
        webhook: Webhook object to install

    Returns:
        Tuple of (webhook_id, status)
    """
    from integrations.gitlab.gitlab_service import SaaSGitLabService

    gitlab_service = cast(type[SaaSGitLabService], gitlab_service)

    webhook_secret = f'{webhook.user_id}-{str(uuid4())}'
    webhook_uuid = f'{str(uuid4())}'

    webhook_id, status = await gitlab_service.install_webhook(
        resource_type=resource_type,
        resource_id=resource_id,
        webhook_name=WEBHOOK_NAME,
        webhook_url=GITLAB_WEBHOOK_URL,
        webhook_secret=webhook_secret,
        webhook_uuid=webhook_uuid,
        scopes=SCOPES,
    )

    logger.info(
        'Creating new webhook',
        extra={
            'webhook_id': webhook_id,
            'status': status,
            'resource_id': resource_id,
            'resource_type': resource_type,
        },
    )

    if status == WebhookStatus.RATE_LIMITED:
        raise BreakLoopException()

    if webhook_id:
        await webhook_store.update_webhook(
            webhook=webhook,
            update_fields={
                'webhook_secret': webhook_secret,
                'webhook_exists': True,  # webhook was created
                'webhook_url': GITLAB_WEBHOOK_URL,
                'scopes': SCOPES,
                'webhook_uuid': webhook_uuid,  # required to identify which webhook installation is sending payload
            },
        )

        logger.info(
            f'Installed webhook for {webhook.user_id} on {resource_type}:{resource_id}'
        )

    return webhook_id, status
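A hypothetical driver for these helpers, mirroring how the cron job or an API route might combine them. The `try_install` wrapper and the `resource_type`/`resource_id` fields on the webhook row are illustrative assumptions, not code from this commit:

```python
from integrations.gitlab.webhook_installation import (
    BreakLoopException,
    install_webhook_on_resource,
    verify_webhook_conditions,
)


async def try_install(gitlab_service, webhook_store, webhook) -> None:
    # BreakLoopException signals "stop work on this resource": either a
    # precondition failed (resource gone, no admin access, webhook already
    # installed) or GitLab rate-limited us.
    try:
        await verify_webhook_conditions(
            gitlab_service, webhook.resource_type, webhook.resource_id,
            webhook_store, webhook,
        )
        await install_webhook_on_resource(
            gitlab_service, webhook.resource_type, webhook.resource_id,
            webhook_store, webhook,
        )
    except BreakLoopException:
        pass  # the caller decides whether to retry on a later run
```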
@@ -17,6 +17,7 @@ from integrations.utils import (
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
    filter_potential_repos_by_user_msg,
+   get_session_expired_message,
)
from jinja2 import Environment, FileSystemLoader
from server.auth.saas_user_auth import get_user_auth_from_keycloak_id
@@ -30,7 +31,11 @@ from openhands.core.logger import openhands_logger as logger
from openhands.integrations.provider import ProviderHandler
from openhands.integrations.service_types import Repository
from openhands.server.shared import server_config
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.server.user_auth.user_auth import UserAuth
from openhands.utils.http_session import httpx_verify_option
@@ -380,6 +385,10 @@ class JiraManager(Manager):
            logger.warning(f'[Jira] LLM authentication error: {str(e)}')
            msg_info = f'Please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(f'[Jira] Session expired: {str(e)}')
+           msg_info = get_session_expired_message()
+
        except Exception as e:
            logger.error(
                f'[Jira] Unexpected error starting job: {str(e)}', exc_info=True
@@ -19,6 +19,7 @@ from integrations.utils import (
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
    filter_potential_repos_by_user_msg,
+   get_session_expired_message,
)
from jinja2 import Environment, FileSystemLoader
from server.auth.saas_user_auth import get_user_auth_from_keycloak_id
@@ -32,7 +33,11 @@ from openhands.core.logger import openhands_logger as logger
from openhands.integrations.provider import ProviderHandler
from openhands.integrations.service_types import Repository
from openhands.server.shared import server_config
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.server.user_auth.user_auth import UserAuth
from openhands.utils.http_session import httpx_verify_option
@@ -397,6 +402,10 @@ class JiraDcManager(Manager):
            logger.warning(f'[Jira DC] LLM authentication error: {str(e)}')
            msg_info = f'Please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(f'[Jira DC] Session expired: {str(e)}')
+           msg_info = get_session_expired_message()
+
        except Exception as e:
            logger.error(
                f'[Jira DC] Unexpected error starting job: {str(e)}', exc_info=True
@@ -16,6 +16,7 @@ from integrations.utils import (
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
    filter_potential_repos_by_user_msg,
+   get_session_expired_message,
)
from jinja2 import Environment, FileSystemLoader
from server.auth.saas_user_auth import get_user_auth_from_keycloak_id
@@ -29,7 +30,11 @@ from openhands.core.logger import openhands_logger as logger
from openhands.integrations.provider import ProviderHandler
from openhands.integrations.service_types import Repository
from openhands.server.shared import server_config
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.server.user_auth.user_auth import UserAuth
from openhands.utils.http_session import httpx_verify_option
@@ -387,6 +392,10 @@ class LinearManager(Manager):
            logger.warning(f'[Linear] LLM authentication error: {str(e)}')
            msg_info = f'Please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(f'[Linear] Session expired: {str(e)}')
+           msg_info = get_session_expired_message()
+
        except Exception as e:
            logger.error(
                f'[Linear] Unexpected error starting job: {str(e)}', exc_info=True
@@ -14,6 +14,7 @@ from integrations.slack.slack_view import (
from integrations.utils import (
    HOST_URL,
    OPENHANDS_RESOLVER_TEMPLATES_DIR,
+   get_session_expired_message,
)
from jinja2 import Environment, FileSystemLoader
from pydantic import SecretStr
@@ -29,7 +30,11 @@ from openhands.core.logger import openhands_logger as logger
from openhands.integrations.provider import ProviderHandler
from openhands.integrations.service_types import Repository
from openhands.server.shared import config, server_config
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)
from openhands.server.user_auth.user_auth import UserAuth

authorize_url_generator = AuthorizeUrlGenerator(
@@ -352,6 +357,13 @@ class SlackManager(Manager):

            msg_info = f'@{user_info.slack_display_name} please set a valid LLM API key in [OpenHands Cloud]({HOST_URL}) before starting a job.'

+       except SessionExpiredError as e:
+           logger.warning(
+               f'[Slack] Session expired for user {user_info.slack_display_name}: {str(e)}'
+           )
+
+           msg_info = get_session_expired_message(user_info.slack_display_name)
+
        except StartingConvoException as e:
            msg_info = str(e)
@@ -47,6 +47,27 @@ ENABLE_PROACTIVE_CONVERSATION_STARTERS = (
    os.getenv('ENABLE_PROACTIVE_CONVERSATION_STARTERS', 'false').lower() == 'true'
)


+def get_session_expired_message(username: str | None = None) -> str:
+    """Get a user-friendly session expired message.
+
+    Used by integrations to notify users when their Keycloak offline session
+    has expired.
+
+    Args:
+        username: Optional username to mention in the message. If provided,
+            the message will include an @username prefix (used by Git providers
+            like GitHub, GitLab, Slack). If None, returns a generic message
+            (used by Jira, Jira DC, Linear).
+
+    Returns:
+        A formatted session expired message
+    """
+    if username:
+        return f'@{username} your session has expired. Please log in again at [OpenHands Cloud]({HOST_URL}) and try again.'
+    return f'Your session has expired. Please log in again at [OpenHands Cloud]({HOST_URL}) and try again.'
+
+
# Toggle for solvability report feature
ENABLE_SOLVABILITY_ANALYSIS = (
    os.getenv('ENABLE_SOLVABILITY_ANALYSIS', 'false').lower() == 'true'
@@ -58,7 +79,10 @@ ENABLE_V1_GITHUB_RESOLVER = (
)


-OPENHANDS_RESOLVER_TEMPLATES_DIR = 'openhands/integrations/templates/resolver/'
+OPENHANDS_RESOLVER_TEMPLATES_DIR = (
+    os.getenv('OPENHANDS_RESOLVER_TEMPLATES_DIR')
+    or 'openhands/integrations/templates/resolver/'
+)
jinja_env = Environment(loader=FileSystemLoader(OPENHANDS_RESOLVER_TEMPLATES_DIR))
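A quick usage sketch for the helpers above (illustrative; the integration managers call `get_session_expired_message` from their exception handlers, and the env override lets tests or alternate deployments repoint the template directory):

```python
import os

# Must be set before integrations.utils is imported; the path is hypothetical.
os.environ['OPENHANDS_RESOLVER_TEMPLATES_DIR'] = '/tmp/resolver-templates'

from integrations.utils import get_session_expired_message

print(get_session_expired_message('alice'))
# "@alice your session has expired. Please log in again at [OpenHands Cloud](...) and try again."
print(get_session_expired_message())
# "Your session has expired. Please log in again at [OpenHands Cloud](...) and try again."
```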
@@ -0,0 +1,61 @@
"""bump condenser defaults: max_size 120->240

Revision ID: 086
Revises: 085
Create Date: 2026-01-05

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op
from sqlalchemy.sql import column, table

# revision identifiers, used by Alembic.
revision: str = '086'
down_revision: Union[str, None] = '085'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema.

    Update existing users with condenser_max_size=120 or NULL to 240.
    This covers both users who had the old default (120) explicitly set
    and users who had NULL (which defaulted to 120 in the application code).
    The SDK default for keep_first will be used automatically.
    """
    user_settings_table = table(
        'user_settings',
        column('condenser_max_size', sa.Integer),
    )
    # Update users with explicit 120 value
    op.execute(
        user_settings_table.update()
        .where(user_settings_table.c.condenser_max_size == 120)
        .values(condenser_max_size=240)
    )
    # Update users with NULL value (which defaulted to 120 in application code)
    op.execute(
        user_settings_table.update()
        .where(user_settings_table.c.condenser_max_size.is_(None))
        .values(condenser_max_size=240)
    )


def downgrade() -> None:
    """Downgrade schema.

    Note: This sets all 240 values back to NULL (not 120) since we can't
    distinguish between users who had 120 vs NULL before the upgrade.
    """
    user_settings_table = table(
        'user_settings', column('condenser_max_size', sa.Integer)
    )
    op.execute(
        user_settings_table.update()
        .where(user_settings_table.c.condenser_max_size == 240)
        .values(condenser_max_size=None)
    )
@@ -0,0 +1,54 @@
"""create blocked_email_domains table

Revision ID: 087
Revises: 086
Create Date: 2025-01-27 00:00:00.000000

"""

from typing import Sequence, Union

import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision: str = '087'
down_revision: Union[str, None] = '086'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Create blocked_email_domains table for storing blocked email domain patterns."""
    op.create_table(
        'blocked_email_domains',
        sa.Column('id', sa.Integer(), sa.Identity(), nullable=False, primary_key=True),
        sa.Column('domain', sa.String(), nullable=False),
        sa.Column(
            'created_at',
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text('CURRENT_TIMESTAMP'),
        ),
        sa.Column(
            'updated_at',
            sa.DateTime(timezone=True),
            nullable=False,
            server_default=sa.text('CURRENT_TIMESTAMP'),
        ),
        sa.PrimaryKeyConstraint('id'),
    )

    # Create unique index on domain column
    op.create_index(
        'ix_blocked_email_domains_domain',
        'blocked_email_domains',
        ['domain'],
        unique=True,
    )


def downgrade() -> None:
    """Drop blocked_email_domains table."""
    op.drop_index('ix_blocked_email_domains_domain', table_name='blocked_email_domains')
    op.drop_table('blocked_email_domains')
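Both revisions are plain Alembic migrations. A minimal sketch of applying them programmatically is below; the config path is an assumption, and deployments would normally use the enterprise Makefile targets instead:

```python
from alembic import command
from alembic.config import Config

# Hypothetical path; the enterprise repo ships its own Alembic configuration.
cfg = Config('enterprise/alembic.ini')
command.upgrade(cfg, 'head')     # applies 086 then 087 in order
# command.downgrade(cfg, '085')  # reverts both; note 086's downgrade sets 240 values to NULL
```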
enterprise/poetry.lock (generated, 281 lines changed)
@@ -4517,14 +4517,14 @@ dev = ["Sphinx (>=5.1.1)", "black (==24.8.0)", "build (>=0.10.0)", "coverage[tom

[[package]]
name = "libtmux"
-version = "0.46.2"
+version = "0.53.0"
description = "Typed library that provides an ORM wrapper for tmux, a terminal multiplexer."
optional = false
-python-versions = "<4.0,>=3.9"
+python-versions = "<4.0,>=3.10"
groups = ["main"]
files = [
-    {file = "libtmux-0.46.2-py3-none-any.whl", hash = "sha256:6c32dbf22bde8e5e33b2714a4295f6e838dc640f337cd4c085a044f6828c7793"},
-    {file = "libtmux-0.46.2.tar.gz", hash = "sha256:9a398fec5d714129c8344555d466e1a903dfc0f741ba07aabe75a8ceb25c5dda"},
+    {file = "libtmux-0.53.0-py3-none-any.whl", hash = "sha256:024b7ae6a12aae55358e8feb914c8632b3ab9bd61c0987c53559643c6a58ee4f"},
+    {file = "libtmux-0.53.0.tar.gz", hash = "sha256:1d19af4cea0c19543954d7e7317c7025c0739b029cccbe3b843212fae238f1bd"},
]

[[package]]
@@ -5836,14 +5836,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0

[[package]]
name = "openhands-agent-server"
-version = "1.7.3"
+version = "1.7.4"
description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
-    {file = "openhands_agent_server-1.7.3-py3-none-any.whl", hash = "sha256:456e7162cefec8ed7fda61433180b3f867265e15c7151b3a2e3e02546c9d9b6d"},
-    {file = "openhands_agent_server-1.7.3.tar.gz", hash = "sha256:2c06dc497c38050d445559da2825d4d69fe84af90289c82a95317e45359cc547"},
+    {file = "openhands_agent_server-1.7.4-py3-none-any.whl", hash = "sha256:997b3dc5243a1ba105f5bd9b0b5bc0cd590c5aa79cd609f23f841218e5f77393"},
+    {file = "openhands_agent_server-1.7.4.tar.gz", hash = "sha256:0491cf2a5d596610364cbbe9360412bc10a66ae71c0466ab64fd264826e6f1d8"},
]

[package.dependencies]
@@ -5860,7 +5860,7 @@ wsproto = ">=1.2.0"

[[package]]
name = "openhands-ai"
-version = "0.0.0-post.5769+b5758b160"
+version = "0.0.0-post.5803+a8098505c"
description = "OpenHands: Code Less, Make More"
optional = false
python-versions = "^3.12,<3.14"
@@ -5902,9 +5902,9 @@ memory-profiler = "^0.61.0"
numpy = "*"
openai = "2.8.0"
openhands-aci = "0.3.2"
-openhands-agent-server = "1.7.3"
-openhands-sdk = "1.7.3"
-openhands-tools = "1.7.3"
+openhands-agent-server = "1.7.4"
+openhands-sdk = "1.7.4"
+openhands-tools = "1.7.4"
opentelemetry-api = "^1.33.1"
opentelemetry-exporter-otlp-proto-grpc = "^1.33.1"
pathspec = "^0.12.1"
@@ -5921,7 +5921,6 @@ pygithub = "^2.5.0"
pyjwt = "^2.9.0"
pylatexenc = "*"
pypdf = "^6.0.0"
-PyPDF2 = "*"
python-docx = "*"
python-dotenv = "*"
python-frontmatter = "^1.1.0"
@@ -5960,14 +5959,14 @@ url = ".."

[[package]]
name = "openhands-sdk"
-version = "1.7.3"
+version = "1.7.4"
description = "OpenHands SDK - Core functionality for building AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
-    {file = "openhands_sdk-1.7.3-py3-none-any.whl", hash = "sha256:afbce9c9e7d1167d9b9610673657fbbcd454b04f0151d943418d897de790aeed"},
-    {file = "openhands_sdk-1.7.3.tar.gz", hash = "sha256:7fa0cde9148ab905e24346b50f2d7267fb6dde32ec8dcbc1c7d35ced6e0233aa"},
+    {file = "openhands_sdk-1.7.4-py3-none-any.whl", hash = "sha256:b57511a0467bd3fa64e8cccb7e8026f95e10ee7c5b148335eaa762a32aad8369"},
+    {file = "openhands_sdk-1.7.4.tar.gz", hash = "sha256:f8e63f996a13d2ea41447384b77a4ffebeb9e85aa54fafcf584f97f7cdc2cd9b"},
]

[package.dependencies]
@@ -5976,7 +5975,7 @@ fastmcp = ">=2.11.3"
httpx = ">=0.27.0"
litellm = ">=1.80.10"
lmnr = ">=0.7.24"
-pydantic = ">=2.11.7"
+pydantic = ">=2.12.5"
python-frontmatter = ">=1.1.0"
python-json-logger = ">=3.3.0"
tenacity = ">=9.1.2"
@@ -5987,14 +5986,14 @@ boto3 = ["boto3 (>=1.35.0)"]

[[package]]
name = "openhands-tools"
-version = "1.7.3"
+version = "1.7.4"
description = "OpenHands Tools - Runtime tools for AI agents"
optional = false
python-versions = ">=3.12"
groups = ["main"]
files = [
-    {file = "openhands_tools-1.7.3-py3-none-any.whl", hash = "sha256:e823f5a47936dd23221cb4eb846d62b59dce5be69210330095fc242772e71d27"},
-    {file = "openhands_tools-1.7.3.tar.gz", hash = "sha256:f2779cc5ca3b78b9afebb7617006da8069c12b41e6d67cbf0cc8de5d819005f8"},
+    {file = "openhands_tools-1.7.4-py3-none-any.whl", hash = "sha256:b6a9b04bc59610087d6df789054c966df176c16371fc9c0b0f333ba09f5710d1"},
+    {file = "openhands_tools-1.7.4.tar.gz", hash = "sha256:776b570da0e86ae48c7815e9adb3839e953e2f4cab7295184ce15849348c52e7"},
]

[package.dependencies]
@@ -6003,7 +6002,7 @@ binaryornot = ">=0.4.4"
browser-use = ">=0.8.0"
cachetools = "*"
func-timeout = ">=4.3.5"
-libtmux = ">=0.46.2"
+libtmux = ">=0.53.0"
openhands-sdk = "*"
pydantic = ">=2.11.7"
tom-swe = ">=1.0.3"
@@ -7255,22 +7254,22 @@ markers = {test = "platform_python_implementation == \"CPython\" and sys_platfor

[[package]]
name = "pydantic"
-version = "2.11.7"
+version = "2.12.5"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main", "test"]
files = [
-    {file = "pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b"},
-    {file = "pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db"},
+    {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"},
+    {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"},
]

[package.dependencies]
annotated-types = ">=0.6.0"
email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""}
-pydantic-core = "2.33.2"
-typing-extensions = ">=4.12.2"
-typing-inspection = ">=0.4.0"
+pydantic-core = "2.41.5"
+typing-extensions = ">=4.14.1"
+typing-inspection = ">=0.4.2"

[package.extras]
email = ["email-validator (>=2.0.0)"]
@@ -7278,115 +7277,137 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows

[[package]]
name = "pydantic-core"
-version = "2.33.2"
+version = "2.41.5"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.9"
groups = ["main", "test"]
files = [
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"},
|
||||
{file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"},
|
||||
{file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"},
|
||||
{file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"},
|
||||
{file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"},
|
||||
{file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"},
|
||||
{file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"},
|
||||
{file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"},
|
||||
{file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"},
|
||||
{file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"},
|
||||
{file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"},
|
||||
{file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"},
|
||||
{file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"},
|
||||
{file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"},
|
||||
{file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"},
|
||||
{file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"},
|
||||
{file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"},
|
||||
{file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"},
|
||||
{file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"},
|
||||
{file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"},
|
||||
{file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"},
|
||||
{file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"},
|
||||
{file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"},
|
||||
{file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"},
|
||||
{file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"},
|
||||
{file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"},
|
||||
{file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
|
||||
typing-extensions = ">=4.14.1"
|
||||
|
||||
[[package]]
|
||||
name = "pydantic-settings"
|
||||
@@ -13625,14 +13646,14 @@ files = [

[[package]]
name = "typing-inspection"
version = "0.4.1"
version = "0.4.2"
description = "Runtime typing introspection tools"
optional = false
python-versions = ">=3.9"
groups = ["main", "test"]
files = [
    {file = "typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51"},
    {file = "typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28"},
    {file = "typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7"},
    {file = "typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464"},
]

[package.dependencies]

@@ -38,8 +38,3 @@ ROLE_CHECK_ENABLED = os.getenv('ROLE_CHECK_ENABLED', 'false').lower() in (
    'y',
    'on',
)
BLOCKED_EMAIL_DOMAINS = [
    domain.strip().lower()
    for domain in os.getenv('BLOCKED_EMAIL_DOMAINS', '').split(',')
    if domain.strip()
]

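Note: with `BLOCKED_EMAIL_DOMAINS` removed from the auth constants above, blocked domains now live in the database behind `BlockedEmailDomainStore` (next file). A minimal, hypothetical one-off seed script in that spirit is sketched below; the `BlockedEmailDomain` model and its `domain` column are assumptions, not part of this diff.

```python
# Hypothetical one-off seed script: copy the old BLOCKED_EMAIL_DOMAINS
# environment variable into the new blocked-email-domain table. The
# BlockedEmailDomain model and its `domain` column are assumed for
# illustration; adapt to the actual enterprise schema.
import os

from storage.blocked_email_domain import BlockedEmailDomain  # assumed model
from storage.database import session_maker

domains = [
    d.strip().lower()
    for d in os.getenv('BLOCKED_EMAIL_DOMAINS', '').split(',')
    if d.strip()
]

with session_maker() as session:
    for domain in domains:
        session.add(BlockedEmailDomain(domain=domain))
    session.commit()
```
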
@@ -1,20 +1,13 @@
from server.auth.constants import BLOCKED_EMAIL_DOMAINS
from storage.blocked_email_domain_store import BlockedEmailDomainStore
from storage.database import session_maker

from openhands.core.logger import openhands_logger as logger


class DomainBlocker:
    def __init__(self) -> None:
    def __init__(self, store: BlockedEmailDomainStore) -> None:
        logger.debug('Initializing DomainBlocker')
        self.blocked_domains: list[str] = BLOCKED_EMAIL_DOMAINS
        if self.blocked_domains:
            logger.info(
                f'Successfully loaded {len(self.blocked_domains)} blocked email domains: {self.blocked_domains}'
            )

    def is_active(self) -> bool:
        """Check if domain blocking is enabled"""
        return bool(self.blocked_domains)
        self.store = store

    def _extract_domain(self, email: str) -> str | None:
        """Extract and normalize email domain from email address"""
@@ -31,16 +24,16 @@ class DomainBlocker:
        return None

    def is_domain_blocked(self, email: str) -> bool:
        """Check if email domain is blocked
        """Check if email domain is blocked by querying the database directly via SQL.

        Supports blocking:
        - Exact domains: 'example.com' blocks 'user@example.com'
        - Subdomains: 'example.com' blocks 'user@subdomain.example.com'
        - TLDs: '.us' blocks 'user@company.us' and 'user@subdomain.company.us'
        """
        if not self.is_active():
            return False

        The blocking logic is handled efficiently in SQL, avoiding the need to load
        all blocked domains into memory.
        """
        if not email:
            logger.debug('No email provided for domain check')
            return False
@@ -50,26 +43,25 @@ class DomainBlocker:
            logger.debug(f'Could not extract domain from email: {email}')
            return False

        # Check if domain matches any blocked pattern
        for blocked_pattern in self.blocked_domains:
            if blocked_pattern.startswith('.'):
                # TLD pattern (e.g., '.us') - check if domain ends with it
                if domain.endswith(blocked_pattern):
                    logger.warning(
                        f'Email domain {domain} is blocked by TLD pattern {blocked_pattern} for email: {email}'
                    )
                    return True
        try:
            # Query database directly via SQL to check if domain is blocked
            is_blocked = self.store.is_domain_blocked(domain)

            if is_blocked:
                logger.warning(f'Email domain {domain} is blocked for email: {email}')
            else:
                # Full domain pattern (e.g., 'example.com')
                # Block exact match or subdomains
                if domain == blocked_pattern or domain.endswith(f'.{blocked_pattern}'):
                    logger.warning(
                        f'Email domain {domain} is blocked by domain pattern {blocked_pattern} for email: {email}'
                    )
                    return True
                logger.debug(f'Email domain {domain} is not blocked')

        logger.debug(f'Email domain {domain} is not blocked')
        return False
            return is_blocked
        except Exception as e:
            logger.error(
                f'Error checking if domain is blocked for email {email}: {e}',
                exc_info=True,
            )
            # Fail-safe: if database query fails, don't block (allow auth to proceed)
            return False


domain_blocker = DomainBlocker()
# Initialize store and domain blocker
_store = BlockedEmailDomainStore(session_maker=session_maker)
domain_blocker = DomainBlocker(store=_store)

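`BlockedEmailDomainStore.is_domain_blocked` itself is not part of this diff. A sketch of the SQL-side matching its docstring implies (exact domain, subdomain suffix, and leading-dot TLD patterns, all evaluated in one query) could look like the following; the model and column names are assumptions.

```python
# Sketch of the SQL matching implied by the docstring above: exact domains,
# subdomains, and leading-dot TLD patterns, evaluated in a single query.
# BlockedEmailDomain and its `domain` column are assumed for illustration.
from sqlalchemy import and_, func, literal, or_, select

from storage.blocked_email_domain import BlockedEmailDomain  # assumed model
from storage.database import session_maker


def is_domain_blocked(domain: str) -> bool:
    domain = domain.lower()
    pattern = BlockedEmailDomain.domain
    stmt = select(func.count()).where(
        or_(
            # Exact match: 'example.com' blocks 'user@example.com'
            pattern == domain,
            # Subdomain match: 'example.com' blocks 'user@sub.example.com'
            literal(domain).like(func.concat('%.', pattern)),
            # TLD pattern: '.us' blocks 'user@company.us'
            and_(pattern.like('.%'), literal(domain).like(func.concat('%', pattern))),
        )
    )
    with session_maker() as session:
        return bool(session.execute(stmt).scalar_one())
```
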
@@ -317,7 +317,7 @@ async def saas_user_auth_from_signed_token(signed_token: str) -> SaasUserAuth:
    email_verified = access_token_payload['email_verified']

    # Check if email domain is blocked
    if email and domain_blocker.is_active() and domain_blocker.is_domain_blocked(email):
    if email and domain_blocker.is_domain_blocked(email):
        logger.warning(
            f'Blocked authentication attempt for existing user with email: {email}'
        )

@@ -14,6 +14,7 @@ from keycloak.exceptions import (
    KeycloakAuthenticationError,
    KeycloakConnectionError,
    KeycloakError,
    KeycloakPostError,
)
from server.auth.constants import (
    BITBUCKET_APP_CLIENT_ID,
@@ -43,6 +44,7 @@ from storage.offline_token_store import OfflineTokenStore
from tenacity import RetryCallState, retry, retry_if_exception_type, stop_after_attempt

from openhands.integrations.service_types import ProviderType
from openhands.server.types import SessionExpiredError
from openhands.utils.http_session import httpx_verify_option


@@ -465,6 +467,14 @@ class TokenManager:
        except KeycloakConnectionError:
            logger.exception('KeycloakConnectionError when refreshing token')
            raise
        except KeycloakPostError as e:
            error_message = str(e)
            if 'invalid_grant' in error_message or 'session not found' in error_message:
                logger.warning(f'User session expired or invalid: {error_message}')
                raise SessionExpiredError(
                    'Your session has expired. Please login again.'
                ) from e
            raise

    @retry(
        stop=stop_after_attempt(2),

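For context on the new `SessionExpiredError` path above, here is a hedged caller-side sketch of turning an expired Keycloak session into a 401; only `SessionExpiredError` comes from this diff, while the helper and the `refresh` method name are illustrative.

```python
# Illustrative caller-side handling: surface SessionExpiredError as a 401 so
# the frontend can redirect to login. The helper shape and refresh call are
# assumptions; only SessionExpiredError itself appears in this diff.
from fastapi import HTTPException

from openhands.server.types import SessionExpiredError


async def refresh_or_401(token_manager, refresh_token: str) -> str:
    try:
        return await token_manager.refresh(refresh_token)  # assumed method name
    except SessionExpiredError as e:
        raise HTTPException(status_code=401, detail=str(e)) from e
```
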
@@ -151,7 +151,7 @@ async def keycloak_callback(

    # Check if email domain is blocked
    email = user_info.get('email')
    if email and domain_blocker.is_active() and domain_blocker.is_domain_blocked(email):
    if email and domain_blocker.is_domain_blocked(email):
        logger.warning(
            f'Blocked authentication attempt for email: {email}, user_id: {user_id}'
        )

@@ -1,15 +1,28 @@
import asyncio
import hashlib
import json

from fastapi import APIRouter, Header, HTTPException, Request
from fastapi import APIRouter, Depends, Header, HTTPException, Request, status
from fastapi.responses import JSONResponse
from integrations.gitlab.gitlab_manager import GitlabManager
from integrations.gitlab.gitlab_service import SaaSGitLabService
from integrations.gitlab.webhook_installation import (
    BreakLoopException,
    install_webhook_on_resource,
    verify_webhook_conditions,
)
from integrations.models import Message, SourceType
from integrations.types import GitLabResourceType
from integrations.utils import GITLAB_WEBHOOK_URL
from pydantic import BaseModel
from server.auth.token_manager import TokenManager
from storage.gitlab_webhook import GitlabWebhook
from storage.gitlab_webhook_store import GitlabWebhookStore

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
from openhands.server.shared import sio
from openhands.server.user_auth import get_user_id

gitlab_integration_router = APIRouter(prefix='/integration')
webhook_store = GitlabWebhookStore()
@@ -18,6 +31,37 @@ token_manager = TokenManager()
gitlab_manager = GitlabManager(token_manager)


# Request/Response models
class ResourceIdentifier(BaseModel):
    type: GitLabResourceType
    id: str


class ReinstallWebhookRequest(BaseModel):
    resource: ResourceIdentifier


class ResourceWithWebhookStatus(BaseModel):
    id: str
    name: str
    full_path: str
    type: str
    webhook_installed: bool
    webhook_uuid: str | None
    last_synced: str | None


class GitLabResourcesResponse(BaseModel):
    resources: list[ResourceWithWebhookStatus]


class ResourceInstallationResult(BaseModel):
    resource_id: str
    resource_type: str
    success: bool
    error: str | None


async def verify_gitlab_signature(
    header_webhook_secret: str, webhook_uuid: str, user_id: str
):
@@ -83,3 +127,260 @@ async def gitlab_events(
    except Exception as e:
        logger.exception(f'Error processing GitLab event: {e}')
        return JSONResponse(status_code=400, content={'error': 'Invalid payload.'})


@gitlab_integration_router.get('/gitlab/resources')
async def get_gitlab_resources(
    user_id: str = Depends(get_user_id),
) -> GitLabResourcesResponse:
    """Get all GitLab projects and groups where the user has admin access.

    Returns a list of resources with their webhook installation status.
    """
    try:
        # Get GitLab service for the user
        gitlab_service = GitLabServiceImpl(external_auth_id=user_id)

        if not isinstance(gitlab_service, SaaSGitLabService):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail='Only SaaS GitLab service is supported',
            )

        # Fetch projects and groups with admin access
        projects, groups = await gitlab_service.get_user_resources_with_admin_access()

        # Filter out projects that belong to a group (nested projects)
        # We only want top-level personal projects since group webhooks cover nested projects
        filtered_projects = [
            project
            for project in projects
            if project.get('namespace', {}).get('kind') != 'group'
        ]

        # Extract IDs for bulk fetching
        project_ids = [str(project['id']) for project in filtered_projects]
        group_ids = [str(group['id']) for group in groups]

        # Bulk fetch webhook records from database (organization-wide)
        (
            project_webhook_map,
            group_webhook_map,
        ) = await webhook_store.get_webhooks_by_resources(project_ids, group_ids)

        # Parallelize GitLab API calls to check webhook status for all resources
        async def check_project_webhook(project):
            project_id = str(project['id'])
            webhook_exists, _ = await gitlab_service.check_webhook_exists_on_resource(
                GitLabResourceType.PROJECT, project_id, GITLAB_WEBHOOK_URL
            )
            return project_id, webhook_exists

        async def check_group_webhook(group):
            group_id = str(group['id'])
            webhook_exists, _ = await gitlab_service.check_webhook_exists_on_resource(
                GitLabResourceType.GROUP, group_id, GITLAB_WEBHOOK_URL
            )
            return group_id, webhook_exists

        # Gather all API calls in parallel
        project_checks = [
            check_project_webhook(project) for project in filtered_projects
        ]
        group_checks = [check_group_webhook(group) for group in groups]

        # Execute all checks concurrently
        all_results = await asyncio.gather(*(project_checks + group_checks))

        # Split results back into projects and groups
        num_projects = len(filtered_projects)
        project_results = all_results[:num_projects]
        group_results = all_results[num_projects:]

        # Build response
        resources = []

        # Add projects with their webhook status
        for project, (project_id, webhook_exists) in zip(
            filtered_projects, project_results
        ):
            webhook = project_webhook_map.get(project_id)

            resources.append(
                ResourceWithWebhookStatus(
                    id=project_id,
                    name=project.get('name', ''),
                    full_path=project.get('path_with_namespace', ''),
                    type='project',
                    webhook_installed=webhook_exists,
                    webhook_uuid=webhook.webhook_uuid if webhook else None,
                    last_synced=(
                        webhook.last_synced.isoformat()
                        if webhook and webhook.last_synced
                        else None
                    ),
                )
            )

        # Add groups with their webhook status
        for group, (group_id, webhook_exists) in zip(groups, group_results):
            webhook = group_webhook_map.get(group_id)

            resources.append(
                ResourceWithWebhookStatus(
                    id=group_id,
                    name=group.get('name', ''),
                    full_path=group.get('full_path', ''),
                    type='group',
                    webhook_installed=webhook_exists,
                    webhook_uuid=webhook.webhook_uuid if webhook else None,
                    last_synced=(
                        webhook.last_synced.isoformat()
                        if webhook and webhook.last_synced
                        else None
                    ),
                )
            )

        logger.info(
            'Retrieved GitLab resources',
            extra={
                'user_id': user_id,
                'project_count': len(projects),
                'group_count': len(groups),
            },
        )

        return GitLabResourcesResponse(resources=resources)

    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f'Error retrieving GitLab resources: {e}')
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail='Failed to retrieve GitLab resources',
        )


@gitlab_integration_router.post('/gitlab/reinstall-webhook')
async def reinstall_gitlab_webhook(
    body: ReinstallWebhookRequest,
    user_id: str = Depends(get_user_id),
) -> ResourceInstallationResult:
    """Reinstall GitLab webhook for a specific resource immediately.

    This endpoint validates permissions, resets webhook status in the database,
    and immediately installs the webhook on the specified resource.
    """
    try:
        # Get GitLab service for the user
        gitlab_service = GitLabServiceImpl(external_auth_id=user_id)

        if not isinstance(gitlab_service, SaaSGitLabService):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail='Only SaaS GitLab service is supported',
            )

        resource_id = body.resource.id
        resource_type = body.resource.type

        # Check if user has admin access to this resource
        (
            has_admin_access,
            check_status,
        ) = await gitlab_service.check_user_has_admin_access_to_resource(
            resource_type, resource_id
        )

        if not has_admin_access:
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail='User does not have admin access to this resource',
            )

        # Reset webhook in database (organization-wide, not user-specific)
        # This allows any admin user to reinstall webhooks
        await webhook_store.reset_webhook_for_reinstallation_by_resource(
            resource_type, resource_id, user_id
        )

        # Get or create webhook record (without user_id filter)
        webhook = await webhook_store.get_webhook_by_resource_only(
            resource_type, resource_id
        )

        if not webhook:
            # Create new webhook record
            webhook = GitlabWebhook(
                user_id=user_id,  # Track who created it
                project_id=resource_id
                if resource_type == GitLabResourceType.PROJECT
                else None,
                group_id=resource_id
                if resource_type == GitLabResourceType.GROUP
                else None,
                webhook_exists=False,
            )
            await webhook_store.store_webhooks([webhook])
            # Fetch it again to get the ID (without user_id filter)
            webhook = await webhook_store.get_webhook_by_resource_only(
                resource_type, resource_id
            )

        # Verify conditions and install webhook
        try:
            await verify_webhook_conditions(
                gitlab_service=gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=webhook_store,
                webhook=webhook,
            )

            # Install the webhook
            webhook_id, install_status = await install_webhook_on_resource(
                gitlab_service=gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=webhook_store,
                webhook=webhook,
            )

            if webhook_id:
                logger.info(
                    'GitLab webhook reinstalled successfully',
                    extra={
                        'user_id': user_id,
                        'resource_type': resource_type.value,
                        'resource_id': resource_id,
                    },
                )
                return ResourceInstallationResult(
                    resource_id=resource_id,
                    resource_type=resource_type.value,
                    success=True,
                    error=None,
                )
            else:
                raise HTTPException(
                    status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                    detail='Failed to install webhook',
                )

        except BreakLoopException:
            # Conditions not met or webhook already exists
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail='Webhook installation conditions not met or webhook already exists',
            )

    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f'Error reinstalling GitLab webhook: {e}')
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail='Failed to reinstall webhook',
        )

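A minimal client-side sketch of the two new endpoints above; the host, bearer token, and the 'project' wire value for `GitLabResourceType` are placeholders, while the paths follow the router's '/integration' prefix.

```python
# Minimal httpx sketch against the two new endpoints. The base URL, auth
# header, and the 'project' enum wire value are assumptions for illustration.
import asyncio

import httpx

BASE = 'https://app.example.com/integration'  # placeholder host
HEADERS = {'Authorization': 'Bearer <token>'}  # placeholder auth


async def main() -> None:
    async with httpx.AsyncClient(base_url=BASE, headers=HEADERS) as client:
        # List projects/groups with their webhook status
        resp = await client.get('/gitlab/resources')
        resp.raise_for_status()
        for resource in resp.json()['resources']:
            print(resource['full_path'], resource['webhook_installed'])

        # Reinstall the webhook on a single resource
        body = {'resource': {'type': 'project', 'id': '12345'}}
        resp = await client.post('/gitlab/reinstall-webhook', json=body)
        print(resp.status_code, resp.json())


asyncio.run(main())
```
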
@@ -9,12 +9,19 @@ This implementation provides read-only access to events from shared conversation
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import AsyncGenerator
|
||||
from uuid import UUID
|
||||
|
||||
from fastapi import Request
|
||||
from google.cloud import storage
|
||||
from google.cloud.storage.bucket import Bucket
|
||||
from google.cloud.storage.client import Client
|
||||
from more_itertools import bucket
|
||||
from pydantic import Field
|
||||
from server.sharing.shared_conversation_info_service import (
|
||||
SharedConversationInfoService,
|
||||
)
|
||||
@@ -28,6 +35,9 @@ from server.sharing.sql_shared_conversation_info_service import (
|
||||
|
||||
from openhands.agent_server.models import EventPage, EventSortOrder
|
||||
from openhands.app_server.event.event_service import EventService
|
||||
from openhands.app_server.event.google_cloud_event_service import (
|
||||
GoogleCloudEventService,
|
||||
)
|
||||
from openhands.app_server.event_callback.event_callback_models import EventKind
|
||||
from openhands.app_server.services.injector import InjectorState
|
||||
from openhands.sdk import Event
|
||||
@@ -36,17 +46,13 @@ logger = logging.getLogger(__name__)
|
||||
|
 @dataclass
-class SharedEventServiceImpl(SharedEventService):
+class GoogleCloudSharedEventService(SharedEventService):
     """Implementation of SharedEventService that validates shared access."""

     shared_conversation_info_service: SharedConversationInfoService
-    event_service: EventService
+    bucket: Bucket

-    async def get_shared_event(
-        self, conversation_id: UUID, event_id: str
-    ) -> Event | None:
-        """Given a conversation_id and event_id, retrieve an event if the conversation is shared."""
-        # First check if the conversation is shared
+    async def get_event_service(self, conversation_id: UUID) -> EventService | None:
         shared_conversation_info = (
             await self.shared_conversation_info_service.get_shared_conversation_info(
                 conversation_id
@@ -55,8 +61,25 @@ class SharedEventServiceImpl(SharedEventService):
         if shared_conversation_info is None:
             return None

+        return GoogleCloudEventService(
+            bucket=bucket,
+            prefix=Path('users'),
+            user_id=shared_conversation_info.created_by_user_id,
+            app_conversation_info_service=None,
+            app_conversation_info_load_tasks={},
+        )
+
+    async def get_shared_event(
+        self, conversation_id: UUID, event_id: UUID
+    ) -> Event | None:
+        """Given a conversation_id and event_id, retrieve an event if the conversation is shared."""
+        # First check if the conversation is shared
+        event_service = await self.get_event_service(conversation_id)
+        if event_service is None:
+            return None
+
         # If conversation is shared, get the event
-        return await self.event_service.get_event(event_id)
+        return await event_service.get_event(conversation_id, event_id)

     async def search_shared_events(
         self,
@@ -70,18 +93,14 @@ class SharedEventServiceImpl(SharedEventService):
     ) -> EventPage:
         """Search events for a specific shared conversation."""
         # First check if the conversation is shared
-        shared_conversation_info = (
-            await self.shared_conversation_info_service.get_shared_conversation_info(
-                conversation_id
-            )
-        )
-        if shared_conversation_info is None:
+        event_service = await self.get_event_service(conversation_id)
+        if event_service is None:
             # Return empty page if conversation is not shared
             return EventPage(items=[], next_page_id=None)

         # If conversation is shared, search events for this conversation
-        return await self.event_service.search_events(
-            conversation_id__eq=conversation_id,
+        return await event_service.search_events(
+            conversation_id=conversation_id,
             kind__eq=kind__eq,
             timestamp__gte=timestamp__gte,
             timestamp__lt=timestamp__lt,
@@ -96,47 +115,45 @@ class SharedEventServiceImpl(SharedEventService):
         kind__eq: EventKind | None = None,
         timestamp__gte: datetime | None = None,
         timestamp__lt: datetime | None = None,
         sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
     ) -> int:
         """Count events for a specific shared conversation."""
         # First check if the conversation is shared
-        shared_conversation_info = (
-            await self.shared_conversation_info_service.get_shared_conversation_info(
-                conversation_id
-            )
-        )
-        if shared_conversation_info is None:
+        event_service = await self.get_event_service(conversation_id)
+        if event_service is None:
            # Return empty page if conversation is not shared
            return 0

         # If conversation is shared, count events for this conversation
-        return await self.event_service.count_events(
-            conversation_id__eq=conversation_id,
+        return await event_service.count_events(
+            conversation_id=conversation_id,
             kind__eq=kind__eq,
             timestamp__gte=timestamp__gte,
             timestamp__lt=timestamp__lt,
             sort_order=sort_order,
         )


-class SharedEventServiceImplInjector(SharedEventServiceInjector):
+class GoogleCloudSharedEventServiceInjector(SharedEventServiceInjector):
+    bucket_name: str | None = Field(
+        default_factory=lambda: os.environ.get('FILE_STORE_PATH')
+    )
+
     async def inject(
         self, state: InjectorState, request: Request | None = None
     ) -> AsyncGenerator[SharedEventService, None]:
         # Define inline to prevent circular lookup
-        from openhands.app_server.config import (
-            get_db_session,
-            get_event_service,
-        )
+        from openhands.app_server.config import get_db_session

-        async with (
-            get_db_session(state, request) as db_session,
-            get_event_service(state, request) as event_service,
-        ):
+        async with get_db_session(state, request) as db_session:
             shared_conversation_info_service = SQLSharedConversationInfoService(
                 db_session=db_session
             )
-            service = SharedEventServiceImpl(
+
+            bucket_name = self.bucket_name
+            storage_client: Client = storage.Client()
+            bucket: Bucket = storage_client.bucket(bucket_name)
+
+            service = GoogleCloudSharedEventService(
                 shared_conversation_info_service=shared_conversation_info_service,
-                event_service=event_service,
+                bucket=bucket,
             )
             yield service
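Reviewer note: the guard-then-delegate shape above is easy to check in isolation. Below is a minimal, self-contained sketch of that pattern — resolve a per-owner event service only when the conversation is shared, otherwise report nothing. Every class here is a stand-in, not the real OpenHands type.

```python
import asyncio
from dataclasses import dataclass, field
from uuid import UUID, uuid4


@dataclass
class SharedInfo:
    created_by_user_id: str


@dataclass
class GuardedSharedEventService:
    """Stand-in mirroring GoogleCloudSharedEventService's guard-then-delegate flow."""

    shared: dict[UUID, SharedInfo]  # stand-in for the SQL shared-conversation lookup
    events: dict[tuple[UUID, UUID], str] = field(default_factory=dict)

    async def get_event_service(self, conversation_id: UUID):
        info = self.shared.get(conversation_id)
        if info is None:
            return None  # not shared -> callers return None / empty page / 0
        # The real code builds a GoogleCloudEventService scoped to
        # info.created_by_user_id under the bucket's 'users' prefix.
        async def lookup(event_id: UUID) -> str | None:
            return self.events.get((conversation_id, event_id))
        return lookup

    async def get_shared_event(self, conversation_id: UUID, event_id: UUID):
        event_service = await self.get_event_service(conversation_id)
        if event_service is None:
            return None
        return await event_service(event_id)


async def main() -> None:
    conv, evt = uuid4(), uuid4()
    svc = GuardedSharedEventService(
        shared={conv: SharedInfo('user-1')},
        events={(conv, evt): 'hello'},
    )
    assert await svc.get_shared_event(conv, evt) == 'hello'
    assert await svc.get_shared_event(uuid4(), evt) is None  # unshared conversation


asyncio.run(main())
```

Centralizing the check in `get_event_service` means the search and count paths cannot forget the sharing guard.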
@@ -5,8 +5,8 @@ from typing import Annotated
 from uuid import UUID

 from fastapi import APIRouter, Depends, Query
-from server.sharing.filesystem_shared_event_service import (
-    SharedEventServiceImplInjector,
+from server.sharing.google_cloud_shared_event_service import (
+    GoogleCloudSharedEventServiceInjector,
 )
 from server.sharing.shared_event_service import SharedEventService

@@ -15,7 +15,9 @@ from openhands.app_server.event_callback.event_callback_models import EventKind
 from openhands.sdk import Event

 router = APIRouter(prefix='/api/shared-events', tags=['Sharing'])
-shared_event_service_dependency = Depends(SharedEventServiceImplInjector().depends)
+shared_event_service_dependency = Depends(
+    GoogleCloudSharedEventServiceInjector().depends
+)


 # Read methods
@@ -85,10 +87,6 @@ async def count_shared_events(
         datetime | None,
         Query(title='Optional filter by timestamp less than'),
     ] = None,
-    sort_order: Annotated[
-        EventSortOrder,
-        Query(title='Sort order for results'),
-    ] = EventSortOrder.TIMESTAMP,
     shared_event_service: SharedEventService = shared_event_service_dependency,
 ) -> int:
     """Count events for a shared conversation matching the given filters."""
@@ -97,14 +95,13 @@ async def count_shared_events(
         kind__eq=kind__eq,
         timestamp__gte=timestamp__gte,
         timestamp__lt=timestamp__lt,
-        sort_order=sort_order,
     )


 @router.get('')
 async def batch_get_shared_events(
     conversation_id: Annotated[
-        UUID,
+        str,
         Query(title='Conversation ID to get events for'),
     ],
     id: Annotated[list[str], Query()],
@@ -112,15 +109,20 @@ async def batch_get_shared_events(
 ) -> list[Event | None]:
     """Get a batch of events for a shared conversation given their ids, returning null for any missing event."""
     assert len(id) <= 100
-    events = await shared_event_service.batch_get_shared_events(conversation_id, id)
+    event_ids = [UUID(id_) for id_ in id]
+    events = await shared_event_service.batch_get_shared_events(
+        UUID(conversation_id), event_ids
+    )
     return events


 @router.get('/{conversation_id}/{event_id}')
 async def get_shared_event(
-    conversation_id: UUID,
+    conversation_id: str,
     event_id: str,
     shared_event_service: SharedEventService = shared_event_service_dependency,
 ) -> Event | None:
     """Get a single event from a shared conversation by conversation_id and event_id."""
-    return await shared_event_service.get_shared_event(conversation_id, event_id)
+    return await shared_event_service.get_shared_event(
+        UUID(conversation_id), UUID(event_id)
+    )
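For reviewers poking at these routes, a hedged client sketch (it assumes a locally running server; the host, port, and IDs below are placeholders): the batch endpoint takes repeated `id` query parameters, and both endpoints now accept the IDs as strings that are parsed into `UUID`s server-side.

```python
import asyncio

import httpx

BASE = 'http://localhost:3000/api/shared-events'  # assumed host/port


async def main() -> None:
    conversation_id = '0f1e2d3c4b5a69788796a5b4c3d2e1f0'  # placeholder hex UUID
    event_id = '00112233445566778899aabbccddeeff'  # placeholder hex UUID
    async with httpx.AsyncClient() as client:
        # Batch endpoint: GET /api/shared-events?conversation_id=...&id=...&id=...
        batch = await client.get(
            BASE,
            params=[('conversation_id', conversation_id), ('id', event_id)],
        )
        print(batch.json())  # list[Event | None]; null entries for missing events

        # Single-event endpoint: GET /api/shared-events/{conversation_id}/{event_id}
        single = await client.get(f'{BASE}/{conversation_id}/{event_id}')
        print(single.json())  # Event, or null if the conversation is not shared


asyncio.run(main())
```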
@@ -18,7 +18,7 @@ class SharedEventService(ABC):

     @abstractmethod
     async def get_shared_event(
-        self, conversation_id: UUID, event_id: str
+        self, conversation_id: UUID, event_id: UUID
     ) -> Event | None:
         """Given a conversation_id and event_id, retrieve an event if the conversation is shared."""

@@ -42,12 +42,11 @@ class SharedEventService(ABC):
         kind__eq: EventKind | None = None,
         timestamp__gte: datetime | None = None,
         timestamp__lt: datetime | None = None,
-        sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
     ) -> int:
         """Count events for a specific shared conversation."""

     async def batch_get_shared_events(
-        self, conversation_id: UUID, event_ids: list[str]
+        self, conversation_id: UUID, event_ids: list[UUID]
     ) -> list[Event | None]:
         """Given a conversation_id and list of event_ids, get events if the conversation is shared."""
         return await asyncio.gather(
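The hunk is cut off at the `asyncio.gather(` call. A plausible shape for that default fan-out (not necessarily the exact upstream code) is one concurrent `get_shared_event` lookup per id, with results kept in input order:

```python
import asyncio
from uuid import UUID, uuid4


async def get_shared_event(conversation_id: UUID, event_id: UUID) -> str | None:
    await asyncio.sleep(0)  # stand-in for a per-event storage lookup
    return f'event:{event_id}'


async def batch_get_shared_events(
    conversation_id: UUID, event_ids: list[UUID]
) -> list[str | None]:
    # gather preserves the order of the awaitables, so results line up
    # with event_ids; missing events would surface as None entries.
    return await asyncio.gather(
        *(get_shared_event(conversation_id, event_id) for event_id in event_ids)
    )


print(asyncio.run(batch_get_shared_events(uuid4(), [uuid4(), uuid4()])))
```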
enterprise/storage/blocked_email_domain.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+from datetime import UTC, datetime
+
+from sqlalchemy import Column, DateTime, Identity, Integer, String
+from storage.base import Base
+
+
+class BlockedEmailDomain(Base):  # type: ignore
+    """Stores blocked email domain patterns.
+
+    Supports blocking:
+    - Exact domains: 'example.com' blocks 'user@example.com'
+    - Subdomains: 'example.com' blocks 'user@subdomain.example.com'
+    - TLDs: '.us' blocks 'user@company.us' and 'user@subdomain.company.us'
+    """
+
+    __tablename__ = 'blocked_email_domains'
+
+    id = Column(Integer, Identity(), primary_key=True)
+    domain = Column(String, nullable=False, unique=True)
+    created_at = Column(
+        DateTime(timezone=True),
+        default=lambda: datetime.now(UTC),
+        nullable=False,
+    )
+    updated_at = Column(
+        DateTime(timezone=True),
+        default=lambda: datetime.now(UTC),
+        onupdate=lambda: datetime.now(UTC),
+        nullable=False,
+    )
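A self-contained sketch of exercising a model like this against in-memory SQLite. The real model inherits from `storage.base.Base` and uses `Identity()` (a Postgres-oriented construct); here a local `Base` and a plain autoincrement key are substituted so the snippet runs anywhere.

```python
from datetime import UTC, datetime

from sqlalchemy import Column, DateTime, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()  # stand-in for storage.base.Base


class BlockedEmailDomain(Base):
    __tablename__ = 'blocked_email_domains'

    # The production model uses Identity(); plain autoincrement keeps
    # this sketch portable to SQLite.
    id = Column(Integer, primary_key=True)
    domain = Column(String, nullable=False, unique=True)
    created_at = Column(
        DateTime(timezone=True), default=lambda: datetime.now(UTC), nullable=False
    )
    updated_at = Column(
        DateTime(timezone=True),
        default=lambda: datetime.now(UTC),
        onupdate=lambda: datetime.now(UTC),
        nullable=False,
    )


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)

with Session() as session:
    session.add_all(
        [BlockedEmailDomain(domain='example.com'), BlockedEmailDomain(domain='.us')]
    )
    session.commit()
    print([row.domain for row in session.query(BlockedEmailDomain).all()])
```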
enterprise/storage/blocked_email_domain_store.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+from dataclasses import dataclass
+
+from sqlalchemy import text
+from sqlalchemy.orm import sessionmaker
+
+
+@dataclass
+class BlockedEmailDomainStore:
+    session_maker: sessionmaker
+
+    def is_domain_blocked(self, domain: str) -> bool:
+        """Check if a domain is blocked by querying the database directly.
+
+        This method uses SQL to efficiently check if the domain matches any blocked pattern:
+        - TLD patterns (e.g., '.us'): checks if domain ends with the pattern
+        - Full domain patterns (e.g., 'example.com'): checks for exact match or subdomain match
+
+        Args:
+            domain: The extracted domain from the email (e.g., 'example.com' or 'subdomain.example.com')
+
+        Returns:
+            True if the domain is blocked, False otherwise
+        """
+        with self.session_maker() as session:
+            # SQL query that handles both TLD patterns and full domain patterns
+            # TLD patterns (starting with '.'): check if domain ends with the pattern
+            # Full domain patterns: check for exact match or subdomain match
+            # All comparisons are case-insensitive using LOWER() to ensure consistent matching
+            query = text("""
+                SELECT EXISTS(
+                    SELECT 1
+                    FROM blocked_email_domains
+                    WHERE
+                        -- TLD pattern (e.g., '.us') - check if domain ends with it (case-insensitive)
+                        (LOWER(domain) LIKE '.%' AND LOWER(:domain) LIKE '%' || LOWER(domain)) OR
+                        -- Full domain pattern (e.g., 'example.com')
+                        -- Block exact match or subdomains (case-insensitive)
+                        (LOWER(domain) NOT LIKE '.%' AND (
+                            LOWER(:domain) = LOWER(domain) OR
+                            LOWER(:domain) LIKE '%.' || LOWER(domain)
+                        ))
+                )
+            """)
+            result = session.execute(query, {'domain': domain}).scalar()
+            return bool(result)
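A hedged end-to-end check of the matching rules above, run against in-memory SQLite (the production store presumably targets Postgres, but both support the `LIKE`/`||` operators the query relies on). The import path assumes the enterprise layout described in the repo docs, where enterprise modules import as `storage.*` without an `enterprise.` prefix.

```python
from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker

from storage.blocked_email_domain_store import BlockedEmailDomainStore  # assumed path

engine = create_engine('sqlite://')
with engine.begin() as conn:
    conn.execute(
        text(
            'CREATE TABLE blocked_email_domains '
            '(id INTEGER PRIMARY KEY, domain TEXT NOT NULL UNIQUE)'
        )
    )
    conn.execute(
        text("INSERT INTO blocked_email_domains (domain) VALUES ('example.com'), ('.us')")
    )

store = BlockedEmailDomainStore(session_maker=sessionmaker(bind=engine))

assert store.is_domain_blocked('example.com')  # exact match
assert store.is_domain_blocked('sub.example.com')  # subdomain match
assert store.is_domain_blocked('Company.US')  # TLD match, case-insensitive
assert not store.is_domain_blocked('example.com.evil')  # prefix only, not blocked
```

Note the last case: because the full-domain branch anchors on `'%.' || domain`, a blocked domain used as a mere prefix of a longer, unrelated domain does not match.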
@@ -220,6 +220,127 @@ class GitlabWebhookStore:
                 return webhooks[0].webhook_secret
             return None

+    async def get_webhook_by_resource_only(
+        self, resource_type: GitLabResourceType, resource_id: str
+    ) -> GitlabWebhook | None:
+        """Get a webhook by resource without filtering by user_id.
+
+        This allows any admin user in the organization to manage webhooks,
+        not just the original installer.
+
+        Args:
+            resource_type: The type of resource (PROJECT or GROUP)
+            resource_id: The ID of the resource
+
+        Returns:
+            GitlabWebhook object if found, None otherwise
+        """
+        async with self.a_session_maker() as session:
+            if resource_type == GitLabResourceType.PROJECT:
+                query = select(GitlabWebhook).where(
+                    GitlabWebhook.project_id == resource_id
+                )
+            else:  # GROUP
+                query = select(GitlabWebhook).where(
+                    GitlabWebhook.group_id == resource_id
+                )
+
+            result = await session.execute(query)
+            webhook = result.scalars().first()
+            return webhook
+
+    async def get_webhooks_by_resources(
+        self, project_ids: list[str], group_ids: list[str]
+    ) -> tuple[dict[str, GitlabWebhook], dict[str, GitlabWebhook]]:
+        """Bulk fetch webhooks for multiple resources.
+
+        This is more efficient than fetching one at a time in a loop.
+
+        Args:
+            project_ids: List of project IDs to fetch
+            group_ids: List of group IDs to fetch
+
+        Returns:
+            Tuple of (project_webhook_map, group_webhook_map)
+        """
+        async with self.a_session_maker() as session:
+            project_webhook_map = {}
+            group_webhook_map = {}
+
+            # Fetch all project webhooks in one query
+            if project_ids:
+                project_query = select(GitlabWebhook).where(
+                    GitlabWebhook.project_id.in_(project_ids)
+                )
+                result = await session.execute(project_query)
+                project_webhooks = result.scalars().all()
+                project_webhook_map = {wh.project_id: wh for wh in project_webhooks}
+
+            # Fetch all group webhooks in one query
+            if group_ids:
+                group_query = select(GitlabWebhook).where(
+                    GitlabWebhook.group_id.in_(group_ids)
+                )
+                result = await session.execute(group_query)
+                group_webhooks = result.scalars().all()
+                group_webhook_map = {wh.group_id: wh for wh in group_webhooks}
+
+            return project_webhook_map, group_webhook_map
+
+    async def reset_webhook_for_reinstallation_by_resource(
+        self, resource_type: GitLabResourceType, resource_id: str, updating_user_id: str
+    ) -> bool:
+        """Reset webhook for reinstallation without filtering by user_id.
+
+        This allows any admin user to reset webhooks, and updates the user_id
+        to track who last modified it.
+
+        Args:
+            resource_type: The type of resource (PROJECT or GROUP)
+            resource_id: The ID of the resource
+            updating_user_id: The user ID performing the update (for audit purposes)
+
+        Returns:
+            True if webhook was reset, False if not found
+        """
+        async with self.a_session_maker() as session:
+            async with session.begin():
+                if resource_type == GitLabResourceType.PROJECT:
+                    update_statement = (
+                        update(GitlabWebhook)
+                        .where(GitlabWebhook.project_id == resource_id)
+                        .values(
+                            webhook_exists=False,
+                            webhook_uuid=None,
+                            user_id=updating_user_id,  # Update to track who modified it
+                        )
+                    )
+                else:  # GROUP
+                    update_statement = (
+                        update(GitlabWebhook)
+                        .where(GitlabWebhook.group_id == resource_id)
+                        .values(
+                            webhook_exists=False,
+                            webhook_uuid=None,
+                            user_id=updating_user_id,  # Update to track who modified it
+                        )
+                    )
+
+                result = await session.execute(update_statement)
+                rows_updated = result.rowcount
+
+                logger.info(
+                    'Reset webhook for reinstallation (organization-wide)',
+                    extra={
+                        'updating_user_id': updating_user_id,
+                        'resource_type': resource_type.value,
+                        'resource_id': resource_id,
+                        'rows_updated': rows_updated,
+                    },
+                )
+
+                return rows_updated > 0
+
     @classmethod
     async def get_instance(cls) -> GitlabWebhookStore:
         """Get an instance of the GitlabWebhookStore.
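The bulk fetch replaces N per-resource queries with one `IN (...)` query per resource type plus an id-to-row map. A self-contained illustration of that pattern (the table contents and row shape are made up for the demo):

```python
from dataclasses import dataclass


@dataclass
class WebhookRow:
    project_id: str
    webhook_uuid: str | None


def fetch_project_webhooks(project_ids: list[str]) -> dict[str, WebhookRow]:
    # Stand-in for: SELECT ... WHERE project_id IN (:project_ids)
    table = {
        '1': WebhookRow('1', 'uuid-1'),
        '7': WebhookRow('7', None),
    }
    rows = [table[pid] for pid in project_ids if pid in table]
    return {row.project_id: row for row in rows}


project_map = fetch_project_webhooks(['1', '2', '7'])
for pid in ['1', '2', '7']:
    print(pid, project_map.get(pid))  # '2' -> None: no webhook row exists yet
```

Callers then do O(1) `dict.get` lookups per resource instead of issuing a query inside the loop.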
@@ -1,7 +1,11 @@
 import asyncio
-from typing import cast
 from uuid import uuid4

+from integrations.gitlab.webhook_installation import (
+    BreakLoopException,
+    install_webhook_on_resource,
+    verify_webhook_conditions,
+)
 from integrations.types import GitLabResourceType
 from integrations.utils import GITLAB_WEBHOOK_URL
 from sqlalchemy import text
@@ -14,20 +18,6 @@ from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
 from openhands.integrations.service_types import GitService

 CHUNK_SIZE = 100
-WEBHOOK_NAME = 'OpenHands Resolver'
-SCOPES: list[str] = [
-    'note_events',
-    'merge_requests_events',
-    'confidential_issues_events',
-    'issues_events',
-    'confidential_note_events',
-    'job_events',
-    'pipeline_events',
-]
-
-
-class BreakLoopException(Exception):
-    pass


 class VerifyWebhookStatus:
@@ -43,77 +33,6 @@ class VerifyWebhookStatus:
         if status == WebhookStatus.RATE_LIMITED:
             raise BreakLoopException()

-    async def check_if_resource_exists(
-        self,
-        gitlab_service: type[GitService],
-        resource_type: GitLabResourceType,
-        resource_id: str,
-        webhook_store: GitlabWebhookStore,
-        webhook: GitlabWebhook,
-    ):
-        """
-        Check if the GitLab resource still exists
-        """
-        from integrations.gitlab.gitlab_service import SaaSGitLabService
-
-        gitlab_service = cast(type[SaaSGitLabService], gitlab_service)
-
-        does_resource_exist, status = await gitlab_service.check_resource_exists(
-            resource_type, resource_id
-        )
-
-        logger.info(
-            'Does resource exists',
-            extra={
-                'does_resource_exist': does_resource_exist,
-                'status': status,
-                'resource_id': resource_id,
-                'resource_type': resource_type,
-            },
-        )
-
-        self.determine_if_rate_limited(status)
-        if not does_resource_exist and status != WebhookStatus.RATE_LIMITED:
-            await webhook_store.delete_webhook(webhook)
-            raise BreakLoopException()
-
-    async def check_if_user_has_admin_acccess_to_resource(
-        self,
-        gitlab_service: type[GitService],
-        resource_type: GitLabResourceType,
-        resource_id: str,
-        webhook_store: GitlabWebhookStore,
-        webhook: GitlabWebhook,
-    ):
-        """
-        Check is user still has permission to resource
-        """
-        from integrations.gitlab.gitlab_service import SaaSGitLabService
-
-        gitlab_service = cast(type[SaaSGitLabService], gitlab_service)
-
-        (
-            is_user_admin_of_resource,
-            status,
-        ) = await gitlab_service.check_user_has_admin_access_to_resource(
-            resource_type, resource_id
-        )
-
-        logger.info(
-            'Is user admin',
-            extra={
-                'is_user_admin': is_user_admin_of_resource,
-                'status': status,
-                'resource_id': resource_id,
-                'resource_type': resource_type,
-            },
-        )
-
-        self.determine_if_rate_limited(status)
-        if not is_user_admin_of_resource:
-            await webhook_store.delete_webhook(webhook)
-            raise BreakLoopException()
-
     async def check_if_webhook_already_exists_on_resource(
         self,
         gitlab_service: type[GitService],
@@ -162,23 +81,8 @@ class VerifyWebhookStatus:
         webhook_store: GitlabWebhookStore,
         webhook: GitlabWebhook,
     ):
-        await self.check_if_resource_exists(
-            gitlab_service=gitlab_service,
-            resource_type=resource_type,
-            resource_id=resource_id,
-            webhook_store=webhook_store,
-            webhook=webhook,
-        )
-
-        await self.check_if_user_has_admin_acccess_to_resource(
-            gitlab_service=gitlab_service,
-            resource_type=resource_type,
-            resource_id=resource_id,
-            webhook_store=webhook_store,
-            webhook=webhook,
-        )
-
-        await self.check_if_webhook_already_exists_on_resource(
+        # Use the standalone function
+        await verify_webhook_conditions(
             gitlab_service=gitlab_service,
             resource_type=resource_type,
             resource_id=resource_id,
@@ -197,51 +101,15 @@ class VerifyWebhookStatus:
         """
         Install webhook on resource
         """
-        from integrations.gitlab.gitlab_service import SaaSGitLabService
-
-        gitlab_service = cast(type[SaaSGitLabService], gitlab_service)
-
-        webhook_secret = f'{webhook.user_id}-{str(uuid4())}'
-        webhook_uuid = f'{str(uuid4())}'
-
-        webhook_id, status = await gitlab_service.install_webhook(
+        # Use the standalone function
+        await install_webhook_on_resource(
+            gitlab_service=gitlab_service,
             resource_type=resource_type,
             resource_id=resource_id,
-            webhook_name=WEBHOOK_NAME,
-            webhook_url=GITLAB_WEBHOOK_URL,
-            webhook_secret=webhook_secret,
-            webhook_uuid=webhook_uuid,
-            scopes=SCOPES,
+            webhook_store=webhook_store,
             webhook=webhook,
         )

-        logger.info(
-            'Creating new webhook',
-            extra={
-                'webhook_id': webhook_id,
-                'status': status,
-                'resource_id': resource_id,
-                'resource_type': resource_type,
-            },
-        )
-
-        self.determine_if_rate_limited(status)
-
-        if webhook_id:
-            await webhook_store.update_webhook(
-                webhook=webhook,
-                update_fields={
-                    'webhook_secret': webhook_secret,
-                    'webhook_exists': True,  # webhook was created
-                    'webhook_url': GITLAB_WEBHOOK_URL,
-                    'scopes': SCOPES,
-                    'webhook_uuid': webhook_uuid,  # required to identify which webhook installation is sending payload
-                },
-            )
-
-            logger.info(
-                f'Installed webhook for {webhook.user_id} on {resource_type}:{resource_id}'
-            )

     async def install_webhooks(self):
         """
         Periodically check the conditions for installing a webhook on resource as valid
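The control flow this refactor preserves is worth spelling out: each verification step may raise `BreakLoopException` (for example on rate limiting or a deleted resource), and the periodic outer loop catches it to skip to the next webhook rather than abort the whole sweep. A minimal sketch of that pattern, with the real checks replaced by a toy predicate:

```python
class BreakLoopException(Exception):
    """Signal: stop processing the current resource, move to the next one."""


def verify_webhook_conditions(resource_id: str) -> None:
    # Stand-in for the real existence/permission/duplicate checks.
    if resource_id == 'rate-limited':
        raise BreakLoopException()


def install_webhooks(resource_ids: list[str]) -> list[str]:
    installed = []
    for resource_id in resource_ids:
        try:
            verify_webhook_conditions(resource_id)
            installed.append(resource_id)  # stand-in for the actual install
        except BreakLoopException:
            continue  # skip this resource, keep sweeping
    return installed


print(install_webhooks(['a', 'rate-limited', 'b']))  # ['a', 'b']
```

Moving `BreakLoopException` and the check/install helpers into `webhook_installation` lets the new admin routes reuse exactly the same verification path as the periodic job.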
@@ -10,12 +10,14 @@ Covers:
 - Low-level helper methods
 """

-import os
 from unittest.mock import AsyncMock, MagicMock, patch
 from uuid import uuid4

 import httpx
 import pytest
+from integrations.github.github_v1_callback_processor import (
+    GithubV1CallbackProcessor,
+)

 from openhands.app_server.app_conversation.app_conversation_models import (
     AppConversationInfo,
@@ -24,9 +26,6 @@ from openhands.app_server.event_callback.event_callback_models import EventCallb
 from openhands.app_server.event_callback.event_callback_result_models import (
     EventCallbackResultStatus,
 )
-from openhands.app_server.event_callback.github_v1_callback_processor import (
-    GithubV1CallbackProcessor,
-)
 from openhands.app_server.sandbox.sandbox_models import (
     ExposedUrl,
     SandboxInfo,
@@ -198,30 +197,27 @@ class TestGithubV1CallbackProcessor:
     # Successful paths
     # ------------------------------------------------------------------ #

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
     @patch('openhands.app_server.config.get_app_conversation_info_service')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_httpx_client')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template'
-    )
-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Auth')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration'
-    )
-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github')
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
+    @patch('integrations.github.github_v1_callback_processor.Auth')
+    @patch('integrations.github.github_v1_callback_processor.GithubIntegration')
+    @patch('integrations.github.github_v1_callback_processor.Github')
     async def test_successful_callback_execution(
         self,
         mock_github,
         mock_github_integration,
         mock_auth,
-        mock_get_prompt_template,
+        mock_get_summary_instruction,
         mock_get_httpx_client,
         mock_get_sandbox_service,
         mock_get_app_conversation_info_service,
@@ -242,7 +238,7 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

-        mock_get_prompt_template.return_value = 'Please provide a summary'
+        mock_get_summary_instruction.return_value = 'Please provide a summary'

         # Auth.AppAuth mock
         mock_app_auth_instance = MagicMock()
@@ -293,28 +289,25 @@ class TestGithubV1CallbackProcessor:
         assert kwargs['headers']['X-Session-API-Key'] == 'test_api_key'
         assert kwargs['json']['question'] == 'Please provide a summary'

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
     @patch('openhands.app_server.config.get_app_conversation_info_service')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_httpx_client')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template'
-    )
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration'
-    )
-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github')
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
+    @patch('integrations.github.github_v1_callback_processor.GithubIntegration')
+    @patch('integrations.github.github_v1_callback_processor.Github')
     async def test_successful_inline_pr_comment(
         self,
         mock_github,
         mock_github_integration,
-        mock_get_prompt_template,
+        mock_get_summary_instruction,
         mock_get_httpx_client,
         mock_get_sandbox_service,
         mock_get_app_conversation_info_service,
@@ -334,7 +327,7 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

-        mock_get_prompt_template.return_value = 'Please provide a summary'
+        mock_get_summary_instruction.return_value = 'Please provide a summary'

         mock_token_data = MagicMock()
         mock_token_data.token = 'test_access_token'
@@ -367,6 +360,7 @@ class TestGithubV1CallbackProcessor:
     # Error paths
     # ------------------------------------------------------------------ #

+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
     @patch('openhands.app_server.config.get_httpx_client')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_app_conversation_info_service')
@@ -375,6 +369,7 @@ class TestGithubV1CallbackProcessor:
         mock_get_app_conversation_info_service,
         mock_get_sandbox_service,
         mock_get_httpx_client,
+        mock_get_summary_instruction,
         conversation_state_update_event,
         event_callback,
         mock_app_conversation_info,
@@ -393,6 +388,8 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

+        mock_get_summary_instruction.return_value = 'Please provide a summary'
+
         result = await processor(
             conversation_id=conversation_id,
             callback=event_callback,
@@ -403,7 +400,15 @@ class TestGithubV1CallbackProcessor:
         assert result.status == EventCallbackResultStatus.ERROR
         assert 'Missing installation ID' in result.detail

-    @patch.dict(os.environ, {}, clear=True)
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        '',
+    )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        '',
+    )
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
     @patch('openhands.app_server.config.get_httpx_client')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_app_conversation_info_service')
@@ -412,6 +417,7 @@ class TestGithubV1CallbackProcessor:
         mock_get_app_conversation_info_service,
         mock_get_sandbox_service,
         mock_get_httpx_client,
+        mock_get_summary_instruction,
         github_callback_processor,
         conversation_state_update_event,
         event_callback,
@@ -428,6 +434,8 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

+        mock_get_summary_instruction.return_value = 'Please provide a summary'
+
         result = await github_callback_processor(
             conversation_id=conversation_id,
             callback=event_callback,
@@ -438,12 +446,13 @@ class TestGithubV1CallbackProcessor:
         assert result.status == EventCallbackResultStatus.ERROR
         assert 'GitHub App credentials are not configured' in result.detail

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
     @patch('openhands.app_server.config.get_app_conversation_info_service')
     @patch('openhands.app_server.config.get_sandbox_service')
@@ -489,22 +498,21 @@ class TestGithubV1CallbackProcessor:
         assert result.status == EventCallbackResultStatus.ERROR
         assert 'Sandbox not running' in result.detail

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
     @patch('openhands.app_server.config.get_app_conversation_info_service')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_httpx_client')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template'
-    )
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
     async def test_agent_server_http_error(
         self,
-        mock_get_prompt_template,
+        mock_get_summary_instruction,
         mock_get_httpx_client,
         mock_get_sandbox_service,
         mock_get_app_conversation_info_service,
@@ -525,7 +533,7 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

-        mock_get_prompt_template.return_value = 'Please provide a summary'
+        mock_get_summary_instruction.return_value = 'Please provide a summary'

         mock_httpx_client = mock_get_httpx_client.return_value.__aenter__.return_value
         mock_response = MagicMock()
@@ -547,22 +555,21 @@ class TestGithubV1CallbackProcessor:
         assert result.status == EventCallbackResultStatus.ERROR
         assert 'Failed to send message to agent server' in result.detail

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
     @patch('openhands.app_server.config.get_app_conversation_info_service')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_httpx_client')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.get_prompt_template'
-    )
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
     async def test_agent_server_timeout(
         self,
-        mock_get_prompt_template,
+        mock_get_summary_instruction,
         mock_get_httpx_client,
         mock_get_sandbox_service,
         mock_get_app_conversation_info_service,
@@ -582,7 +589,7 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )

-        mock_get_prompt_template.return_value = 'Please provide a summary'
+        mock_get_summary_instruction.return_value = 'Please provide a summary'

         mock_httpx_client = mock_get_httpx_client.return_value.__aenter__.return_value
         mock_httpx_client.post.side_effect = httpx.TimeoutException('Request timeout')
@@ -607,7 +614,14 @@ class TestGithubV1CallbackProcessor:
         with pytest.raises(ValueError, match='Missing installation ID'):
             processor._get_installation_access_token()

-    @patch.dict(os.environ, {}, clear=True)
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        '',
+    )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        '',
+    )
     def test_get_installation_access_token_missing_credentials(
         self, github_callback_processor
     ):
@@ -616,17 +630,16 @@ class TestGithubV1CallbackProcessor:
     ):
         github_callback_processor._get_installation_access_token()

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key\\nwith_newlines',
-        },
-    )
-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Auth')
-    @patch(
-        'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration'
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key\nwith_newlines',
+    )
+    @patch('integrations.github.github_v1_callback_processor.Auth')
+    @patch('integrations.github.github_v1_callback_processor.GithubIntegration')
     def test_get_installation_access_token_success(
         self, mock_github_integration, mock_auth, github_callback_processor
     ):
@@ -649,7 +662,7 @@ class TestGithubV1CallbackProcessor:
         mock_github_integration.assert_called_once_with(auth=mock_app_auth_instance)
         mock_integration_instance.get_access_token.assert_called_once_with(12345)

-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github')
+    @patch('integrations.github.github_v1_callback_processor.Github')
     async def test_post_summary_to_github_issue_comment(
         self, mock_github, github_callback_processor
     ):
@@ -672,7 +685,7 @@ class TestGithubV1CallbackProcessor:
         mock_repo.get_issue.assert_called_once_with(number=42)
         mock_issue.create_comment.assert_called_once_with('Test summary')

-    @patch('openhands.app_server.event_callback.github_v1_callback_processor.Github')
+    @patch('integrations.github.github_v1_callback_processor.Github')
     async def test_post_summary_to_github_pr_comment(
         self, mock_github, github_callback_processor_inline
     ):
@@ -708,14 +721,15 @@ class TestGithubV1CallbackProcessor:
         with pytest.raises(RuntimeError, match='Missing GitHub credentials'):
             await github_callback_processor._post_summary_to_github('Test summary')

-    @patch.dict(
-        os.environ,
-        {
-            'GITHUB_APP_CLIENT_ID': 'test_client_id',
-            'GITHUB_APP_PRIVATE_KEY': 'test_private_key',
-            'WEB_HOST': 'test.example.com',
-        },
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID',
+        'test_client_id',
     )
+    @patch(
+        'integrations.github.github_v1_callback_processor.GITHUB_APP_PRIVATE_KEY',
+        'test_private_key',
+    )
+    @patch('integrations.github.github_v1_callback_processor.get_summary_instruction')
     @patch('openhands.app_server.config.get_httpx_client')
     @patch('openhands.app_server.config.get_sandbox_service')
     @patch('openhands.app_server.config.get_app_conversation_info_service')
@@ -724,6 +738,7 @@ class TestGithubV1CallbackProcessor:
         mock_get_app_conversation_info_service,
         mock_get_sandbox_service,
         mock_get_httpx_client,
+        mock_get_summary_instruction,
         github_callback_processor,
         conversation_state_update_event,
         event_callback,
@@ -741,13 +756,14 @@ class TestGithubV1CallbackProcessor:
             mock_sandbox_info,
         )
         mock_httpx_client.post.side_effect = Exception('Simulated agent server error')
+        mock_get_summary_instruction.return_value = 'Please provide a summary'

         with (
             patch(
-                'openhands.app_server.event_callback.github_v1_callback_processor.GithubIntegration'
+                'integrations.github.github_v1_callback_processor.GithubIntegration'
             ) as mock_github_integration,
             patch(
-                'openhands.app_server.event_callback.github_v1_callback_processor.Github'
+                'integrations.github.github_v1_callback_processor.Github'
            ) as mock_github,
         ):
             mock_integration = MagicMock()
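Why these tests moved from `patch.dict(os.environ, ...)` to patching module-level constants: a constant evaluated from the environment at import time never sees later environment changes, so patching `os.environ` in the test is a no-op for it. A self-contained demonstration (the fake module below stands in for `github_v1_callback_processor`):

```python
import os
import types
from unittest.mock import patch

# Imagine this line living at module top level in the processor:
GITHUB_APP_CLIENT_ID = os.environ.get('GITHUB_APP_CLIENT_ID', '')

with patch.dict(os.environ, {'GITHUB_APP_CLIENT_ID': 'test_client_id'}):
    # Too late: the constant was bound when the module was imported.
    print(repr(GITHUB_APP_CLIENT_ID))  # '' (whatever it was at import time)

# Patching the attribute on the module that uses it works instead:
module = types.ModuleType('fake_processor')
module.GITHUB_APP_CLIENT_ID = ''
with patch.object(module, 'GITHUB_APP_CLIENT_ID', 'test_client_id'):
    print(module.GITHUB_APP_CLIENT_ID)  # 'test_client_id'
```

The string form used in the diff, `@patch('integrations.github.github_v1_callback_processor.GITHUB_APP_CLIENT_ID', 'test_client_id')`, is the decorator equivalent of the `patch.object` call above.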
enterprise/tests/unit/integrations/gitlab/test_gitlab_service.py (new file, 204 lines)
@@ -0,0 +1,204 @@
+"""Unit tests for SaaSGitLabService."""
+
+from unittest.mock import patch
+
+import pytest
+from integrations.gitlab.gitlab_service import SaaSGitLabService
+
+
+@pytest.fixture
+def gitlab_service():
+    """Create a SaaSGitLabService instance for testing."""
+    return SaaSGitLabService(external_auth_id='test_user_id')
+
+
+class TestGetUserResourcesWithAdminAccess:
+    """Test cases for get_user_resources_with_admin_access method."""
+
+    @pytest.mark.asyncio
+    async def test_get_resources_single_page_projects_and_groups(self, gitlab_service):
+        """Test fetching resources when all data fits in a single page."""
+        # Arrange
+        mock_projects = [
+            {'id': 1, 'name': 'Project 1'},
+            {'id': 2, 'name': 'Project 2'},
+        ]
+        mock_groups = [
+            {'id': 10, 'name': 'Group 1'},
+        ]
+
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            # First call for projects, second for groups
+            mock_request.side_effect = [
+                (mock_projects, {'Link': ''}),  # No next page
+                (mock_groups, {'Link': ''}),  # No next page
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            assert len(projects) == 2
+            assert len(groups) == 1
+            assert projects[0]['id'] == 1
+            assert projects[1]['id'] == 2
+            assert groups[0]['id'] == 10
+            assert mock_request.call_count == 2
+
+    @pytest.mark.asyncio
+    async def test_get_resources_multiple_pages_projects(self, gitlab_service):
+        """Test fetching projects across multiple pages."""
+        # Arrange
+        page1_projects = [{'id': i, 'name': f'Project {i}'} for i in range(1, 101)]
+        page2_projects = [{'id': i, 'name': f'Project {i}'} for i in range(101, 151)]
+
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                (page1_projects, {'Link': '<url>; rel="next"'}),  # Has next page
+                (page2_projects, {'Link': ''}),  # Last page
+                ([], {'Link': ''}),  # Groups (empty)
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            assert len(projects) == 150
+            assert len(groups) == 0
+            assert mock_request.call_count == 3
+
+    @pytest.mark.asyncio
+    async def test_get_resources_multiple_pages_groups(self, gitlab_service):
+        """Test fetching groups across multiple pages."""
+        # Arrange
+        page1_groups = [{'id': i, 'name': f'Group {i}'} for i in range(1, 101)]
+        page2_groups = [{'id': i, 'name': f'Group {i}'} for i in range(101, 151)]
+
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                ([], {'Link': ''}),  # Projects (empty)
+                (page1_groups, {'Link': '<url>; rel="next"'}),  # Has next page
+                (page2_groups, {'Link': ''}),  # Last page
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            assert len(projects) == 0
+            assert len(groups) == 150
+            assert mock_request.call_count == 3
+
+    @pytest.mark.asyncio
+    async def test_get_resources_empty_response(self, gitlab_service):
+        """Test when user has no projects or groups with admin access."""
+        # Arrange
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                ([], {'Link': ''}),  # No projects
+                ([], {'Link': ''}),  # No groups
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            assert len(projects) == 0
+            assert len(groups) == 0
+            assert mock_request.call_count == 2
+
+    @pytest.mark.asyncio
+    async def test_get_resources_uses_correct_params_for_projects(self, gitlab_service):
+        """Test that projects API is called with correct parameters."""
+        # Arrange
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                ([], {'Link': ''}),  # Projects
+                ([], {'Link': ''}),  # Groups
+            ]
+
+            # Act
+            await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            # Check first call (projects)
+            first_call = mock_request.call_args_list[0]
+            assert 'projects' in first_call[0][0]
+            assert first_call[0][1]['membership'] == 1
+            assert first_call[0][1]['min_access_level'] == 40
+            assert first_call[0][1]['per_page'] == '100'
+
+    @pytest.mark.asyncio
+    async def test_get_resources_uses_correct_params_for_groups(self, gitlab_service):
+        """Test that groups API is called with correct parameters."""
+        # Arrange
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                ([], {'Link': ''}),  # Projects
+                ([], {'Link': ''}),  # Groups
+            ]
+
+            # Act
+            await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            # Check second call (groups)
+            second_call = mock_request.call_args_list[1]
+            assert 'groups' in second_call[0][0]
+            assert second_call[0][1]['min_access_level'] == 40
+            assert second_call[0][1]['top_level_only'] == 'true'
+            assert second_call[0][1]['per_page'] == '100'
+
+    @pytest.mark.asyncio
+    async def test_get_resources_handles_api_error_gracefully(self, gitlab_service):
+        """Test that API errors are handled gracefully and don't crash."""
+        # Arrange
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            # First call succeeds, second call fails
+            mock_request.side_effect = [
+                ([{'id': 1, 'name': 'Project 1'}], {'Link': ''}),
+                Exception('API Error'),
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            # Should return what was fetched before the error
+            assert len(projects) == 1
+            assert len(groups) == 0
+
+    @pytest.mark.asyncio
+    async def test_get_resources_stops_on_empty_response(self, gitlab_service):
+        """Test that pagination stops when API returns empty response."""
+        # Arrange
+        with patch.object(gitlab_service, '_make_request') as mock_request:
+            mock_request.side_effect = [
+                (None, {'Link': ''}),  # Empty response stops pagination
+                ([], {'Link': ''}),  # Groups
+            ]
+
+            # Act
+            (
+                projects,
+                groups,
+            ) = await gitlab_service.get_user_resources_with_admin_access()
+
+            # Assert
+            assert len(projects) == 0
+            assert mock_request.call_count == 2  # Should not continue pagination
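These tests pin down a Link-header pagination loop without showing its implementation. A hedged reconstruction of the behavior they imply — the real `_make_request` is assumed to return a `(json_payload, response_headers)` tuple, as the mocks above do:

```python
import asyncio


async def fetch_all(make_request, url: str, params: dict) -> list[dict]:
    """Follow GitLab-style pagination until no rel="next" Link remains."""
    results: list[dict] = []
    page = 1
    while True:
        try:
            payload, headers = await make_request(url, {**params, 'page': str(page)})
        except Exception:
            break  # degrade gracefully: keep what was already fetched
        if not payload:
            break  # None/empty payload stops pagination
        results.extend(payload)
        if 'rel="next"' not in headers.get('Link', ''):
            break  # no next page advertised
        page += 1
    return results


async def fake_request(url, params):
    pages = {
        '1': ([{'id': 1}], {'Link': '<url>; rel="next"'}),
        '2': ([{'id': 2}], {'Link': ''}),
    }
    return pages[params['page']]


print(asyncio.run(fetch_all(fake_request, '/projects', {})))  # [{'id': 1}, {'id': 2}]
```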
@@ -18,7 +18,11 @@ from integrations.jira.jira_view import (
 from integrations.models import Message, SourceType

 from openhands.integrations.service_types import ProviderType, Repository
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)


 class TestJiraManagerInit:
@@ -732,6 +736,32 @@ class TestStartJob:
         call_args = jira_manager.send_message.call_args[0]
         assert 'valid LLM API key' in call_args[0].message

+    @pytest.mark.asyncio
+    async def test_start_job_session_expired_error(
+        self, jira_manager, sample_jira_workspace
+    ):
+        """Test job start with session expired error."""
+        mock_view = MagicMock(spec=JiraNewConversationView)
+        mock_view.jira_user = MagicMock()
+        mock_view.jira_user.keycloak_user_id = 'test_user'
+        mock_view.job_context = MagicMock()
+        mock_view.job_context.issue_key = 'PROJ-123'
+        mock_view.jira_workspace = sample_jira_workspace
+        mock_view.create_or_update_conversation = AsyncMock(
+            side_effect=SessionExpiredError('Session expired')
+        )
+
+        jira_manager.send_message = AsyncMock()
+        jira_manager.token_manager.decrypt_text.return_value = 'decrypted_key'
+
+        await jira_manager.start_job(mock_view)
+
+        # Should send error message about session expired
+        jira_manager.send_message.assert_called_once()
+        call_args = jira_manager.send_message.call_args[0]
+        assert 'session has expired' in call_args[0].message
+        assert 'login again' in call_args[0].message
+
     @pytest.mark.asyncio
     async def test_start_job_unexpected_error(
         self, jira_manager, sample_jira_workspace
@@ -18,7 +18,11 @@ from integrations.jira_dc.jira_dc_view import (
 from integrations.models import Message, SourceType

 from openhands.integrations.service_types import ProviderType, Repository
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)


 class TestJiraDcManagerInit:
@@ -761,6 +765,32 @@ class TestStartJob:
         call_args = jira_dc_manager.send_message.call_args[0]
         assert 'valid LLM API key' in call_args[0].message

+    @pytest.mark.asyncio
+    async def test_start_job_session_expired_error(
+        self, jira_dc_manager, sample_jira_dc_workspace
+    ):
+        """Test job start with session expired error."""
+        mock_view = MagicMock(spec=JiraDcNewConversationView)
+        mock_view.jira_dc_user = MagicMock()
+        mock_view.jira_dc_user.keycloak_user_id = 'test_user'
+        mock_view.job_context = MagicMock()
+        mock_view.job_context.issue_key = 'PROJ-123'
+        mock_view.jira_dc_workspace = sample_jira_dc_workspace
+        mock_view.create_or_update_conversation = AsyncMock(
+            side_effect=SessionExpiredError('Session expired')
+        )
+
+        jira_dc_manager.send_message = AsyncMock()
+        jira_dc_manager.token_manager.decrypt_text.return_value = 'decrypted_key'
+
+        await jira_dc_manager.start_job(mock_view)
+
+        # Should send error message about session expired
+        jira_dc_manager.send_message.assert_called_once()
+        call_args = jira_dc_manager.send_message.call_args[0]
+        assert 'session has expired' in call_args[0].message
+        assert 'login again' in call_args[0].message
+
     @pytest.mark.asyncio
     async def test_start_job_unexpected_error(
         self, jira_dc_manager, sample_jira_dc_workspace
@@ -18,7 +18,11 @@ from integrations.linear.linear_view import (
 from integrations.models import Message, SourceType

 from openhands.integrations.service_types import ProviderType, Repository
-from openhands.server.types import LLMAuthenticationError, MissingSettingsError
+from openhands.server.types import (
+    LLMAuthenticationError,
+    MissingSettingsError,
+    SessionExpiredError,
+)


 class TestLinearManagerInit:
@@ -826,6 +830,33 @@ class TestStartJob:
         call_args = linear_manager.send_message.call_args[0]
         assert 'valid LLM API key' in call_args[0].message

+    @pytest.mark.asyncio
+    async def test_start_job_session_expired_error(
+        self, linear_manager, sample_linear_workspace
+    ):
+        """Test job start with session expired error."""
+        mock_view = MagicMock(spec=LinearNewConversationView)
+        mock_view.linear_user = MagicMock()
+        mock_view.linear_user.keycloak_user_id = 'test_user'
+        mock_view.job_context = MagicMock()
+        mock_view.job_context.issue_key = 'TEST-123'
+        mock_view.job_context.issue_id = 'issue_id'
+        mock_view.linear_workspace = sample_linear_workspace
+        mock_view.create_or_update_conversation = AsyncMock(
+            side_effect=SessionExpiredError('Session expired')
+        )
+
+        linear_manager.send_message = AsyncMock()
+        linear_manager.token_manager.decrypt_text.return_value = 'decrypted_key'
+
+        await linear_manager.start_job(mock_view)
+
+        # Should send error message about session expired
+        linear_manager.send_message.assert_called_once()
+        call_args = linear_manager.send_message.call_args[0]
+        assert 'session has expired' in call_args[0].message
+        assert 'login again' in call_args[0].message
+
     @pytest.mark.asyncio
     async def test_start_job_unexpected_error(
         self, linear_manager, sample_linear_workspace
@@ -4,7 +4,9 @@ from unittest.mock import patch

 import pytest
 from integrations.utils import (
+    HOST_URL,
     append_conversation_footer,
+    get_session_expired_message,
     get_summary_for_agent_state,
 )

@@ -164,6 +166,68 @@ class TestGetSummaryForAgentState:
         assert self.conversation_link not in result


+class TestGetSessionExpiredMessage:
+    """Test cases for get_session_expired_message function."""
+
+    def test_message_with_username_contains_at_prefix(self):
+        """Test that the message contains the username with @ prefix."""
+        result = get_session_expired_message('testuser')
+        assert '@testuser' in result
+
+    def test_message_with_username_contains_session_expired_text(self):
+        """Test that the message contains session expired text."""
+        result = get_session_expired_message('testuser')
+        assert 'session has expired' in result
+
+    def test_message_with_username_contains_login_instruction(self):
+        """Test that the message contains login instruction."""
+        result = get_session_expired_message('testuser')
+        assert 'login again' in result
+
+    def test_message_with_username_contains_host_url(self):
+        """Test that the message contains the OpenHands Cloud URL."""
+        result = get_session_expired_message('testuser')
+        assert HOST_URL in result
+        assert 'OpenHands Cloud' in result
+
+    def test_different_usernames(self):
+        """Test that different usernames produce different messages."""
+        result1 = get_session_expired_message('user1')
+        result2 = get_session_expired_message('user2')
+        assert '@user1' in result1
+        assert '@user2' in result2
+        assert '@user1' not in result2
+        assert '@user2' not in result1
+
+    def test_message_without_username_contains_session_expired_text(self):
+        """Test that the message without username contains session expired text."""
+        result = get_session_expired_message()
+        assert 'session has expired' in result
+
+    def test_message_without_username_contains_login_instruction(self):
+        """Test that the message without username contains login instruction."""
+        result = get_session_expired_message()
+        assert 'login again' in result
+
+    def test_message_without_username_contains_host_url(self):
+        """Test that the message without username contains the OpenHands Cloud URL."""
+        result = get_session_expired_message()
+        assert HOST_URL in result
+        assert 'OpenHands Cloud' in result
+
+    def test_message_without_username_does_not_contain_at_prefix(self):
+        """Test that the message without username does not contain @ prefix."""
+        result = get_session_expired_message()
+        assert not result.startswith('@')
+        assert 'Your session' in result
+
+    def test_message_with_none_username(self):
+        """Test that passing None explicitly works the same as no argument."""
+        result = get_session_expired_message(None)
+        assert not result.startswith('@')
+        assert 'Your session' in result
+
+
 class TestAppendConversationFooter:
     """Test cases for append_conversation_footer function."""
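Taken together, these assertions constrain the message quite tightly. A hedged reconstruction that satisfies every test above — the real implementation lives in `integrations/utils.py` and may differ in wording beyond the asserted substrings, and `HOST_URL` is a placeholder here:

```python
HOST_URL = 'https://app.all-hands.dev'  # placeholder for the real constant


def get_session_expired_message(username: str | None = None) -> str:
    # '@user, your session has expired...' with a username,
    # 'Your session has expired...' without one.
    mention = f'@{username}, your' if username else 'Your'
    return (
        f'{mention} session has expired. '
        f'Please login again at [OpenHands Cloud]({HOST_URL}).'
    )


assert '@testuser' in get_session_expired_message('testuser')
msg = get_session_expired_message()
assert msg.startswith('Your session')
assert 'session has expired' in msg and 'login again' in msg and HOST_URL in msg
```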
@@ -0,0 +1,502 @@
+"""Unit tests for GitLab integration routes."""
+
+from unittest.mock import AsyncMock, MagicMock, patch
+
+import pytest
+from fastapi import HTTPException, status
+from integrations.gitlab.gitlab_service import SaaSGitLabService
+from integrations.gitlab.webhook_installation import BreakLoopException
+from integrations.types import GitLabResourceType
+from server.routes.integration.gitlab import (
+    ReinstallWebhookRequest,
+    ResourceIdentifier,
+    get_gitlab_resources,
+    reinstall_gitlab_webhook,
+)
+from storage.gitlab_webhook import GitlabWebhook
+
+
+@pytest.fixture
+def mock_gitlab_service():
+    """Create a mock SaaSGitLabService instance."""
+    service = MagicMock(spec=SaaSGitLabService)
+    service.get_user_resources_with_admin_access = AsyncMock(
+        return_value=(
+            [
+                {
+                    'id': 1,
+                    'name': 'Test Project',
+                    'path_with_namespace': 'user/test-project',
+                    'namespace': {'kind': 'user'},
+                },
+                {
+                    'id': 2,
+                    'name': 'Group Project',
+                    'path_with_namespace': 'group/group-project',
+                    'namespace': {'kind': 'group'},
+                },
+            ],
+            [
+                {
+                    'id': 10,
+                    'name': 'Test Group',
+                    'full_path': 'test-group',
+                },
+            ],
+        )
+    )
+    service.check_webhook_exists_on_resource = AsyncMock(return_value=(True, None))
+    service.check_user_has_admin_access_to_resource = AsyncMock(
+        return_value=(True, None)
+    )
+    return service
+
+
+@pytest.fixture
+def mock_webhook():
+    """Create a mock webhook object."""
+    webhook = MagicMock(spec=GitlabWebhook)
+    webhook.webhook_uuid = 'test-uuid'
+    webhook.last_synced = None
+    return webhook
+
+
+class TestGetGitLabResources:
+    """Test cases for get_gitlab_resources endpoint."""
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.GitLabServiceImpl')
+    @patch('server.routes.integration.gitlab.webhook_store')
+    @patch('server.routes.integration.gitlab.isinstance')
+    async def test_get_resources_success(
+        self,
+        mock_isinstance,
+        mock_webhook_store,
+        mock_gitlab_service_impl,
+        mock_gitlab_service,
+    ):
+        """Test successfully retrieving GitLab resources with webhook status."""
+        # Arrange
+        user_id = 'test_user_id'
+        mock_gitlab_service_impl.return_value = mock_gitlab_service
+        mock_isinstance.return_value = True
+        mock_webhook_store.get_webhooks_by_resources = AsyncMock(
+            return_value=({}, {})  # Empty maps for simplicity
+        )
+
+        # Act
+        response = await get_gitlab_resources(user_id=user_id)
+
+        # Assert
+        assert len(response.resources) == 2  # 1 project (filtered) + 1 group
+        assert response.resources[0].type == 'project'
+        assert response.resources[0].id == '1'
+        assert response.resources[0].name == 'Test Project'
+        assert response.resources[1].type == 'group'
+        assert response.resources[1].id == '10'
+        mock_gitlab_service.get_user_resources_with_admin_access.assert_called_once()
+        mock_webhook_store.get_webhooks_by_resources.assert_called_once()
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.GitLabServiceImpl')
+    @patch('server.routes.integration.gitlab.webhook_store')
+    @patch('server.routes.integration.gitlab.isinstance')
+    async def test_get_resources_filters_nested_projects(
+        self,
+        mock_isinstance,
+        mock_webhook_store,
+        mock_gitlab_service_impl,
+        mock_gitlab_service,
+    ):
+        """Test that projects nested under groups are filtered out."""
+        # Arrange
+        user_id = 'test_user_id'
+        mock_gitlab_service_impl.return_value = mock_gitlab_service
+        mock_isinstance.return_value = True
+        mock_webhook_store.get_webhooks_by_resources = AsyncMock(return_value=({}, {}))
+
+        # Act
+        response = await get_gitlab_resources(user_id=user_id)
+
+        # Assert
+        # Should only include the user project, not the group project
+        project_resources = [r for r in response.resources if r.type == 'project']
+        assert len(project_resources) == 1
+        assert project_resources[0].id == '1'
+        assert project_resources[0].name == 'Test Project'
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.GitLabServiceImpl')
+    @patch('server.routes.integration.gitlab.webhook_store')
+    @patch('server.routes.integration.gitlab.isinstance')
+    async def test_get_resources_includes_webhook_metadata(
+        self,
+        mock_isinstance,
+        mock_webhook_store,
+        mock_gitlab_service_impl,
+        mock_gitlab_service,
+        mock_webhook,
+    ):
+        """Test that webhook metadata is included in the response."""
+        # Arrange
+        user_id = 'test_user_id'
+        mock_gitlab_service_impl.return_value = mock_gitlab_service
+        mock_isinstance.return_value = True
+        mock_webhook_store.get_webhooks_by_resources = AsyncMock(
+            return_value=({'1': mock_webhook}, {'10': mock_webhook})
+        )
+
+        # Act
+        response = await get_gitlab_resources(user_id=user_id)
+
+        # Assert
+        assert response.resources[0].webhook_uuid == 'test-uuid'
+        assert response.resources[1].webhook_uuid == 'test-uuid'
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.GitLabServiceImpl')
+    async def test_get_resources_non_saas_service(
+        self, mock_gitlab_service_impl, mock_gitlab_service
+    ):
+        """Test that non-SaaS GitLab service raises an error."""
+        # Arrange
+        user_id = 'test_user_id'
+        non_saas_service = AsyncMock()
+        mock_gitlab_service_impl.return_value = non_saas_service
+
+        # Act & Assert
+        with pytest.raises(HTTPException) as exc_info:
+            await get_gitlab_resources(user_id=user_id)
+
+        assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST
+        assert 'Only SaaS GitLab service is supported' in exc_info.value.detail
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.GitLabServiceImpl')
+    @patch('server.routes.integration.gitlab.webhook_store')
+    @patch('server.routes.integration.gitlab.isinstance')
+    async def test_get_resources_parallel_api_calls(
+        self,
+        mock_isinstance,
+        mock_webhook_store,
+        mock_gitlab_service_impl,
+        mock_gitlab_service,
+    ):
+        """Test that webhook status checks are made in parallel."""
+        # Arrange
+        user_id = 'test_user_id'
+        mock_gitlab_service_impl.return_value = mock_gitlab_service
+        mock_isinstance.return_value = True
+        mock_webhook_store.get_webhooks_by_resources = AsyncMock(return_value=({}, {}))
+        call_count = 0
+
+        async def track_calls(*args, **kwargs):
+            nonlocal call_count
+            call_count += 1
+            return (True, None)
+
+        mock_gitlab_service.check_webhook_exists_on_resource = AsyncMock(
+            side_effect=track_calls
+        )
+
+        # Act
+        await get_gitlab_resources(user_id=user_id)
+
+        # Assert
+        # Should be called for each resource (1 project + 1 group)
+        assert call_count == 2
+
+
+class TestReinstallGitLabWebhook:
+    """Test cases for reinstall_gitlab_webhook endpoint."""
+
+    @pytest.mark.asyncio
+    @patch('server.routes.integration.gitlab.install_webhook_on_resource')
+    @patch('server.routes.integration.gitlab.verify_webhook_conditions')
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.webhook_store')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_success_existing_webhook(
|
||||
self,
|
||||
mock_isinstance,
|
||||
mock_webhook_store,
|
||||
mock_gitlab_service_impl,
|
||||
mock_verify_conditions,
|
||||
mock_install_webhook,
|
||||
mock_gitlab_service,
|
||||
mock_webhook,
|
||||
):
|
||||
"""Test successful webhook reinstallation when webhook record exists."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-123'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource = AsyncMock(
|
||||
return_value=True
|
||||
)
|
||||
mock_webhook_store.get_webhook_by_resource_only = AsyncMock(
|
||||
return_value=mock_webhook
|
||||
)
|
||||
mock_verify_conditions.return_value = None
|
||||
mock_install_webhook.return_value = ('webhook-id-123', None)
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act
|
||||
result = await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
# Assert
|
||||
assert result.success is True
|
||||
assert result.resource_id == resource_id
|
||||
assert result.resource_type == resource_type.value
|
||||
assert result.error is None
|
||||
mock_gitlab_service.check_user_has_admin_access_to_resource.assert_called_once_with(
|
||||
resource_type, resource_id
|
||||
)
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource.assert_called_once_with(
|
||||
resource_type, resource_id, user_id
|
||||
)
|
||||
mock_verify_conditions.assert_called_once()
|
||||
mock_install_webhook.assert_called_once()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.install_webhook_on_resource')
|
||||
@patch('server.routes.integration.gitlab.verify_webhook_conditions')
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.webhook_store')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_success_new_webhook_record(
|
||||
self,
|
||||
mock_isinstance,
|
||||
mock_webhook_store,
|
||||
mock_gitlab_service_impl,
|
||||
mock_verify_conditions,
|
||||
mock_install_webhook,
|
||||
mock_gitlab_service,
|
||||
):
|
||||
"""Test successful webhook reinstallation when webhook record doesn't exist."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-456'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource = (
|
||||
AsyncMock(return_value=False) # No existing webhook to reset
|
||||
)
|
||||
mock_webhook_store.get_webhook_by_resource_only = AsyncMock(
|
||||
side_effect=[
|
||||
None,
|
||||
MagicMock(),
|
||||
] # First call returns None, second returns new webhook
|
||||
)
|
||||
mock_webhook_store.store_webhooks = AsyncMock()
|
||||
mock_verify_conditions.return_value = None
|
||||
mock_install_webhook.return_value = ('webhook-id-456', None)
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act
|
||||
result = await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
# Assert
|
||||
assert result.success is True
|
||||
mock_webhook_store.store_webhooks.assert_called_once()
|
||||
# Should fetch webhook twice: once to check, once after creating
|
||||
assert mock_webhook_store.get_webhook_by_resource_only.call_count == 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_no_admin_access(
|
||||
self, mock_isinstance, mock_gitlab_service_impl, mock_gitlab_service
|
||||
):
|
||||
"""Test reinstallation when user doesn't have admin access."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-789'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_gitlab_service.check_user_has_admin_access_to_resource = AsyncMock(
|
||||
return_value=(False, None)
|
||||
)
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
assert exc_info.value.status_code == status.HTTP_403_FORBIDDEN
|
||||
assert 'does not have admin access' in exc_info.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
async def test_reinstall_webhook_non_saas_service(self, mock_gitlab_service_impl):
|
||||
"""Test reinstallation with non-SaaS GitLab service."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-999'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
non_saas_service = AsyncMock()
|
||||
mock_gitlab_service_impl.return_value = non_saas_service
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'Only SaaS GitLab service is supported' in exc_info.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.install_webhook_on_resource')
|
||||
@patch('server.routes.integration.gitlab.verify_webhook_conditions')
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.webhook_store')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_conditions_not_met(
|
||||
self,
|
||||
mock_isinstance,
|
||||
mock_webhook_store,
|
||||
mock_gitlab_service_impl,
|
||||
mock_verify_conditions,
|
||||
mock_install_webhook,
|
||||
mock_gitlab_service,
|
||||
mock_webhook,
|
||||
):
|
||||
"""Test reinstallation when webhook conditions are not met."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-111'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource = AsyncMock(
|
||||
return_value=True
|
||||
)
|
||||
mock_webhook_store.get_webhook_by_resource_only = AsyncMock(
|
||||
return_value=mock_webhook
|
||||
)
|
||||
mock_verify_conditions.side_effect = BreakLoopException()
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
assert exc_info.value.status_code == status.HTTP_400_BAD_REQUEST
|
||||
assert 'conditions not met' in exc_info.value.detail.lower()
|
||||
mock_install_webhook.assert_not_called()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.install_webhook_on_resource')
|
||||
@patch('server.routes.integration.gitlab.verify_webhook_conditions')
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.webhook_store')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_installation_fails(
|
||||
self,
|
||||
mock_isinstance,
|
||||
mock_webhook_store,
|
||||
mock_gitlab_service_impl,
|
||||
mock_verify_conditions,
|
||||
mock_install_webhook,
|
||||
mock_gitlab_service,
|
||||
mock_webhook,
|
||||
):
|
||||
"""Test reinstallation when webhook installation fails."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'project-222'
|
||||
resource_type = GitLabResourceType.PROJECT
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource = AsyncMock(
|
||||
return_value=True
|
||||
)
|
||||
mock_webhook_store.get_webhook_by_resource_only = AsyncMock(
|
||||
return_value=mock_webhook
|
||||
)
|
||||
mock_verify_conditions.return_value = None
|
||||
mock_install_webhook.return_value = (None, None) # Installation failed
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act & Assert
|
||||
with pytest.raises(HTTPException) as exc_info:
|
||||
await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
assert exc_info.value.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR
|
||||
assert 'Failed to install webhook' in exc_info.value.detail
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@patch('server.routes.integration.gitlab.install_webhook_on_resource')
|
||||
@patch('server.routes.integration.gitlab.verify_webhook_conditions')
|
||||
@patch('server.routes.integration.gitlab.GitLabServiceImpl')
|
||||
@patch('server.routes.integration.gitlab.webhook_store')
|
||||
@patch('server.routes.integration.gitlab.isinstance')
|
||||
async def test_reinstall_webhook_group_resource(
|
||||
self,
|
||||
mock_isinstance,
|
||||
mock_webhook_store,
|
||||
mock_gitlab_service_impl,
|
||||
mock_verify_conditions,
|
||||
mock_install_webhook,
|
||||
mock_gitlab_service,
|
||||
mock_webhook,
|
||||
):
|
||||
"""Test reinstallation for a group resource."""
|
||||
# Arrange
|
||||
user_id = 'test_user_id'
|
||||
resource_id = 'group-333'
|
||||
resource_type = GitLabResourceType.GROUP
|
||||
|
||||
mock_gitlab_service_impl.return_value = mock_gitlab_service
|
||||
mock_isinstance.return_value = True
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource = AsyncMock(
|
||||
return_value=True
|
||||
)
|
||||
mock_webhook_store.get_webhook_by_resource_only = AsyncMock(
|
||||
return_value=mock_webhook
|
||||
)
|
||||
mock_verify_conditions.return_value = None
|
||||
mock_install_webhook.return_value = ('webhook-id-group', None)
|
||||
|
||||
body = ReinstallWebhookRequest(
|
||||
resource=ResourceIdentifier(type=resource_type, id=resource_id)
|
||||
)
|
||||
|
||||
# Act
|
||||
result = await reinstall_gitlab_webhook(body=body, user_id=user_id)
|
||||
|
||||
# Assert
|
||||
assert result.success is True
|
||||
assert result.resource_id == resource_id
|
||||
assert result.resource_type == resource_type.value
|
||||
mock_webhook_store.reset_webhook_for_reinstallation_by_resource.assert_called_once_with(
|
||||
resource_type, resource_id, user_id
|
||||
)
|
||||
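For context on what these route tests assert: the resource listing is expected to drop projects whose namespace kind is 'group', since the parent group entry already covers them. A minimal sketch of that filtering, assuming only the project-dict shape used in the fixtures above (the helper name is illustrative, not the actual route code):

# Illustrative sketch only; the real filtering lives in the route handler.
def filter_user_projects(projects: list[dict]) -> list[dict]:
    """Keep only projects owned by a user namespace.

    Projects under a group namespace are dropped because the group entry
    already represents them (see test_get_resources_filters_nested_projects).
    """
    return [p for p in projects if p.get('namespace', {}).get('kind') == 'user']

# With the fixture data above, only 'Test Project' (id=1) survives the filter.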
enterprise/tests/unit/storage/test_gitlab_webhook_store.py (new file, 388 lines)
@@ -0,0 +1,388 @@
"""Unit tests for GitlabWebhookStore."""

import pytest
from integrations.types import GitLabResourceType
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine
from sqlalchemy.pool import StaticPool
from storage.base import Base
from storage.gitlab_webhook import GitlabWebhook
from storage.gitlab_webhook_store import GitlabWebhookStore


@pytest.fixture
async def async_engine():
    """Create an async SQLite engine for testing."""
    engine = create_async_engine(
        'sqlite+aiosqlite:///:memory:',
        poolclass=StaticPool,
        connect_args={'check_same_thread': False},
        echo=False,
    )

    # Create all tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    yield engine

    await engine.dispose()


@pytest.fixture
async def async_session_maker(async_engine):
    """Create an async session maker for testing."""
    return async_sessionmaker(async_engine, class_=AsyncSession, expire_on_commit=False)


@pytest.fixture
async def webhook_store(async_session_maker):
    """Create a GitlabWebhookStore instance for testing."""
    return GitlabWebhookStore(a_session_maker=async_session_maker)


@pytest.fixture
async def sample_webhooks(async_session_maker):
    """Create sample webhook records for testing."""
    async with async_session_maker() as session:
        # Create webhooks for user_1
        webhook1 = GitlabWebhook(
            project_id='project-1',
            group_id=None,
            user_id='user_1',
            webhook_exists=True,
            webhook_url='https://example.com/webhook',
            webhook_secret='secret-1',
            webhook_uuid='uuid-1',
        )
        webhook2 = GitlabWebhook(
            project_id='project-2',
            group_id=None,
            user_id='user_1',
            webhook_exists=True,
            webhook_url='https://example.com/webhook',
            webhook_secret='secret-2',
            webhook_uuid='uuid-2',
        )
        webhook3 = GitlabWebhook(
            project_id=None,
            group_id='group-1',
            user_id='user_1',
            webhook_exists=False,  # Already marked for reinstallation
            webhook_url='https://example.com/webhook',
            webhook_secret='secret-3',
            webhook_uuid='uuid-3',
        )

        # Create webhook for user_2
        webhook4 = GitlabWebhook(
            project_id='project-3',
            group_id=None,
            user_id='user_2',
            webhook_exists=True,
            webhook_url='https://example.com/webhook',
            webhook_secret='secret-4',
            webhook_uuid='uuid-4',
        )

        session.add_all([webhook1, webhook2, webhook3, webhook4])
        await session.commit()

        # Refresh to get IDs (outside of begin() context)
        await session.refresh(webhook1)
        await session.refresh(webhook2)
        await session.refresh(webhook3)
        await session.refresh(webhook4)

        return [webhook1, webhook2, webhook3, webhook4]


class TestGetWebhookByResourceOnly:
    """Test cases for get_webhook_by_resource_only method."""

    @pytest.mark.asyncio
    async def test_get_project_webhook_by_resource_only(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test getting a project webhook by resource ID without user_id filter."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-1'

        # Act
        webhook = await webhook_store.get_webhook_by_resource_only(
            resource_type, resource_id
        )

        # Assert
        assert webhook is not None
        assert webhook.project_id == resource_id
        assert webhook.user_id == 'user_1'

    @pytest.mark.asyncio
    async def test_get_group_webhook_by_resource_only(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test getting a group webhook by resource ID without user_id filter."""
        # Arrange
        resource_type = GitLabResourceType.GROUP
        resource_id = 'group-1'

        # Act
        webhook = await webhook_store.get_webhook_by_resource_only(
            resource_type, resource_id
        )

        # Assert
        assert webhook is not None
        assert webhook.group_id == resource_id
        assert webhook.user_id == 'user_1'

    @pytest.mark.asyncio
    async def test_get_webhook_by_resource_only_not_found(
        self, webhook_store, async_session_maker
    ):
        """Test getting a webhook that doesn't exist."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'non-existent-project'

        # Act
        webhook = await webhook_store.get_webhook_by_resource_only(
            resource_type, resource_id
        )

        # Assert
        assert webhook is None

    @pytest.mark.asyncio
    async def test_get_webhook_by_resource_only_organization_wide(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test that webhook lookup works regardless of which user originally created it."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-3'  # Created by user_2

        # Act
        webhook = await webhook_store.get_webhook_by_resource_only(
            resource_type, resource_id
        )

        # Assert
        assert webhook is not None
        assert webhook.project_id == resource_id
        # Should find webhook even though it was created by a different user
        assert webhook.user_id == 'user_2'


class TestResetWebhookForReinstallationByResource:
    """Test cases for reset_webhook_for_reinstallation_by_resource method."""

    @pytest.mark.asyncio
    async def test_reset_project_webhook_by_resource(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test resetting a project webhook by resource without user_id filter."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-1'
        updating_user_id = 'user_2'  # Different user can reset it

        # Act
        result = await webhook_store.reset_webhook_for_reinstallation_by_resource(
            resource_type, resource_id, updating_user_id
        )

        # Assert
        assert result is True

        # Verify webhook was reset
        async with async_session_maker() as session:
            result_query = await session.execute(
                select(GitlabWebhook).where(GitlabWebhook.project_id == resource_id)
            )
            webhook = result_query.scalars().first()
            assert webhook.webhook_exists is False
            assert webhook.webhook_uuid is None
            assert (
                webhook.user_id == updating_user_id
            )  # Updated to track who modified it

    @pytest.mark.asyncio
    async def test_reset_group_webhook_by_resource(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test resetting a group webhook by resource without user_id filter."""
        # Arrange
        resource_type = GitLabResourceType.GROUP
        resource_id = 'group-1'
        updating_user_id = 'user_2'

        # Act
        result = await webhook_store.reset_webhook_for_reinstallation_by_resource(
            resource_type, resource_id, updating_user_id
        )

        # Assert
        assert result is True

        # Verify webhook was reset
        async with async_session_maker() as session:
            result_query = await session.execute(
                select(GitlabWebhook).where(GitlabWebhook.group_id == resource_id)
            )
            webhook = result_query.scalars().first()
            assert webhook.webhook_exists is False
            assert webhook.webhook_uuid is None
            assert webhook.user_id == updating_user_id

    @pytest.mark.asyncio
    async def test_reset_webhook_by_resource_not_found(
        self, webhook_store, async_session_maker
    ):
        """Test resetting a webhook that doesn't exist."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'non-existent-project'
        updating_user_id = 'user_1'

        # Act
        result = await webhook_store.reset_webhook_for_reinstallation_by_resource(
            resource_type, resource_id, updating_user_id
        )

        # Assert
        assert result is False

    @pytest.mark.asyncio
    async def test_reset_webhook_by_resource_organization_wide(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test that any user can reset a webhook regardless of original creator."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-3'  # Created by user_2
        updating_user_id = 'user_1'  # Different user resetting it

        # Act
        result = await webhook_store.reset_webhook_for_reinstallation_by_resource(
            resource_type, resource_id, updating_user_id
        )

        # Assert
        assert result is True

        # Verify webhook was reset and user_id updated
        async with async_session_maker() as session:
            result_query = await session.execute(
                select(GitlabWebhook).where(GitlabWebhook.project_id == resource_id)
            )
            webhook = result_query.scalars().first()
            assert webhook.webhook_exists is False
            assert webhook.user_id == updating_user_id


class TestGetWebhooksByResources:
    """Test cases for get_webhooks_by_resources method."""

    @pytest.mark.asyncio
    async def test_get_webhooks_by_resources_projects_only(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test bulk fetching webhooks for multiple projects."""
        # Arrange
        project_ids = ['project-1', 'project-2', 'project-3']
        group_ids: list[str] = []

        # Act
        project_map, group_map = await webhook_store.get_webhooks_by_resources(
            project_ids, group_ids
        )

        # Assert
        assert len(project_map) == 3
        assert 'project-1' in project_map
        assert 'project-2' in project_map
        assert 'project-3' in project_map
        assert len(group_map) == 0

    @pytest.mark.asyncio
    async def test_get_webhooks_by_resources_groups_only(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test bulk fetching webhooks for multiple groups."""
        # Arrange
        project_ids: list[str] = []
        group_ids = ['group-1']

        # Act
        project_map, group_map = await webhook_store.get_webhooks_by_resources(
            project_ids, group_ids
        )

        # Assert
        assert len(project_map) == 0
        assert len(group_map) == 1
        assert 'group-1' in group_map

    @pytest.mark.asyncio
    async def test_get_webhooks_by_resources_mixed(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test bulk fetching webhooks for both projects and groups."""
        # Arrange
        project_ids = ['project-1', 'project-2']
        group_ids = ['group-1']

        # Act
        project_map, group_map = await webhook_store.get_webhooks_by_resources(
            project_ids, group_ids
        )

        # Assert
        assert len(project_map) == 2
        assert len(group_map) == 1
        assert 'project-1' in project_map
        assert 'project-2' in project_map
        assert 'group-1' in group_map

    @pytest.mark.asyncio
    async def test_get_webhooks_by_resources_empty_lists(
        self, webhook_store, async_session_maker
    ):
        """Test bulk fetching with empty ID lists."""
        # Arrange
        project_ids: list[str] = []
        group_ids: list[str] = []

        # Act
        project_map, group_map = await webhook_store.get_webhooks_by_resources(
            project_ids, group_ids
        )

        # Assert
        assert len(project_map) == 0
        assert len(group_map) == 0

    @pytest.mark.asyncio
    async def test_get_webhooks_by_resources_partial_matches(
        self, webhook_store, async_session_maker, sample_webhooks
    ):
        """Test bulk fetching when some IDs don't exist."""
        # Arrange
        project_ids = ['project-1', 'non-existent-project']
        group_ids = ['group-1', 'non-existent-group']

        # Act
        project_map, group_map = await webhook_store.get_webhooks_by_resources(
            project_ids, group_ids
        )

        # Assert
        assert len(project_map) == 1
        assert 'project-1' in project_map
        assert 'non-existent-project' not in project_map
        assert len(group_map) == 1
        assert 'group-1' in group_map
        assert 'non-existent-group' not in group_map
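These store tests pin down the contract of get_webhooks_by_resources: two ID lists in, two lookup maps out, with missing IDs simply absent from the maps. A minimal sketch of an implementation that would satisfy them, assuming only the GitlabWebhook columns used above (this is not the store's actual code):

# Illustrative sketch of the contract the tests above verify.
from sqlalchemy import select


async def get_webhooks_by_resources(session_maker, project_ids, group_ids):
    """Bulk-fetch webhooks, returning ({project_id: webhook}, {group_id: webhook})."""
    project_map: dict[str, GitlabWebhook] = {}
    group_map: dict[str, GitlabWebhook] = {}
    async with session_maker() as session:
        if project_ids:  # skip the query entirely for empty input
            rows = await session.execute(
                select(GitlabWebhook).where(GitlabWebhook.project_id.in_(project_ids))
            )
            project_map = {w.project_id: w for w in rows.scalars()}
        if group_ids:
            rows = await session.execute(
                select(GitlabWebhook).where(GitlabWebhook.group_id.in_(group_ids))
            )
            group_map = {w.group_id: w for w in rows.scalars()}
    return project_map, group_map

Two IN-clause queries keep the lookup to a constant number of round trips regardless of how many resources the caller passes, which is what the bulk tests above are guarding.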
enterprise/tests/unit/sync/test_install_gitlab_webhooks.py (new file, 438 lines)
@@ -0,0 +1,438 @@
"""Unit tests for install_gitlab_webhooks module."""

from unittest.mock import AsyncMock, MagicMock

import pytest
from integrations.gitlab.webhook_installation import (
    BreakLoopException,
    install_webhook_on_resource,
    verify_webhook_conditions,
)
from integrations.types import GitLabResourceType
from integrations.utils import GITLAB_WEBHOOK_URL
from storage.gitlab_webhook import GitlabWebhook, WebhookStatus


@pytest.fixture
def mock_gitlab_service():
    """Create a mock GitLab service."""
    service = MagicMock()
    service.check_resource_exists = AsyncMock(return_value=(True, None))
    service.check_user_has_admin_access_to_resource = AsyncMock(
        return_value=(True, None)
    )
    service.check_webhook_exists_on_resource = AsyncMock(return_value=(False, None))
    service.install_webhook = AsyncMock(return_value=('webhook-id-123', None))
    return service


@pytest.fixture
def mock_webhook_store():
    """Create a mock webhook store."""
    store = MagicMock()
    store.delete_webhook = AsyncMock()
    store.update_webhook = AsyncMock()
    return store


@pytest.fixture
def sample_webhook():
    """Create a sample webhook object."""
    webhook = MagicMock(spec=GitlabWebhook)
    webhook.user_id = 'test_user_id'
    webhook.webhook_exists = False
    webhook.webhook_uuid = None
    return webhook


class TestVerifyWebhookConditions:
    """Test cases for verify_webhook_conditions function."""

    @pytest.mark.asyncio
    async def test_verify_conditions_all_pass(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when all conditions are met for webhook installation."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'

        # Act
        # Should not raise any exception
        await verify_webhook_conditions(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert
        mock_gitlab_service.check_resource_exists.assert_called_once_with(
            resource_type, resource_id
        )
        mock_gitlab_service.check_user_has_admin_access_to_resource.assert_called_once_with(
            resource_type, resource_id
        )
        mock_gitlab_service.check_webhook_exists_on_resource.assert_called_once_with(
            resource_type, resource_id, GITLAB_WEBHOOK_URL
        )
        mock_webhook_store.delete_webhook.assert_not_called()

    @pytest.mark.asyncio
    async def test_verify_conditions_resource_does_not_exist(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when resource does not exist."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-999'
        mock_gitlab_service.check_resource_exists = AsyncMock(
            return_value=(False, None)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

        # Assert webhook is deleted
        mock_webhook_store.delete_webhook.assert_called_once_with(sample_webhook)

    @pytest.mark.asyncio
    async def test_verify_conditions_rate_limited_on_resource_check(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when rate limited during resource existence check."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.check_resource_exists = AsyncMock(
            return_value=(False, WebhookStatus.RATE_LIMITED)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

        # Should not delete webhook on rate limit
        mock_webhook_store.delete_webhook.assert_not_called()

    @pytest.mark.asyncio
    async def test_verify_conditions_user_no_admin_access(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when user does not have admin access."""
        # Arrange
        resource_type = GitLabResourceType.GROUP
        resource_id = 'group-456'
        mock_gitlab_service.check_user_has_admin_access_to_resource = AsyncMock(
            return_value=(False, None)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

        # Assert webhook is deleted
        mock_webhook_store.delete_webhook.assert_called_once_with(sample_webhook)

    @pytest.mark.asyncio
    async def test_verify_conditions_rate_limited_on_admin_check(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when rate limited during admin access check."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.check_user_has_admin_access_to_resource = AsyncMock(
            return_value=(False, WebhookStatus.RATE_LIMITED)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

        # Should not delete webhook on rate limit
        mock_webhook_store.delete_webhook.assert_not_called()

    @pytest.mark.asyncio
    async def test_verify_conditions_webhook_already_exists(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when webhook already exists on resource."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.check_webhook_exists_on_resource = AsyncMock(
            return_value=(True, None)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

    @pytest.mark.asyncio
    async def test_verify_conditions_rate_limited_on_webhook_check(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when rate limited during webhook existence check."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.check_webhook_exists_on_resource = AsyncMock(
            return_value=(False, WebhookStatus.RATE_LIMITED)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await verify_webhook_conditions(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

    @pytest.mark.asyncio
    async def test_verify_conditions_updates_webhook_status_mismatch(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test that webhook status is updated when database and API don't match."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        sample_webhook.webhook_exists = True  # DB says exists
        mock_gitlab_service.check_webhook_exists_on_resource = AsyncMock(
            return_value=(False, None)  # API says doesn't exist
        )

        # Act
        # Should not raise BreakLoopException when webhook doesn't exist (allows installation)
        await verify_webhook_conditions(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert webhook status was updated to match API
        mock_webhook_store.update_webhook.assert_called_once_with(
            sample_webhook, {'webhook_exists': False}
        )


class TestInstallWebhookOnResource:
    """Test cases for install_webhook_on_resource function."""

    @pytest.mark.asyncio
    async def test_install_webhook_success(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test successful webhook installation."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'

        # Act
        webhook_id, status = await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert
        assert webhook_id == 'webhook-id-123'
        assert status is None
        mock_gitlab_service.install_webhook.assert_called_once()
        mock_webhook_store.update_webhook.assert_called_once()
        # Verify update_webhook was called with correct fields (using keyword arguments)
        call_args = mock_webhook_store.update_webhook.call_args
        assert call_args[1]['webhook'] == sample_webhook
        update_fields = call_args[1]['update_fields']
        assert update_fields['webhook_exists'] is True
        assert update_fields['webhook_url'] == GITLAB_WEBHOOK_URL
        assert 'webhook_secret' in update_fields
        assert 'webhook_uuid' in update_fields
        assert 'scopes' in update_fields

    @pytest.mark.asyncio
    async def test_install_webhook_group_resource(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test webhook installation for a group resource."""
        # Arrange
        resource_type = GitLabResourceType.GROUP
        resource_id = 'group-456'

        # Act
        webhook_id, status = await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert
        assert webhook_id == 'webhook-id-123'
        # Verify install_webhook was called with GROUP type
        call_args = mock_gitlab_service.install_webhook.call_args
        assert call_args[1]['resource_type'] == resource_type
        assert call_args[1]['resource_id'] == resource_id

    @pytest.mark.asyncio
    async def test_install_webhook_rate_limited(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when installation is rate limited."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.install_webhook = AsyncMock(
            return_value=(None, WebhookStatus.RATE_LIMITED)
        )

        # Act & Assert
        with pytest.raises(BreakLoopException):
            await install_webhook_on_resource(
                gitlab_service=mock_gitlab_service,
                resource_type=resource_type,
                resource_id=resource_id,
                webhook_store=mock_webhook_store,
                webhook=sample_webhook,
            )

        # Should not update webhook on rate limit
        mock_webhook_store.update_webhook.assert_not_called()

    @pytest.mark.asyncio
    async def test_install_webhook_installation_fails(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test when webhook installation fails."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'
        mock_gitlab_service.install_webhook = AsyncMock(return_value=(None, None))

        # Act
        webhook_id, status = await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert
        assert webhook_id is None
        assert status is None
        # Should not update webhook when installation fails
        mock_webhook_store.update_webhook.assert_not_called()

    @pytest.mark.asyncio
    async def test_install_webhook_generates_unique_secrets(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test that unique webhook secrets and UUIDs are generated."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'

        # Act - First call
        webhook_id1, _ = await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Capture first call's values before resetting
        call1_secret = mock_webhook_store.update_webhook.call_args_list[0][1][
            'update_fields'
        ]['webhook_secret']
        call1_uuid = mock_webhook_store.update_webhook.call_args_list[0][1][
            'update_fields'
        ]['webhook_uuid']

        # Reset mocks and call again
        mock_gitlab_service.install_webhook.reset_mock()
        mock_webhook_store.update_webhook.reset_mock()

        # Act - Second call
        webhook_id2, _ = await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Capture second call's values
        call2_secret = mock_webhook_store.update_webhook.call_args_list[0][1][
            'update_fields'
        ]['webhook_secret']
        call2_uuid = mock_webhook_store.update_webhook.call_args_list[0][1][
            'update_fields'
        ]['webhook_uuid']

        # Assert - Secrets and UUIDs should be different
        assert call1_secret != call2_secret
        assert call1_uuid != call2_uuid

    @pytest.mark.asyncio
    async def test_install_webhook_uses_correct_webhook_name_and_url(
        self, mock_gitlab_service, mock_webhook_store, sample_webhook
    ):
        """Test that correct webhook name and URL are used."""
        # Arrange
        resource_type = GitLabResourceType.PROJECT
        resource_id = 'project-123'

        # Act
        await install_webhook_on_resource(
            gitlab_service=mock_gitlab_service,
            resource_type=resource_type,
            resource_id=resource_id,
            webhook_store=mock_webhook_store,
            webhook=sample_webhook,
        )

        # Assert
        call_args = mock_gitlab_service.install_webhook.call_args
        assert call_args[1]['webhook_name'] == 'OpenHands Resolver'
        assert call_args[1]['webhook_url'] == GITLAB_WEBHOOK_URL
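Taken together, the tests above imply a specific control flow for verify_webhook_conditions: every failed check raises BreakLoopException; stale DB records are deleted only on a definitive negative (missing resource or lost admin access), never on a rate limit; and a DB/API mismatch is reconciled via update_webhook before installation proceeds. A hedged reconstruction of that flow, inferred from the assertions rather than read from the module's source:

# Illustrative reconstruction of the behavior the tests above pin down.
async def verify_webhook_conditions(
    gitlab_service, resource_type, resource_id, webhook_store, webhook
):
    exists, status = await gitlab_service.check_resource_exists(
        resource_type, resource_id
    )
    if not exists:
        if status != WebhookStatus.RATE_LIMITED:
            # Definitive negative: the resource is gone, drop the stale record.
            await webhook_store.delete_webhook(webhook)
        raise BreakLoopException()

    has_access, status = await gitlab_service.check_user_has_admin_access_to_resource(
        resource_type, resource_id
    )
    if not has_access:
        if status != WebhookStatus.RATE_LIMITED:
            await webhook_store.delete_webhook(webhook)
        raise BreakLoopException()

    on_resource, status = await gitlab_service.check_webhook_exists_on_resource(
        resource_type, resource_id, GITLAB_WEBHOOK_URL
    )
    if status == WebhookStatus.RATE_LIMITED or on_resource:
        # Nothing to install right now, either back off or it is already there.
        raise BreakLoopException()
    if webhook.webhook_exists != on_resource:
        # Reconcile the DB with what the API reports before installing.
        await webhook_store.update_webhook(webhook, {'webhook_exists': on_resource})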
@@ -546,7 +546,6 @@ async def test_keycloak_callback_blocked_email_domain(mock_request):
|
||||
)
|
||||
mock_token_manager.disable_keycloak_user = AsyncMock()
|
||||
|
||||
mock_domain_blocker.is_active.return_value = True
|
||||
mock_domain_blocker.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
@@ -600,7 +599,6 @@ async def test_keycloak_callback_allowed_email_domain(mock_request):
|
||||
mock_token_manager.store_idp_tokens = AsyncMock()
|
||||
mock_token_manager.validate_offline_token = AsyncMock(return_value=True)
|
||||
|
||||
mock_domain_blocker.is_active.return_value = True
|
||||
mock_domain_blocker.is_domain_blocked.return_value = False
|
||||
|
||||
mock_verifier.is_active.return_value = True
|
||||
@@ -621,7 +619,7 @@ async def test_keycloak_callback_allowed_email_domain(mock_request):
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_keycloak_callback_domain_blocking_inactive(mock_request):
|
||||
"""Test keycloak_callback when domain blocking is not active."""
|
||||
"""Test keycloak_callback when email domain is not blocked."""
|
||||
# Arrange
|
||||
with (
|
||||
patch('server.routes.auth.token_manager') as mock_token_manager,
|
||||
@@ -654,7 +652,7 @@ async def test_keycloak_callback_domain_blocking_inactive(mock_request):
|
||||
mock_token_manager.store_idp_tokens = AsyncMock()
|
||||
mock_token_manager.validate_offline_token = AsyncMock(return_value=True)
|
||||
|
||||
mock_domain_blocker.is_active.return_value = False
|
||||
mock_domain_blocker.is_domain_blocked.return_value = False
|
||||
|
||||
mock_verifier.is_active.return_value = True
|
||||
mock_verifier.is_user_allowed.return_value = True
|
||||
@@ -666,7 +664,7 @@ async def test_keycloak_callback_domain_blocking_inactive(mock_request):
|
||||
|
||||
# Assert
|
||||
assert isinstance(result, RedirectResponse)
|
||||
mock_domain_blocker.is_domain_blocked.assert_not_called()
|
||||
mock_domain_blocker.is_domain_blocked.assert_called_once_with('user@colsch.us')
|
||||
mock_token_manager.disable_keycloak_user.assert_not_called()
|
||||
|
||||
|
||||
@@ -705,8 +703,6 @@ async def test_keycloak_callback_missing_email(mock_request):
|
||||
mock_token_manager.store_idp_tokens = AsyncMock()
|
||||
mock_token_manager.validate_offline_token = AsyncMock(return_value=True)
|
||||
|
||||
mock_domain_blocker.is_active.return_value = True
|
||||
|
||||
mock_verifier.is_active.return_value = True
|
||||
mock_verifier.is_user_allowed.return_value = True
|
||||
|
||||
|
||||
@@ -1,33 +1,21 @@
|
||||
"""Unit tests for DomainBlocker class."""
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
from server.auth.domain_blocker import DomainBlocker
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def domain_blocker():
|
||||
"""Create a DomainBlocker instance for testing."""
|
||||
return DomainBlocker()
|
||||
def mock_store():
|
||||
"""Create a mock BlockedEmailDomainStore for testing."""
|
||||
return MagicMock()
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
'blocked_domains,expected',
|
||||
[
|
||||
(['colsch.us', 'other-domain.com'], True),
|
||||
(['example.com'], True),
|
||||
([], False),
|
||||
],
|
||||
)
|
||||
def test_is_active(domain_blocker, blocked_domains, expected):
|
||||
"""Test that is_active returns correct value based on blocked domains configuration."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = blocked_domains
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_active()
|
||||
|
||||
# Assert
|
||||
assert result == expected
|
||||
@pytest.fixture
|
||||
def domain_blocker(mock_store):
|
||||
"""Create a DomainBlocker instance for testing with a mocked store."""
|
||||
return DomainBlocker(store=mock_store)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
@@ -69,94 +57,104 @@ def test_extract_domain_invalid_emails(domain_blocker, email, expected):
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_is_domain_blocked_when_inactive(domain_blocker):
|
||||
"""Test that is_domain_blocked returns False when blocking is not active."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = []
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('user@colsch.us')
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
|
||||
|
||||
def test_is_domain_blocked_with_none_email(domain_blocker):
|
||||
def test_is_domain_blocked_with_none_email(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked returns False when email is None."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked(None)
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
mock_store.is_domain_blocked.assert_not_called()
|
||||
|
||||
|
||||
def test_is_domain_blocked_with_empty_email(domain_blocker):
|
||||
def test_is_domain_blocked_with_empty_email(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked returns False when email is empty."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('')
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
mock_store.is_domain_blocked.assert_not_called()
|
||||
|
||||
|
||||
def test_is_domain_blocked_with_invalid_email(domain_blocker):
|
||||
def test_is_domain_blocked_with_invalid_email(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked returns False when email format is invalid."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('invalid-email')
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
mock_store.is_domain_blocked.assert_not_called()
|
||||
|
||||
|
||||
def test_is_domain_blocked_domain_not_blocked(domain_blocker):
|
||||
"""Test that is_domain_blocked returns False when domain is not in blocked list."""
|
||||
def test_is_domain_blocked_domain_not_blocked(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked returns False when domain is not blocked."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com']
|
||||
mock_store.is_domain_blocked.return_value = False
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('user@example.com')
|
||||
|
||||
# Assert
|
||||
assert result is False
|
||||
mock_store.is_domain_blocked.assert_called_once_with('example.com')
|
||||
|
||||
|
||||
def test_is_domain_blocked_domain_blocked(domain_blocker):
|
||||
"""Test that is_domain_blocked returns True when domain is in blocked list."""
|
||||
def test_is_domain_blocked_domain_blocked(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked returns True when domain is blocked."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('user@colsch.us')
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_store.is_domain_blocked.assert_called_once_with('colsch.us')
|
||||
|
||||
|
||||
def test_is_domain_blocked_case_insensitive(domain_blocker):
|
||||
"""Test that is_domain_blocked performs case-insensitive domain matching."""
|
||||
def test_is_domain_blocked_case_insensitive(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked performs case-insensitive domain extraction."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked('user@COLSCH.US')
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_store.is_domain_blocked.assert_called_once_with('colsch.us')
|
||||
|
||||
|
||||
def test_is_domain_blocked_multiple_blocked_domains(domain_blocker):
|
||||
"""Test that is_domain_blocked correctly checks against multiple blocked domains."""
|
||||
def test_is_domain_blocked_with_whitespace(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked handles emails with whitespace correctly."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com', 'blocked.org']
|
||||
mock_store.is_domain_blocked.return_value = True
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked(' user@colsch.us ')
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
mock_store.is_domain_blocked.assert_called_once_with('colsch.us')
|
||||
|
||||
|
||||
def test_is_domain_blocked_multiple_blocked_domains(domain_blocker, mock_store):
|
||||
"""Test that is_domain_blocked correctly checks multiple domains."""
|
||||
# Arrange
|
||||
mock_store.is_domain_blocked.side_effect = lambda domain: domain in [
|
||||
'other-domain.com',
|
||||
'blocked.org',
|
||||
]
|
||||
|
||||
# Act
|
||||
result1 = domain_blocker.is_domain_blocked('user@other-domain.com')
|
||||
@@ -167,109 +165,71 @@ def test_is_domain_blocked_multiple_blocked_domains(domain_blocker):
|
||||
assert result1 is True
|
||||
assert result2 is True
|
||||
assert result3 is False
|
||||
assert mock_store.is_domain_blocked.call_count == 3
|
||||
|
||||
|
||||
def test_is_domain_blocked_with_whitespace(domain_blocker):
|
||||
"""Test that is_domain_blocked handles emails with whitespace correctly."""
|
||||
# Arrange
|
||||
domain_blocker.blocked_domains = ['colsch.us']
|
||||
|
||||
# Act
|
||||
result = domain_blocker.is_domain_blocked(' user@colsch.us ')
|
||||
|
||||
# Assert
|
||||
assert result is True
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# TLD Blocking Tests (patterns starting with '.')
|
||||
# ============================================================================
|
||||
|
||||
|
||||
def test_is_domain_blocked_tld_pattern_blocks_matching_domain(domain_blocker):
def test_is_domain_blocked_tld_pattern_blocks_matching_domain(
    domain_blocker, mock_store
):
    """Test that TLD pattern blocks domains ending with that TLD."""
    # Arrange
    domain_blocker.blocked_domains = ['.us']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@company.us')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('company.us')


def test_is_domain_blocked_tld_pattern_blocks_subdomain_with_tld(domain_blocker):
def test_is_domain_blocked_tld_pattern_blocks_subdomain_with_tld(
    domain_blocker, mock_store
):
    """Test that TLD pattern blocks subdomains with that TLD."""
    # Arrange
    domain_blocker.blocked_domains = ['.us']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@subdomain.company.us')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('subdomain.company.us')


def test_is_domain_blocked_tld_pattern_does_not_block_different_tld(domain_blocker):
def test_is_domain_blocked_tld_pattern_does_not_block_different_tld(
    domain_blocker, mock_store
):
    """Test that TLD pattern does not block domains with different TLD."""
    # Arrange
    domain_blocker.blocked_domains = ['.us']
    mock_store.is_domain_blocked.return_value = False

    # Act
    result = domain_blocker.is_domain_blocked('user@company.com')

    # Assert
    assert result is False
    mock_store.is_domain_blocked.assert_called_once_with('company.com')


def test_is_domain_blocked_tld_pattern_does_not_block_substring_match(
    domain_blocker,
):
    """Test that TLD pattern does not block domains that contain but don't end with the TLD."""
    # Arrange
    domain_blocker.blocked_domains = ['.us']

    # Act
    result = domain_blocker.is_domain_blocked('user@focus.com')

    # Assert
    assert result is False


def test_is_domain_blocked_tld_pattern_case_insensitive(domain_blocker):
def test_is_domain_blocked_tld_pattern_case_insensitive(domain_blocker, mock_store):
    """Test that TLD pattern matching is case-insensitive."""
    # Arrange
    domain_blocker.blocked_domains = ['.us']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@COMPANY.US')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('company.us')


def test_is_domain_blocked_multiple_tld_patterns(domain_blocker):
    """Test blocking with multiple TLD patterns."""
    # Arrange
    domain_blocker.blocked_domains = ['.us', '.vn', '.com']

    # Act
    result_us = domain_blocker.is_domain_blocked('user@test.us')
    result_vn = domain_blocker.is_domain_blocked('user@test.vn')
    result_com = domain_blocker.is_domain_blocked('user@test.com')
    result_org = domain_blocker.is_domain_blocked('user@test.org')

    # Assert
    assert result_us is True
    assert result_vn is True
    assert result_com is True
    assert result_org is False


def test_is_domain_blocked_tld_pattern_with_multi_level_tld(domain_blocker):
def test_is_domain_blocked_tld_pattern_with_multi_level_tld(domain_blocker, mock_store):
    """Test that TLD pattern works with multi-level TLDs like .co.uk."""
    # Arrange
    domain_blocker.blocked_domains = ['.co.uk']
    mock_store.is_domain_blocked.side_effect = lambda domain: domain.endswith('.co.uk')

    # Act
    result_match = domain_blocker.is_domain_blocked('user@example.co.uk')
@@ -282,81 +242,87 @@ def test_is_domain_blocked_tld_pattern_with_multi_level_tld(domain_blocker):
    assert result_no_match is False


# ============================================================================
# Subdomain Blocking Tests (domain patterns now block subdomains)
# ============================================================================


def test_is_domain_blocked_domain_pattern_blocks_exact_match(domain_blocker):
def test_is_domain_blocked_domain_pattern_blocks_exact_match(
    domain_blocker, mock_store
):
    """Test that domain pattern blocks exact domain match."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@example.com')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('example.com')


def test_is_domain_blocked_domain_pattern_blocks_subdomain(domain_blocker):
def test_is_domain_blocked_domain_pattern_blocks_subdomain(domain_blocker, mock_store):
    """Test that domain pattern blocks subdomains of that domain."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@subdomain.example.com')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('subdomain.example.com')


def test_is_domain_blocked_domain_pattern_blocks_multi_level_subdomain(
    domain_blocker,
    domain_blocker, mock_store
):
    """Test that domain pattern blocks multi-level subdomains."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked('user@api.v2.example.com')

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with('api.v2.example.com')


def test_is_domain_blocked_domain_pattern_does_not_block_similar_domain(
    domain_blocker,
    domain_blocker, mock_store
):
    """Test that domain pattern does not block domains that contain but don't match the pattern."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = False

    # Act
    result = domain_blocker.is_domain_blocked('user@notexample.com')

    # Assert
    assert result is False
    mock_store.is_domain_blocked.assert_called_once_with('notexample.com')


def test_is_domain_blocked_domain_pattern_does_not_block_different_tld(
    domain_blocker,
    domain_blocker, mock_store
):
    """Test that domain pattern does not block same domain with different TLD."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = False

    # Act
    result = domain_blocker.is_domain_blocked('user@example.org')

    # Assert
    assert result is False
    mock_store.is_domain_blocked.assert_called_once_with('example.org')


def test_is_domain_blocked_subdomain_pattern_blocks_exact_and_nested(domain_blocker):
def test_is_domain_blocked_subdomain_pattern_blocks_exact_and_nested(
    domain_blocker, mock_store
):
    """Test that blocking a subdomain also blocks its nested subdomains."""
    # Arrange
    domain_blocker.blocked_domains = ['api.example.com']
    mock_store.is_domain_blocked.side_effect = (
        lambda domain: 'api.example.com' in domain
    )

    # Act
    result_exact = domain_blocker.is_domain_blocked('user@api.example.com')
@@ -369,80 +335,10 @@ def test_is_domain_blocked_subdomain_pattern_blocks_exact_and_nested(domain_bloc
    assert result_parent is False


# ============================================================================
# Mixed Pattern Tests (TLD + domain patterns together)
# ============================================================================


def test_is_domain_blocked_mixed_patterns_tld_and_domain(domain_blocker):
    """Test blocking with both TLD and domain patterns."""
    # Arrange
    domain_blocker.blocked_domains = ['.us', 'openhands.dev']

    # Act
    result_tld = domain_blocker.is_domain_blocked('user@company.us')
    result_domain = domain_blocker.is_domain_blocked('user@openhands.dev')
    result_subdomain = domain_blocker.is_domain_blocked('user@api.openhands.dev')
    result_allowed = domain_blocker.is_domain_blocked('user@example.com')

    # Assert
    assert result_tld is True
    assert result_domain is True
    assert result_subdomain is True
    assert result_allowed is False


def test_is_domain_blocked_overlapping_patterns(domain_blocker):
    """Test that overlapping patterns (TLD and specific domain) both work."""
    # Arrange
    domain_blocker.blocked_domains = ['.us', 'test.us']

    # Act
    result_specific = domain_blocker.is_domain_blocked('user@test.us')
    result_other_us = domain_blocker.is_domain_blocked('user@other.us')

    # Assert
    assert result_specific is True
    assert result_other_us is True


def test_is_domain_blocked_complex_multi_pattern_scenario(domain_blocker):
    """Test complex scenario with multiple TLD and domain patterns."""
    # Arrange
    domain_blocker.blocked_domains = [
        '.us',
        '.vn',
        'test.com',
        'openhands.dev',
    ]

    # Act & Assert
    # TLD patterns
    assert domain_blocker.is_domain_blocked('user@anything.us') is True
    assert domain_blocker.is_domain_blocked('user@company.vn') is True

    # Domain patterns (exact)
    assert domain_blocker.is_domain_blocked('user@test.com') is True
    assert domain_blocker.is_domain_blocked('user@openhands.dev') is True

    # Domain patterns (subdomains)
    assert domain_blocker.is_domain_blocked('user@api.test.com') is True
    assert domain_blocker.is_domain_blocked('user@staging.openhands.dev') is True

    # Not blocked
    assert domain_blocker.is_domain_blocked('user@allowed.com') is False
    assert domain_blocker.is_domain_blocked('user@example.org') is False


# ============================================================================
# Edge Case Tests
# ============================================================================


def test_is_domain_blocked_domain_with_hyphens(domain_blocker):
def test_is_domain_blocked_domain_with_hyphens(domain_blocker, mock_store):
    """Test that domain patterns work with hyphenated domains."""
    # Arrange
    domain_blocker.blocked_domains = ['my-company.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result_exact = domain_blocker.is_domain_blocked('user@my-company.com')
@@ -451,12 +347,13 @@ def test_is_domain_blocked_domain_with_hyphens(domain_blocker):
    # Assert
    assert result_exact is True
    assert result_subdomain is True
    assert mock_store.is_domain_blocked.call_count == 2


def test_is_domain_blocked_domain_with_numbers(domain_blocker):
def test_is_domain_blocked_domain_with_numbers(domain_blocker, mock_store):
    """Test that domain patterns work with numeric domains."""
    # Arrange
    domain_blocker.blocked_domains = ['test123.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result_exact = domain_blocker.is_domain_blocked('user@test123.com')
@@ -465,24 +362,13 @@ def test_is_domain_blocked_domain_with_numbers(domain_blocker):
    # Assert
    assert result_exact is True
    assert result_subdomain is True
    assert mock_store.is_domain_blocked.call_count == 2


def test_is_domain_blocked_short_tld(domain_blocker):
    """Test that short TLD patterns work correctly."""
    # Arrange
    domain_blocker.blocked_domains = ['.io']

    # Act
    result = domain_blocker.is_domain_blocked('user@company.io')

    # Assert
    assert result is True


def test_is_domain_blocked_very_long_subdomain_chain(domain_blocker):
def test_is_domain_blocked_very_long_subdomain_chain(domain_blocker, mock_store):
    """Test that blocking works with very long subdomain chains."""
    # Arrange
    domain_blocker.blocked_domains = ['example.com']
    mock_store.is_domain_blocked.return_value = True

    # Act
    result = domain_blocker.is_domain_blocked(
@@ -491,3 +377,19 @@ def test_is_domain_blocked_very_long_subdomain_chain(domain_blocker):

    # Assert
    assert result is True
    mock_store.is_domain_blocked.assert_called_once_with(
        'level4.level3.level2.level1.example.com'
    )


def test_is_domain_blocked_handles_store_exception(domain_blocker, mock_store):
    """Test that is_domain_blocked returns False when store raises an exception."""
    # Arrange
    mock_store.is_domain_blocked.side_effect = Exception('Database connection error')

    # Act
    result = domain_blocker.is_domain_blocked('user@example.com')

    # Assert
    assert result is False
    mock_store.is_domain_blocked.assert_called_once_with('example.com')

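For context, the matching rules these tests pin down can be captured in a short sketch. This is a hypothetical reconstruction for illustration only, not the enterprise `DomainBlocker` implementation: the real class also consults a store (the `mock_store.is_domain_blocked` calls asserted above) and returns `False` if that lookup raises.

```python
# Hypothetical sketch of the pattern matching exercised by the tests above.
def matches_blocked_pattern(email: str, blocked_domains: list[str]) -> bool:
    # Normalize whitespace and case, then keep the part after '@'.
    domain = email.strip().lower().rsplit('@', 1)[-1]
    for pattern in blocked_domains:
        pattern = pattern.lower()
        if pattern.startswith('.'):
            # TLD pattern: block any domain ending with the suffix
            # ('.us' blocks 'company.us' but not 'focus.com').
            if domain.endswith(pattern):
                return True
        # Domain pattern: block the exact domain and its subdomains
        # ('example.com' blocks 'api.example.com' but not 'notexample.com').
        elif domain == pattern or domain.endswith('.' + pattern):
            return True
    return False
```
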
@@ -86,12 +86,12 @@ class TestGithubV1ConversationRouting(TestCase):
    def setUp(self):
        """Set up test fixtures."""
        # Create a proper UserData instance instead of MagicMock
        user_data = UserData(
        self.user_data = UserData(
            user_id=123, username='testuser', keycloak_user_id='test-keycloak-id'
        )

        # Create a mock raw_payload
        raw_payload = Message(
        self.raw_payload = Message(
            source=SourceType.GITHUB,
            message={
                'payload': {
@@ -101,8 +101,10 @@ class TestGithubV1ConversationRouting(TestCase):
            },
        )

        self.github_issue = GithubIssue(
            user_info=user_data,
    def _create_github_issue(self):
        """Create a GithubIssue instance for testing."""
        return GithubIssue(
            user_info=self.user_data,
            full_repo_name='test/repo',
            issue_number=123,
            installation_id=456,
@@ -110,35 +112,72 @@ class TestGithubV1ConversationRouting(TestCase):
            should_extract=True,
            send_summary_instruction=False,
            is_public_repo=True,
            raw_payload=raw_payload,
            raw_payload=self.raw_payload,
            uuid='test-uuid',
            title='Test Issue',
            description='Test issue description',
            previous_comments=[],
            v1=False,
            v1_enabled=False,
        )

    @pytest.mark.asyncio
    @patch('integrations.github.github_view.initialize_conversation')
    @patch('integrations.github.github_view.get_user_v1_enabled_setting')
    async def test_initialize_sets_v1_enabled_from_setting_when_false(
        self, mock_get_v1_setting, mock_initialize_conversation
    ):
        """Test that initialize_new_conversation sets v1_enabled from get_user_v1_enabled_setting."""
        mock_get_v1_setting.return_value = False
        mock_initialize_conversation.return_value = MagicMock(
            conversation_id='new-conversation-id'
        )

        github_issue = self._create_github_issue()
        await github_issue.initialize_new_conversation()

        # Verify get_user_v1_enabled_setting was called with correct user ID
        mock_get_v1_setting.assert_called_once_with('test-keycloak-id')
        # Verify v1_enabled was set to False
        self.assertFalse(github_issue.v1_enabled)

    @pytest.mark.asyncio
    @patch('integrations.github.github_view.get_user_v1_enabled_setting')
    async def test_initialize_sets_v1_enabled_from_setting_when_true(
        self, mock_get_v1_setting
    ):
        """Test that initialize_new_conversation sets v1_enabled to True when setting returns True."""
        mock_get_v1_setting.return_value = True

        github_issue = self._create_github_issue()
        await github_issue.initialize_new_conversation()

        # Verify get_user_v1_enabled_setting was called with correct user ID
        mock_get_v1_setting.assert_called_once_with('test-keycloak-id')
        # Verify v1_enabled was set to True
        self.assertTrue(github_issue.v1_enabled)

    @pytest.mark.asyncio
    @patch.object(GithubIssue, '_create_v0_conversation')
    @patch.object(GithubIssue, '_create_v1_conversation')
    async def test_create_new_conversation_routes_to_v0_when_disabled(
        self, mock_create_v1, mock_create_v0, mock_get_v1_setting
        self, mock_create_v1, mock_create_v0
    ):
        """Test that conversation creation routes to V0 when v1_enabled is False."""
        # Mock v1_enabled as False
        mock_get_v1_setting.return_value = False
        mock_create_v0.return_value = None
        mock_create_v1.return_value = None

        github_issue = self._create_github_issue()
        github_issue.v1_enabled = False

        # Mock parameters
        jinja_env = MagicMock()
        git_provider_tokens = MagicMock()
        conversation_metadata = MagicMock()
        saas_user_auth = MagicMock()

        # Call the method
        await self.github_issue.create_new_conversation(
            jinja_env, git_provider_tokens, conversation_metadata
        await github_issue.create_new_conversation(
            jinja_env, git_provider_tokens, conversation_metadata, saas_user_auth
        )

        # Verify V0 was called and V1 was not
@@ -148,62 +187,31 @@ class TestGithubV1ConversationRouting(TestCase):
        mock_create_v1.assert_not_called()

    @pytest.mark.asyncio
    @patch('integrations.github.github_view.get_user_v1_enabled_setting')
    @patch.object(GithubIssue, '_create_v0_conversation')
    @patch.object(GithubIssue, '_create_v1_conversation')
    async def test_create_new_conversation_routes_to_v1_when_enabled(
        self, mock_create_v1, mock_create_v0, mock_get_v1_setting
        self, mock_create_v1, mock_create_v0
    ):
        """Test that conversation creation routes to V1 when v1_enabled is True."""
        # Mock v1_enabled as True
        mock_get_v1_setting.return_value = True
        mock_create_v0.return_value = None
        mock_create_v1.return_value = None

        github_issue = self._create_github_issue()
        github_issue.v1_enabled = True

        # Mock parameters
        jinja_env = MagicMock()
        git_provider_tokens = MagicMock()
        conversation_metadata = MagicMock()
        saas_user_auth = MagicMock()

        # Call the method
        await self.github_issue.create_new_conversation(
            jinja_env, git_provider_tokens, conversation_metadata
        await github_issue.create_new_conversation(
            jinja_env, git_provider_tokens, conversation_metadata, saas_user_auth
        )

        # Verify V1 was called and V0 was not
        mock_create_v1.assert_called_once_with(
            jinja_env, git_provider_tokens, conversation_metadata
            jinja_env, saas_user_auth, conversation_metadata
        )
        mock_create_v0.assert_not_called()

    @pytest.mark.asyncio
    @patch('integrations.github.github_view.get_user_v1_enabled_setting')
    @patch.object(GithubIssue, '_create_v0_conversation')
    @patch.object(GithubIssue, '_create_v1_conversation')
    async def test_create_new_conversation_fallback_on_v1_setting_error(
        self, mock_create_v1, mock_create_v0, mock_get_v1_setting
    ):
        """Test that conversation creation falls back to V0 when _create_v1_conversation fails."""
        # Mock v1_enabled as True so V1 is attempted
        mock_get_v1_setting.return_value = True
        # Mock _create_v1_conversation to raise an exception
        mock_create_v1.side_effect = Exception('V1 conversation creation failed')
        mock_create_v0.return_value = None

        # Mock parameters
        jinja_env = MagicMock()
        git_provider_tokens = MagicMock()
        conversation_metadata = MagicMock()

        # Call the method
        await self.github_issue.create_new_conversation(
            jinja_env, git_provider_tokens, conversation_metadata
        )

        # Verify V1 was attempted first, then V0 was called as fallback
        mock_create_v1.assert_called_once_with(
            jinja_env, git_provider_tokens, conversation_metadata
        )
        mock_create_v0.assert_called_once_with(
            jinja_env, git_provider_tokens, conversation_metadata
        )

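The routing contract these tests encode is: prefer V1 when `v1_enabled` is set, and fall back to V0 if V1 creation raises. A hedged sketch of that shape, with names and argument orders taken from the assertions above; the body is an assumption, not the actual `GithubIssue` code:

```python
class GithubIssueSketch:
    """Illustrative only; mirrors the v1/v0 routing asserted by the tests."""

    async def create_new_conversation(
        self, jinja_env, git_provider_tokens, conversation_metadata, saas_user_auth
    ):
        if self.v1_enabled:
            try:
                # New conversation stack; note the argument order the tests
                # assert for V1: (jinja_env, saas_user_auth, metadata).
                await self._create_v1_conversation(
                    jinja_env, saas_user_auth, conversation_metadata
                )
                return
            except Exception:
                pass  # fall back to the legacy V0 path below
        await self._create_v0_conversation(
            jinja_env, git_provider_tokens, conversation_metadata
        )
```
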
@@ -673,7 +673,6 @@ async def test_saas_user_auth_from_signed_token_blocked_domain(mock_config):
    signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256')

    with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker:
        mock_domain_blocker.is_active.return_value = True
        mock_domain_blocker.is_domain_blocked.return_value = True

        # Act & Assert
@@ -703,7 +702,6 @@ async def test_saas_user_auth_from_signed_token_allowed_domain(mock_config):
    signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256')

    with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker:
        mock_domain_blocker.is_active.return_value = True
        mock_domain_blocker.is_domain_blocked.return_value = False

        # Act
@@ -720,7 +718,7 @@ async def test_saas_user_auth_from_signed_token_allowed_domain(mock_config):

@pytest.mark.asyncio
async def test_saas_user_auth_from_signed_token_domain_blocking_inactive(mock_config):
    """Test that saas_user_auth_from_signed_token succeeds when domain blocking is not active."""
    """Test that saas_user_auth_from_signed_token succeeds when email domain is not blocked."""
    # Arrange
    access_payload = {
        'sub': 'test_user_id',
@@ -737,7 +735,7 @@ async def test_saas_user_auth_from_signed_token_domain_blocking_inactive(mock_co
    signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256')

    with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker:
        mock_domain_blocker.is_active.return_value = False
        mock_domain_blocker.is_domain_blocked.return_value = False

        # Act
        result = await saas_user_auth_from_signed_token(signed_token)
@@ -745,4 +743,4 @@ async def test_saas_user_auth_from_signed_token_domain_blocking_inactive(mock_co
    # Assert
    assert isinstance(result, SaasUserAuth)
    assert result.user_id == 'test_user_id'
    mock_domain_blocker.is_domain_blocked.assert_not_called()
    mock_domain_blocker.is_domain_blocked.assert_called_once_with('user@colsch.us')

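The auth-time check these tests exercise decodes the signed token, pulls the email, and consults `domain_blocker` before building a `SaasUserAuth`. A hedged sketch, drawn only from the assertions above (note the last test now expects `is_domain_blocked` to be consulted even when `is_active` is False); `AuthError` is a hypothetical stand-in name:

```python
# Hedged sketch only; the real logic lives in server.auth.saas_user_auth.
class AuthError(Exception):
    """Hypothetical stand-in for the real authentication error type."""


def check_email_domain(email: str, domain_blocker) -> None:
    # The tests assert is_domain_blocked('user@colsch.us') is always called,
    # so this sketch consults it unconditionally and rejects blocked domains.
    if domain_blocker.is_domain_blocked(email):
        raise AuthError('email domain is blocked')
```
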
@@ -1,12 +1,14 @@
"""Tests for SharedEventService."""

import os
from datetime import UTC, datetime
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, MagicMock, patch
from uuid import uuid4

import pytest
from server.sharing.filesystem_shared_event_service import (
    SharedEventServiceImpl,
from server.sharing.google_cloud_shared_event_service import (
    GoogleCloudSharedEventService,
    GoogleCloudSharedEventServiceInjector,
)
from server.sharing.shared_conversation_info_service import (
    SharedConversationInfoService,
@@ -25,18 +27,24 @@ def mock_shared_conversation_info_service():
    return AsyncMock(spec=SharedConversationInfoService)


@pytest.fixture
def mock_bucket():
    """Create a mock GCS bucket."""
    return MagicMock()


@pytest.fixture
def mock_event_service():
    """Create a mock EventService."""
    """Create a mock EventService to be returned by get_event_service."""
    return AsyncMock(spec=EventService)


@pytest.fixture
def shared_event_service(mock_shared_conversation_info_service, mock_event_service):
def shared_event_service(mock_shared_conversation_info_service, mock_bucket):
    """Create a SharedEventService for testing."""
    return SharedEventServiceImpl(
    return GoogleCloudSharedEventService(
        shared_conversation_info_service=mock_shared_conversation_info_service,
        event_service=mock_event_service,
        bucket=mock_bucket,
    )

@@ -79,11 +87,16 @@ class TestSharedEventService:
    ):
        """Test that get_shared_event returns an event for a public conversation."""
        conversation_id = sample_public_conversation.id
        event_id = 'test_event_id'
        event_id = uuid4()

        # Mock the public conversation service to return a public conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation

        # Mock get_event_service to return our mock event service
        shared_event_service.get_event_service = AsyncMock(
            return_value=mock_event_service
        )

        # Mock the event service to return an event
        mock_event_service.get_event.return_value = sample_event

@@ -92,10 +105,8 @@ class TestSharedEventService:

        # Verify the result
        assert result == sample_event
        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        mock_event_service.get_event.assert_called_once_with(event_id)
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        mock_event_service.get_event.assert_called_once_with(conversation_id, event_id)

    async def test_get_shared_event_returns_none_for_private_conversation(
        self,
@@ -105,20 +116,18 @@ class TestSharedEventService:
    ):
        """Test that get_shared_event returns None for a private conversation."""
        conversation_id = uuid4()
        event_id = 'test_event_id'
        event_id = uuid4()

        # Mock the public conversation service to return None (private conversation)
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None
        # Mock get_event_service to return None (private conversation)
        shared_event_service.get_event_service = AsyncMock(return_value=None)

        # Call the method
        result = await shared_event_service.get_shared_event(conversation_id, event_id)

        # Verify the result
        assert result is None
        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        # Event service should not be called
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        # Event service should not be called since get_event_service returns None
        mock_event_service.get_event.assert_not_called()

    async def test_search_shared_events_returns_events_for_public_conversation(
@@ -132,8 +141,10 @@ class TestSharedEventService:
        """Test that search_shared_events returns events for a public conversation."""
        conversation_id = sample_public_conversation.id

        # Mock the public conversation service to return a public conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation
        # Mock get_event_service to return our mock event service
        shared_event_service.get_event_service = AsyncMock(
            return_value=mock_event_service
        )

        # Mock the event service to return events
        mock_event_page = EventPage(items=[], next_page_id=None)
@@ -150,11 +161,9 @@ class TestSharedEventService:
        assert result == mock_event_page
        assert len(result.items) == 0  # Empty list as we mocked

        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        mock_event_service.search_events.assert_called_once_with(
            conversation_id__eq=conversation_id,
            conversation_id=conversation_id,
            kind__eq='ActionEvent',
            timestamp__gte=None,
            timestamp__lt=None,
@@ -172,8 +181,8 @@ class TestSharedEventService:
        """Test that search_shared_events returns empty page for a private conversation."""
        conversation_id = uuid4()

        # Mock the public conversation service to return None (private conversation)
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None
        # Mock get_event_service to return None (private conversation)
        shared_event_service.get_event_service = AsyncMock(return_value=None)

        # Call the method
        result = await shared_event_service.search_shared_events(
@@ -186,9 +195,7 @@ class TestSharedEventService:
        assert len(result.items) == 0
        assert result.next_page_id is None

        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        # Event service should not be called
        mock_event_service.search_events.assert_not_called()

@@ -202,8 +209,10 @@ class TestSharedEventService:
        """Test that count_shared_events returns count for a public conversation."""
        conversation_id = sample_public_conversation.id

        # Mock the public conversation service to return a public conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation
        # Mock get_event_service to return our mock event service
        shared_event_service.get_event_service = AsyncMock(
            return_value=mock_event_service
        )

        # Mock the event service to return a count
        mock_event_service.count_events.return_value = 5
@@ -217,15 +226,12 @@ class TestSharedEventService:
        # Verify the result
        assert result == 5

        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        mock_event_service.count_events.assert_called_once_with(
            conversation_id__eq=conversation_id,
            conversation_id=conversation_id,
            kind__eq='ActionEvent',
            timestamp__gte=None,
            timestamp__lt=None,
            sort_order=EventSortOrder.TIMESTAMP,
        )

    async def test_count_shared_events_returns_zero_for_private_conversation(
@@ -237,8 +243,8 @@ class TestSharedEventService:
        """Test that count_shared_events returns 0 for a private conversation."""
        conversation_id = uuid4()

        # Mock the public conversation service to return None (private conversation)
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None
        # Mock get_event_service to return None (private conversation)
        shared_event_service.get_event_service = AsyncMock(return_value=None)

        # Call the method
        result = await shared_event_service.count_shared_events(
@@ -248,9 +254,7 @@ class TestSharedEventService:
        # Verify the result
        assert result == 0

        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )
        shared_event_service.get_event_service.assert_called_once_with(conversation_id)
        # Event service should not be called
        mock_event_service.count_events.assert_not_called()

@@ -264,10 +268,12 @@ class TestSharedEventService:
    ):
        """Test that batch_get_shared_events returns events for a public conversation."""
        conversation_id = sample_public_conversation.id
        event_ids = ['event1', 'event2']
        event_ids = [uuid4(), uuid4()]

        # Mock the public conversation service to return a public conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation
        # Mock get_event_service to return our mock event service
        shared_event_service.get_event_service = AsyncMock(
            return_value=mock_event_service
        )

        # Mock the event service to return events
        mock_event_service.get_event.side_effect = [sample_event, None]
@@ -282,11 +288,8 @@ class TestSharedEventService:
        assert result[0] == sample_event
        assert result[1] is None

        # Verify that get_shared_conversation_info was called for each event
        assert (
            mock_shared_conversation_info_service.get_shared_conversation_info.call_count
            == 2
        )
        # Verify that get_event_service was called for each event
        assert shared_event_service.get_event_service.call_count == 2
        # Verify that get_event was called for each event
        assert mock_event_service.get_event.call_count == 2

@@ -298,10 +301,10 @@ class TestSharedEventService:
    ):
        """Test that batch_get_shared_events returns None for a private conversation."""
        conversation_id = uuid4()
        event_ids = ['event1', 'event2']
        event_ids = [uuid4(), uuid4()]

        # Mock the public conversation service to return None (private conversation)
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None
        # Mock get_event_service to return None (private conversation)
        shared_event_service.get_event_service = AsyncMock(return_value=None)

        # Call the method
        result = await shared_event_service.batch_get_shared_events(
@@ -313,11 +316,8 @@ class TestSharedEventService:
        assert result[0] is None
        assert result[1] is None

        # Verify that get_shared_conversation_info was called for each event
        assert (
            mock_shared_conversation_info_service.get_shared_conversation_info.call_count
            == 2
        )
        # Verify that get_event_service was called for each event
        assert shared_event_service.get_event_service.call_count == 2
        # Event service should not be called
        mock_event_service.get_event.assert_not_called()

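Each operation above follows the same gating pattern: resolve an `EventService` for the conversation, treat `None` as "not shared", and only then delegate. A minimal sketch of one method, assuming the names used by the test doubles; this shows the shape, not the actual `GoogleCloudSharedEventService` code:

```python
# Minimal sketch of the gating pattern the tests assert.
async def get_shared_event(self, conversation_id, event_id):
    event_service = await self.get_event_service(conversation_id)
    if event_service is None:
        # Private (non-shared) conversation: expose nothing.
        return None
    return await event_service.get_event(conversation_id, event_id)
```
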
@@ -333,8 +333,10 @@ class TestSharedEventService:
        timestamp_gte = datetime(2023, 1, 1, tzinfo=UTC)
        timestamp_lt = datetime(2023, 12, 31, tzinfo=UTC)

        # Mock the public conversation service to return a public conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation
        # Mock get_event_service to return our mock event service
        shared_event_service.get_event_service = AsyncMock(
            return_value=mock_event_service
        )

        # Mock the event service to return events
        mock_event_page = EventPage(items=[], next_page_id='next_page')
@@ -355,7 +357,7 @@ class TestSharedEventService:
        assert result == mock_event_page

        mock_event_service.search_events.assert_called_once_with(
            conversation_id__eq=conversation_id,
            conversation_id=conversation_id,
            kind__eq='ObservationEvent',
            timestamp__gte=timestamp_gte,
            timestamp__lt=timestamp_lt,
@@ -363,3 +365,224 @@ class TestSharedEventService:
            page_id='current_page',
            limit=50,
        )


class TestGoogleCloudSharedEventServiceGetEventService:
    """Test cases for GoogleCloudSharedEventService.get_event_service method."""

    async def test_get_event_service_returns_event_service_for_shared_conversation(
        self,
        shared_event_service,
        mock_shared_conversation_info_service,
        sample_public_conversation,
    ):
        """Test that get_event_service returns an EventService for a shared conversation."""
        conversation_id = sample_public_conversation.id

        # Mock the shared conversation info service to return a shared conversation
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation

        # Call the method
        result = await shared_event_service.get_event_service(conversation_id)

        # Verify the result
        assert result is not None
        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )

    async def test_get_event_service_returns_none_for_non_shared_conversation(
        self,
        shared_event_service,
        mock_shared_conversation_info_service,
    ):
        """Test that get_event_service returns None for a non-shared conversation."""
        conversation_id = uuid4()

        # Mock the shared conversation info service to return None
        mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None

        # Call the method
        result = await shared_event_service.get_event_service(conversation_id)

        # Verify the result
        assert result is None
        mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with(
            conversation_id
        )


class TestGoogleCloudSharedEventServiceInjector:
    """Test cases for GoogleCloudSharedEventServiceInjector."""

    def test_bucket_name_from_environment_variable(self):
        """Test that bucket_name is read from FILE_STORE_PATH environment variable."""
        test_bucket_name = 'test-bucket-name'
        with patch.dict(os.environ, {'FILE_STORE_PATH': test_bucket_name}):
            # Create a new injector instance to pick up the environment variable
            # Note: The class attribute is evaluated at class definition time,
            # so we need to test that the attribute exists and can be overridden
            injector = GoogleCloudSharedEventServiceInjector()
            injector.bucket_name = os.environ.get('FILE_STORE_PATH')
            assert injector.bucket_name == test_bucket_name

    def test_bucket_name_default_value_when_env_not_set(self):
        """Test that bucket_name is None when FILE_STORE_PATH is not set."""
        with patch.dict(os.environ, {}, clear=True):
            # Remove FILE_STORE_PATH if it exists
            os.environ.pop('FILE_STORE_PATH', None)
            injector = GoogleCloudSharedEventServiceInjector()
            # The bucket_name will be whatever was set at class definition time
            # or None if FILE_STORE_PATH was not set when the class was defined
            assert hasattr(injector, 'bucket_name')

    async def test_injector_yields_google_cloud_shared_event_service(self):
        """Test that the injector yields a GoogleCloudSharedEventService instance."""
        mock_state = MagicMock()
        mock_request = MagicMock()
        mock_db_session = AsyncMock()

        # Create the injector
        injector = GoogleCloudSharedEventServiceInjector()
        injector.bucket_name = 'test-bucket'

        # Mock the get_db_session context manager
        mock_db_context = AsyncMock()
        mock_db_context.__aenter__.return_value = mock_db_session
        mock_db_context.__aexit__.return_value = None

        # Mock storage.Client and bucket
        mock_storage_client = MagicMock()
        mock_bucket = MagicMock()
        mock_storage_client.bucket.return_value = mock_bucket

        with (
            patch(
                'server.sharing.google_cloud_shared_event_service.storage.Client',
                return_value=mock_storage_client,
            ),
            patch(
                'openhands.app_server.config.get_db_session',
                return_value=mock_db_context,
            ),
        ):
            # Call the inject method
            async for service in injector.inject(mock_state, mock_request):
                # Verify the service is an instance of GoogleCloudSharedEventService
                assert isinstance(service, GoogleCloudSharedEventService)
                assert service.bucket == mock_bucket

        # Verify the storage client was called with the correct bucket name
        mock_storage_client.bucket.assert_called_once_with('test-bucket')

    async def test_injector_uses_bucket_name_from_instance(self):
        """Test that the injector uses the bucket_name from the instance."""
        mock_state = MagicMock()
        mock_request = MagicMock()
        mock_db_session = AsyncMock()

        # Create the injector with a specific bucket name
        injector = GoogleCloudSharedEventServiceInjector()
        injector.bucket_name = 'my-custom-bucket'

        # Mock the get_db_session context manager
        mock_db_context = AsyncMock()
        mock_db_context.__aenter__.return_value = mock_db_session
        mock_db_context.__aexit__.return_value = None

        # Mock storage.Client and bucket
        mock_storage_client = MagicMock()
        mock_bucket = MagicMock()
        mock_storage_client.bucket.return_value = mock_bucket

        with (
            patch(
                'server.sharing.google_cloud_shared_event_service.storage.Client',
                return_value=mock_storage_client,
            ),
            patch(
                'openhands.app_server.config.get_db_session',
                return_value=mock_db_context,
            ),
        ):
            # Call the inject method
            async for service in injector.inject(mock_state, mock_request):
                pass

        # Verify the storage client was called with the custom bucket name
        mock_storage_client.bucket.assert_called_once_with('my-custom-bucket')

    async def test_injector_creates_sql_shared_conversation_info_service(self):
        """Test that the injector creates SQLSharedConversationInfoService with db_session."""
        mock_state = MagicMock()
        mock_request = MagicMock()
        mock_db_session = AsyncMock()

        # Create the injector
        injector = GoogleCloudSharedEventServiceInjector()
        injector.bucket_name = 'test-bucket'

        # Mock the get_db_session context manager
        mock_db_context = AsyncMock()
        mock_db_context.__aenter__.return_value = mock_db_session
        mock_db_context.__aexit__.return_value = None

        # Mock storage.Client and bucket
        mock_storage_client = MagicMock()
        mock_bucket = MagicMock()
        mock_storage_client.bucket.return_value = mock_bucket

        with (
            patch(
                'server.sharing.google_cloud_shared_event_service.storage.Client',
                return_value=mock_storage_client,
            ),
            patch(
                'openhands.app_server.config.get_db_session',
                return_value=mock_db_context,
            ),
            patch(
                'server.sharing.google_cloud_shared_event_service.SQLSharedConversationInfoService'
            ) as mock_sql_service_class,
        ):
            mock_sql_service = MagicMock()
            mock_sql_service_class.return_value = mock_sql_service

            # Call the inject method
            async for service in injector.inject(mock_state, mock_request):
                # Verify the service has the correct shared_conversation_info_service
                assert service.shared_conversation_info_service == mock_sql_service

        # Verify SQLSharedConversationInfoService was created with db_session
        mock_sql_service_class.assert_called_once_with(db_session=mock_db_session)

    async def test_injector_works_without_request(self):
        """Test that the injector works when request is None."""
        mock_state = MagicMock()
        mock_db_session = AsyncMock()

        # Create the injector
        injector = GoogleCloudSharedEventServiceInjector()
        injector.bucket_name = 'test-bucket'

        # Mock the get_db_session context manager
        mock_db_context = AsyncMock()
        mock_db_context.__aenter__.return_value = mock_db_session
        mock_db_context.__aexit__.return_value = None

        # Mock storage.Client and bucket
        mock_storage_client = MagicMock()
        mock_bucket = MagicMock()
        mock_storage_client.bucket.return_value = mock_bucket

        with patch(
            'server.sharing.google_cloud_shared_event_service.storage.Client',
            return_value=mock_storage_client,
        ):
            with patch(
                'openhands.app_server.config.get_db_session',
                return_value=mock_db_context,
            ):
                # Call the inject method with request=None
                async for service in injector.inject(mock_state, request=None):
                    assert isinstance(service, GoogleCloudSharedEventService)

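These tests exercise an async-generator injector: it builds a GCS bucket from `bucket_name`, opens a DB session, and yields the assembled service. A hedged sketch of that shape, using the same module paths the tests patch; the exact wiring is inferred from the assertions, not verified against the source:

```python
# Hedged sketch of GoogleCloudSharedEventServiceInjector.inject; names mirror
# the patch targets above (storage.Client, get_db_session,
# SQLSharedConversationInfoService) and imports are elided for brevity.
async def inject(self, state, request=None):
    bucket = storage.Client().bucket(self.bucket_name)
    async with get_db_session() as db_session:
        yield GoogleCloudSharedEventService(
            shared_conversation_info_service=SQLSharedConversationInfoService(
                db_session=db_session
            ),
            bucket=bucket,
        )
```
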
@@ -33,10 +33,10 @@ npm run dev:mock:saas
These commands set `VITE_MOCK_API=true`, which activates the MSW Service Worker to intercept requests.

> [!NOTE]
> **OSS vs SaaS Mode**
> **OpenHands vs SaaS Mode**
>
> OpenHands runs in two modes:
> - **OSS mode**: For local/self-hosted deployments where users provide their own LLM API keys and configure git providers manually
> - **OpenHands mode**: For local/self-hosted deployments where users provide their own LLM API keys and configure git providers manually
> - **SaaS mode**: For the cloud offering with billing, managed API keys, and OAuth-based GitHub integration
>
> Use `dev:mock:saas` when working on SaaS-specific features like billing, API key management, or subscription flows.

48 frontend/__tests__/components/chat-status-indicator.test.tsx Normal file
@@ -0,0 +1,48 @@
import { render, screen } from "@testing-library/react";
import { describe, it, expect, vi } from "vitest";
import ChatStatusIndicator from "#/components/features/chat/chat-status-indicator";

vi.mock("#/icons/debug-stackframe-dot.svg?react", () => ({
  default: (props: any) => (
    <svg data-testid="debug-stackframe-dot" {...props} />
  ),
}));

describe("ChatStatusIndicator", () => {
  it("renders the status indicator with status text", () => {
    render(
      <ChatStatusIndicator
        status="Waiting for sandbox"
        statusColor="#FFD600"
      />
    );

    expect(
      screen.getByTestId("chat-status-indicator"),
    ).toBeInTheDocument();
    expect(screen.getByText("Waiting for sandbox")).toBeInTheDocument();
  });

  it("passes the statusColor to the DebugStackframeDot icon", () => {
    render(
      <ChatStatusIndicator
        status="Error"
        statusColor="#FF684E"
      />
    );

    const icon = screen.getByTestId("debug-stackframe-dot");
    expect(icon).toHaveAttribute("color", "#FF684E");
  });

  it("renders the DebugStackframeDot icon", () => {
    render(
      <ChatStatusIndicator
        status="Loading"
        statusColor="#FFD600"
      />
    );

    expect(screen.getByTestId("debug-stackframe-dot")).toBeInTheDocument();
  });
});

@@ -10,13 +10,14 @@ import {
} from "vitest";
import { render, screen, waitFor, within } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { MemoryRouter } from "react-router";
import { MemoryRouter, Route, Routes } from "react-router";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { renderWithProviders } from "test-utils";
import { renderWithProviders, useParamsMock } from "test-utils";
import type { Message } from "#/message";
import { SUGGESTIONS } from "#/utils/suggestions";
import { ChatInterface } from "#/components/features/chat/chat-interface";
import { useWsClient } from "#/context/ws-client-provider";
import { useConversationId } from "#/hooks/use-conversation-id";
import { useErrorMessageStore } from "#/stores/error-message-store";
import { useOptimisticUserMessageStore } from "#/stores/optimistic-user-message-store";
import { useConfig } from "#/hooks/query/use-config";
@@ -24,24 +25,15 @@ import { useGetTrajectory } from "#/hooks/mutation/use-get-trajectory";
import { useUnifiedUploadFiles } from "#/hooks/mutation/use-unified-upload-files";
import { OpenHandsAction } from "#/types/core/actions";
import { useEventStore } from "#/stores/use-event-store";
import { useAgentState } from "#/hooks/use-agent-state";
import { AgentState } from "#/types/agent-state";

vi.mock("#/context/ws-client-provider");
vi.mock("#/hooks/query/use-config");
vi.mock("#/hooks/mutation/use-get-trajectory");
vi.mock("#/hooks/mutation/use-unified-upload-files");
vi.mock("#/hooks/use-conversation-id");

// Mock React Router hooks at the top level
vi.mock("react-router", async () => {
  const actual = await vi.importActual("react-router");
  return {
    ...actual,
    useNavigate: () => vi.fn(),
    useParams: () => ({ conversationId: "test-conversation-id" }),
    useRouteLoaderData: vi.fn(() => ({})),
  };
});

// Mock other hooks that might be used by the component
vi.mock("#/hooks/use-user-providers", () => ({
  useUserProviders: () => ({
    providers: [],
@@ -59,6 +51,12 @@ vi.mock("#/hooks/use-conversation-name-context-menu", () => ({
  }),
}));

vi.mock("#/hooks/use-agent-state", () => ({
  useAgentState: vi.fn(() => ({
    curAgentState: AgentState.AWAITING_USER_INPUT,
  })),
}));

// Helper function to render with Router context
const renderChatInterfaceWithRouter = () =>
  renderWithProviders(
@@ -79,13 +77,26 @@ const renderChatInterface = (messages: Message[]) =>
const renderWithQueryClient = (
  ui: React.ReactElement,
  queryClient: QueryClient,
  route = "/test-conversation-id",
) =>
  render(
    <QueryClientProvider client={queryClient}>
      <MemoryRouter>{ui}</MemoryRouter>
      <MemoryRouter initialEntries={[route]}>
        <Routes>
          <Route path="/:conversationId" element={ui} />
          <Route path="/" element={ui} />
        </Routes>
      </MemoryRouter>
    </QueryClientProvider>,
  );

beforeEach(() => {
  useParamsMock.mockReturnValue({ conversationId: "test-conversation-id" });
  vi.mocked(useConversationId).mockReturnValue({
    conversationId: "test-conversation-id",
  });
});

describe("ChatInterface - Chat Suggestions", () => {
|
||||
// Create a new QueryClient for each test
|
||||
let queryClient: QueryClient;
|
||||
@@ -121,7 +132,9 @@ describe("ChatInterface - Chat Suggestions", () => {
|
||||
mutateAsync: vi.fn(),
|
||||
isLoading: false,
|
||||
});
|
||||
(useUnifiedUploadFiles as unknown as ReturnType<typeof vi.fn>).mockReturnValue({
|
||||
(
|
||||
useUnifiedUploadFiles as unknown as ReturnType<typeof vi.fn>
|
||||
).mockReturnValue({
|
||||
mutateAsync: vi
|
||||
.fn()
|
||||
.mockResolvedValue({ skipped_files: [], uploaded_files: [] }),
|
||||
@@ -252,7 +265,9 @@ describe("ChatInterface - Empty state", () => {
|
||||
mutateAsync: vi.fn(),
|
||||
isLoading: false,
|
||||
});
|
||||
(useUnifiedUploadFiles as unknown as ReturnType<typeof vi.fn>).mockReturnValue({
|
||||
(
|
||||
useUnifiedUploadFiles as unknown as ReturnType<typeof vi.fn>
|
||||
).mockReturnValue({
|
||||
mutateAsync: vi
|
||||
.fn()
|
||||
.mockResolvedValue({ skipped_files: [], uploaded_files: [] }),
|
||||
@@ -344,6 +359,28 @@ describe("ChatInterface - Empty state", () => {
|
||||
);
|
||||
});
|
||||
|
||||
describe('ChatInterface - Status Indicator', () => {
|
||||
it("should render ChatStatusIndicator when agent is not awaiting user input / conversation is NOT ready", () => {
|
||||
vi.mocked(useAgentState).mockReturnValue({
|
||||
curAgentState: AgentState.LOADING,
|
||||
});
|
||||
|
||||
renderChatInterfaceWithRouter();
|
||||
|
||||
expect(screen.getByTestId("chat-status-indicator")).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it("should NOT render ChatStatusIndicator when agent is awaiting user input / conversation is ready", () => {
|
||||
vi.mocked(useAgentState).mockReturnValue({
|
||||
curAgentState: AgentState.AWAITING_USER_INPUT,
|
||||
});
|
||||
|
||||
renderChatInterfaceWithRouter();
|
||||
|
||||
expect(screen.queryByTestId("chat-status-indicator")).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe.skip("ChatInterface - General functionality", () => {
|
||||
beforeAll(() => {
|
||||
// mock useScrollToBottom hook
|
||||
@@ -605,3 +642,43 @@ describe.skip("ChatInterface - General functionality", () => {
|
||||
expect(screen.getByTestId("feedback-actions")).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe("ChatInterface – skeleton loading state", () => {
|
||||
test("renders chat message skeleton when loading existing conversation", () => {
|
||||
(useWsClient as unknown as ReturnType<typeof vi.fn>).mockReturnValue({
|
||||
send: vi.fn(),
|
||||
isLoadingMessages: true,
|
||||
parsedEvents: [],
|
||||
});
|
||||
|
||||
renderWithQueryClient(<ChatInterface />, new QueryClient());
|
||||
|
||||
expect(screen.getByTestId("chat-messages-skeleton")).toBeInTheDocument();
|
||||
|
||||
expect(screen.queryByTestId("loading-spinner")).not.toBeInTheDocument();
|
||||
|
||||
expect(screen.queryByTestId("chat-suggestions")).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
test("does not render skeleton for new conversation (shows spinner instead)", () => {
|
||||
useParamsMock.mockReturnValue({ conversationId: undefined } as unknown as {
|
||||
conversationId: string;
|
||||
});
|
||||
(useConversationId as unknown as ReturnType<typeof vi.fn>).mockReturnValue({
|
||||
conversationId: "",
|
||||
});
|
||||
(useWsClient as unknown as ReturnType<typeof vi.fn>).mockReturnValue({
|
||||
send: vi.fn(),
|
||||
isLoadingMessages: true,
|
||||
parsedEvents: [],
|
||||
});
|
||||
|
||||
renderWithQueryClient(<ChatInterface />, new QueryClient(), "/");
|
||||
|
||||
expect(screen.getAllByTestId("loading-spinner").length).toBeGreaterThan(0);
|
||||
|
||||
expect(
|
||||
screen.queryByTestId("chat-messages-skeleton"),
|
||||
).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
@@ -0,0 +1,49 @@
import { render, screen } from "@testing-library/react";
import { describe, expect, it, vi } from "vitest";
import { GitLabWebhookManagerState } from "#/components/features/settings/git-settings/gitlab-webhook-manager-state";
import { I18nKey } from "#/i18n/declaration";

// Mock react-i18next
vi.mock("react-i18next", () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}));

describe("GitLabWebhookManagerState", () => {
  it("should render title and message with translated keys", () => {
    // Arrange
    const props = {
      titleKey: I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE,
      messageKey: I18nKey.GITLAB$WEBHOOK_MANAGER_LOADING,
    };

    // Act
    render(<GitLabWebhookManagerState {...props} />);

    // Assert
    expect(
      screen.getByText(I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE),
    ).toBeInTheDocument();
    expect(
      screen.getByText(I18nKey.GITLAB$WEBHOOK_MANAGER_LOADING),
    ).toBeInTheDocument();
  });

  it("should apply custom className to container", () => {
    // Arrange
    const customClassName = "custom-container-class";
    const props = {
      titleKey: I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE,
      messageKey: I18nKey.GITLAB$WEBHOOK_MANAGER_LOADING,
      className: customClassName,
    };

    // Act
    const { container } = render(<GitLabWebhookManagerState {...props} />);

    // Assert
    const containerElement = container.firstChild as HTMLElement;
    expect(containerElement).toHaveClass(customClassName);
  });
});

@@ -0,0 +1,416 @@
import { render, screen, waitFor } from "@testing-library/react";
import userEvent from "@testing-library/user-event";
import { describe, expect, it, vi, beforeEach } from "vitest";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { GitLabWebhookManager } from "#/components/features/settings/git-settings/gitlab-webhook-manager";
import { integrationService } from "#/api/integration-service/integration-service.api";
import type {
  GitLabResource,
  ResourceInstallationResult,
} from "#/api/integration-service/integration-service.types";
import * as ToastHandlers from "#/utils/custom-toast-handlers";

// Mock react-i18next
vi.mock("react-i18next", () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}));

// Mock toast handlers
vi.mock("#/utils/custom-toast-handlers", () => ({
  displaySuccessToast: vi.fn(),
  displayErrorToast: vi.fn(),
}));

const mockResources: GitLabResource[] = [
  {
    id: "1",
    name: "Test Project",
    full_path: "user/test-project",
    type: "project",
    webhook_installed: false,
    webhook_uuid: null,
    last_synced: null,
  },
  {
    id: "10",
    name: "Test Group",
    full_path: "test-group",
    type: "group",
    webhook_installed: true,
    webhook_uuid: "uuid-123",
    last_synced: "2024-01-01T00:00:00Z",
  },
];

describe("GitLabWebhookManager", () => {
  const queryClient = new QueryClient({
    defaultOptions: {
      queries: { retry: false },
      mutations: { retry: false },
    },
  });

  beforeEach(() => {
    queryClient.clear();
    vi.clearAllMocks();
  });

  const renderComponent = () => {
    return render(
      <QueryClientProvider client={queryClient}>
        <GitLabWebhookManager />
      </QueryClientProvider>,
    );
  };

  it("should display loading state when fetching resources", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockImplementation(
      () => new Promise(() => {}), // Never resolves
    );

    // Act
    renderComponent();

    // Assert
    expect(
      screen.getByText("GITLAB$WEBHOOK_MANAGER_LOADING"),
    ).toBeInTheDocument();
  });

  it("should display error state when fetching fails", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockRejectedValue(
      new Error("Failed to fetch"),
    );

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      expect(
        screen.getByText("GITLAB$WEBHOOK_MANAGER_ERROR"),
      ).toBeInTheDocument();
    });
  });

  it("should display no resources message when list is empty", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [],
    });

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      expect(
        screen.getByText("GITLAB$WEBHOOK_MANAGER_NO_RESOURCES"),
      ).toBeInTheDocument();
    });
  });

  it("should display resources table when resources are available", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: mockResources,
    });

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      expect(screen.getByText("Test Project")).toBeInTheDocument();
      expect(screen.getByText("Test Group")).toBeInTheDocument();
    });

    expect(screen.getByText("user/test-project")).toBeInTheDocument();
    expect(screen.getByText("test-group")).toBeInTheDocument();
  });

  it("should display correct resource types in table", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: mockResources,
    });

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      const projectType = screen.getByText("project");
      const groupType = screen.getByText("group");
      expect(projectType).toBeInTheDocument();
      expect(groupType).toBeInTheDocument();
    });
  });

  it("should disable reinstall button when webhook is already installed", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "10",
          name: "Test Group",
          full_path: "test-group",
          type: "group",
          webhook_installed: true,
          webhook_uuid: "uuid-123",
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      const reinstallButton = screen.getByTestId(
        "reinstall-webhook-button-group:10",
      );
      expect(reinstallButton).toBeDisabled();
    });
  });

  it("should enable reinstall button when webhook is not installed", async () => {
    // Arrange
    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();

    // Assert
    await waitFor(() => {
      const reinstallButton = screen.getByTestId(
        "reinstall-webhook-button-project:1",
      );
      expect(reinstallButton).not.toBeDisabled();
    });
  });

  it("should call reinstall service when reinstall button is clicked", async () => {
    // Arrange
    const user = userEvent.setup();
    const reinstallSpy = vi.spyOn(integrationService, "reinstallGitLabWebhook");

    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();
    const reinstallButton = await screen.findByTestId(
      "reinstall-webhook-button-project:1",
    );
    await user.click(reinstallButton);

    // Assert
    await waitFor(() => {
      expect(reinstallSpy).toHaveBeenCalledWith({
        resource: {
          type: "project",
          id: "1",
        },
      });
    });
  });

  it("should show loading state on button during reinstallation", async () => {
    // Arrange
    const user = userEvent.setup();
    let resolveReinstall: (value: ResourceInstallationResult) => void;
    const reinstallPromise = new Promise<ResourceInstallationResult>(
      (resolve) => {
        resolveReinstall = resolve;
      },
    );

    vi.spyOn(integrationService, "reinstallGitLabWebhook").mockReturnValue(
      reinstallPromise,
    );

    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();
    const reinstallButton = await screen.findByTestId(
      "reinstall-webhook-button-project:1",
    );
    await user.click(reinstallButton);

    // Assert
    await waitFor(() => {
      expect(
        screen.getByText("GITLAB$WEBHOOK_REINSTALLING"),
      ).toBeInTheDocument();
    });

    // Cleanup
    resolveReinstall!({
      resource_id: "1",
      resource_type: "project",
      success: true,
      error: null,
    });
  });

  it("should display error message when reinstallation fails", async () => {
    // Arrange
    const user = userEvent.setup();
    const errorMessage = "Permission denied";
    vi.spyOn(integrationService, "reinstallGitLabWebhook").mockResolvedValue({
      resource_id: "1",
      resource_type: "project",
      success: false,
      error: errorMessage,
    });

    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();
    const reinstallButton = await screen.findByTestId(
      "reinstall-webhook-button-project:1",
    );
    await user.click(reinstallButton);

    // Assert
    await waitFor(() => {
      expect(screen.getByText(errorMessage)).toBeInTheDocument();
    });
  });

  it("should display success toast when reinstallation succeeds", async () => {
    // Arrange
    const user = userEvent.setup();
    const displaySuccessToastSpy = vi.spyOn(
      ToastHandlers,
      "displaySuccessToast",
    );

    vi.spyOn(integrationService, "reinstallGitLabWebhook").mockResolvedValue({
      resource_id: "1",
      resource_type: "project",
      success: true,
      error: null,
    });

    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();
    const reinstallButton = await screen.findByTestId(
      "reinstall-webhook-button-project:1",
    );
    await user.click(reinstallButton);

    // Assert
    await waitFor(() => {
      expect(displaySuccessToastSpy).toHaveBeenCalledWith(
        "GITLAB$WEBHOOK_REINSTALL_SUCCESS",
      );
    });
  });

  it("should display error toast when reinstallation throws error", async () => {
    // Arrange
    const user = userEvent.setup();
    const displayErrorToastSpy = vi.spyOn(ToastHandlers, "displayErrorToast");
    const errorMessage = "Network error";

    vi.spyOn(integrationService, "reinstallGitLabWebhook").mockRejectedValue(
      new Error(errorMessage),
    );

    vi.spyOn(integrationService, "getGitLabResources").mockResolvedValue({
      resources: [
        {
          id: "1",
          name: "Test Project",
          full_path: "user/test-project",
          type: "project",
          webhook_installed: false,
          webhook_uuid: null,
          last_synced: null,
        },
      ],
    });

    // Act
    renderComponent();
    const reinstallButton = await screen.findByTestId(
      "reinstall-webhook-button-project:1",
    );
    await user.click(reinstallButton);

    // Assert
    await waitFor(() => {
      expect(displayErrorToastSpy).toHaveBeenCalledWith(errorMessage);
    });
  });
});
@@ -0,0 +1,97 @@
import { render, screen } from "@testing-library/react";
import { describe, expect, it, vi } from "vitest";
import { WebhookStatusBadge } from "#/components/features/settings/git-settings/webhook-status-badge";

// Mock react-i18next
vi.mock("react-i18next", () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}));

describe("WebhookStatusBadge", () => {
  it("should display installed status when webhook is installed", () => {
    // Arrange
    const props = {
      webhookInstalled: true,
    };

    // Act
    render(<WebhookStatusBadge {...props} />);

    // Assert
    const badge = screen.getByText("GITLAB$WEBHOOK_STATUS_INSTALLED");
    expect(badge).toBeInTheDocument();
  });

  it("should display not installed status when webhook is not installed", () => {
    // Arrange
    const props = {
      webhookInstalled: false,
    };

    // Act
    render(<WebhookStatusBadge {...props} />);

    // Assert
    const badge = screen.getByText("GITLAB$WEBHOOK_STATUS_NOT_INSTALLED");
    expect(badge).toBeInTheDocument();
  });

  it("should display installed status when installation result is successful", () => {
    // Arrange
    const props = {
      webhookInstalled: false,
      installationResult: {
        success: true,
        error: null,
      },
    };

    // Act
    render(<WebhookStatusBadge {...props} />);

    // Assert
    const badge = screen.getByText("GITLAB$WEBHOOK_STATUS_INSTALLED");
    expect(badge).toBeInTheDocument();
  });

  it("should display failed status when installation result has error", () => {
    // Arrange
    const props = {
      webhookInstalled: false,
      installationResult: {
        success: false,
        error: "Installation failed",
      },
    };

    // Act
    render(<WebhookStatusBadge {...props} />);

    // Assert
    const badge = screen.getByText("GITLAB$WEBHOOK_STATUS_FAILED");
    expect(badge).toBeInTheDocument();
  });

  it("should show error message when installation fails", () => {
    // Arrange
    const errorMessage = "Permission denied";
    const props = {
      webhookInstalled: false,
      installationResult: {
        success: false,
        error: errorMessage,
      },
    };

    // Act
    render(<WebhookStatusBadge {...props} />);

    // Assert
    const badgeContainer = screen.getByText(
      "GITLAB$WEBHOOK_STATUS_FAILED",
    ).parentElement;
    expect(badgeContainer).toHaveAttribute("title", errorMessage);
  });
});
@@ -11,6 +11,7 @@ import {
import { screen, waitFor, render, cleanup } from "@testing-library/react";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { http, HttpResponse } from "msw";
import { MemoryRouter, Route, Routes } from "react-router";
import { useOptimisticUserMessageStore } from "#/stores/optimistic-user-message-store";
import { useBrowserStore } from "#/stores/browser-store";
import { useCommandStore } from "#/stores/command-store";
@@ -78,13 +79,22 @@ function renderWithWebSocketContext(

  return render(
    <QueryClientProvider client={queryClient}>
      <ConversationWebSocketProvider
        conversationId={conversationId}
        conversationUrl={conversationUrl}
        sessionApiKey={sessionApiKey}
      >
        {children}
      </ConversationWebSocketProvider>
      <MemoryRouter initialEntries={["/test-conversation-default"]}>
        <Routes>
          <Route
            path="/:conversationId"
            element={
              <ConversationWebSocketProvider
                conversationId={conversationId}
                conversationUrl={conversationUrl}
                sessionApiKey={sessionApiKey}
              >
                {children}
              </ConversationWebSocketProvider>
            }
          />
        </Routes>
      </MemoryRouter>
    </QueryClientProvider>,
  );
}

@@ -1,135 +0,0 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { renderHook } from "@testing-library/react";
import { useDocumentTitleFromState } from "#/hooks/use-document-title-from-state";
import { useActiveConversation } from "#/hooks/query/use-active-conversation";

// Mock the useActiveConversation hook
vi.mock("#/hooks/query/use-active-conversation");

const mockUseActiveConversation = vi.mocked(useActiveConversation);

describe("useDocumentTitleFromState", () => {
  const originalTitle = document.title;

  beforeEach(() => {
    vi.clearAllMocks();
    document.title = "Test";
  });

  afterEach(() => {
    document.title = originalTitle;
    vi.resetAllMocks();
  });

  it("should set document title to default suffix when no conversation", () => {
    mockUseActiveConversation.mockReturnValue({
      data: null,
    } as any);

    renderHook(() => useDocumentTitleFromState());

    expect(document.title).toBe("OpenHands");
  });

  it("should set document title to custom suffix when no conversation", () => {
    mockUseActiveConversation.mockReturnValue({
      data: null,
    } as any);

    renderHook(() => useDocumentTitleFromState("Custom App"));

    expect(document.title).toBe("Custom App");
  });

  it("should set document title with conversation title", () => {
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: "My Conversation",
        status: "RUNNING",
      },
    } as any);

    renderHook(() => useDocumentTitleFromState());

    expect(document.title).toBe("My Conversation | OpenHands");
  });

  it("should update document title when conversation title changes", () => {
    // Initial state - no conversation
    mockUseActiveConversation.mockReturnValue({
      data: null,
    } as any);

    const { rerender } = renderHook(() => useDocumentTitleFromState());
    expect(document.title).toBe("OpenHands");

    // Conversation with initial title
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: "Conversation 65e29",
        status: "RUNNING",
      },
    } as any);
    rerender();
    expect(document.title).toBe("Conversation 65e29 | OpenHands");

    // Conversation title updated to human-readable title
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: "Help me build a React app",
        status: "RUNNING",
      },
    } as any);
    rerender();
    expect(document.title).toBe("Help me build a React app | OpenHands");
  });

  it("should handle conversation without title", () => {
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: undefined,
        status: "RUNNING",
      },
    } as any);

    renderHook(() => useDocumentTitleFromState());

    expect(document.title).toBe("OpenHands");
  });

  it("should handle empty conversation title", () => {
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: "",
        status: "RUNNING",
      },
    } as any);

    renderHook(() => useDocumentTitleFromState());

    expect(document.title).toBe("OpenHands");
  });

  it("should reset document title on cleanup", () => {
    mockUseActiveConversation.mockReturnValue({
      data: {
        conversation_id: "123",
        title: "My Conversation",
        status: "RUNNING",
      },
    } as any);

    const { unmount } = renderHook(() => useDocumentTitleFromState());

    expect(document.title).toBe("My Conversation | OpenHands");

    unmount();

    expect(document.title).toBe("OpenHands");
  });
});
frontend/__tests__/routes/changes-tab.test.tsx (new file, 68 lines)
@@ -0,0 +1,68 @@
import { render, screen } from "@testing-library/react";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { describe, expect, it, vi } from "vitest";
import { MemoryRouter } from "react-router";
import GitChanges from "#/routes/changes-tab";
import { useUnifiedGetGitChanges } from "#/hooks/query/use-unified-get-git-changes";
import { useAgentState } from "#/hooks/use-agent-state";
import { AgentState } from "#/types/agent-state";

vi.mock("react-i18next", () => ({
  useTranslation: () => ({
    t: (key: string) => key,
  }),
}));

vi.mock("#/hooks/query/use-unified-get-git-changes");
vi.mock("#/hooks/use-agent-state");
vi.mock("#/hooks/use-conversation-id", () => ({
  useConversationId: () => ({ conversationId: "test-id" }),
}));

const wrapper = ({ children }: { children: React.ReactNode }) => (
  <MemoryRouter>
    <QueryClientProvider client={new QueryClient()}>
      {children}
    </QueryClientProvider>
  </MemoryRouter>
);

describe("Changes Tab", () => {
  it("should show EmptyChangesMessage when there are no changes", () => {
    vi.mocked(useUnifiedGetGitChanges).mockReturnValue({
      data: [],
      isLoading: false,
      isSuccess: true,
      isError: false,
      error: null,
      refetch: vi.fn(),
    });
    vi.mocked(useAgentState).mockReturnValue({
      curAgentState: AgentState.RUNNING,
    });

    render(<GitChanges />, { wrapper });

    expect(screen.getByText("DIFF_VIEWER$NO_CHANGES")).toBeInTheDocument();
  });

  it("should not show EmptyChangesMessage when there are changes", () => {
    vi.mocked(useUnifiedGetGitChanges).mockReturnValue({
      data: [{ path: "src/file.ts", status: "M" }],
      isLoading: false,
      isSuccess: true,
      isError: false,
      error: null,
      refetch: vi.fn(),
    });
    vi.mocked(useAgentState).mockReturnValue({
      curAgentState: AgentState.RUNNING,
    });

    render(<GitChanges />, { wrapper });

    expect(
      screen.queryByText("DIFF_VIEWER$NO_CHANGES"),
    ).not.toBeInTheDocument();
  });
});
@@ -13,6 +13,7 @@ import { MOCK_DEFAULT_USER_SETTINGS } from "#/mocks/handlers";
import { GetConfigResponse } from "#/api/option-service/option.types";
import * as ToastHandlers from "#/utils/custom-toast-handlers";
import { SecretsService } from "#/api/secrets-service";
import { integrationService } from "#/api/integration-service/integration-service.api";

const VALID_OSS_CONFIG: GetConfigResponse = {
  APP_MODE: "oss",
@@ -63,6 +64,15 @@ const renderGitSettingsScreen = () => {
        GITLAB$HOST_LABEL: "GitLab Host",
        BITBUCKET$TOKEN_LABEL: "Bitbucket Token",
        BITBUCKET$HOST_LABEL: "Bitbucket Host",
        SETTINGS$GITLAB: "GitLab",
        COMMON$STATUS: "Status",
        STATUS$CONNECTED: "Connected",
        SETTINGS$GITLAB_NOT_CONNECTED: "Not Connected",
        SETTINGS$GITLAB_REINSTALL_WEBHOOK: "Reinstall Webhook",
        SETTINGS$GITLAB_INSTALLING_WEBHOOK:
          "Installing GitLab webhook, please wait a few minutes.",
        SETTINGS$SAVING: "Saving...",
        ERROR$GENERIC: "An error occurred",
      },
    },
  },
@@ -356,7 +366,9 @@ describe("Form submission", () => {

    renderGitSettingsScreen();

    const azureDevOpsInput = await screen.findByTestId("azure-devops-token-input");
    const azureDevOpsInput = await screen.findByTestId(
      "azure-devops-token-input",
    );
    const submit = await screen.findByTestId("submit-button");

    await userEvent.type(azureDevOpsInput, "test-token");
@@ -560,3 +572,101 @@ describe("Status toasts", () => {
    expect(displayErrorToastSpy).toHaveBeenCalled();
  });
});

describe("GitLab Webhook Manager Integration", () => {
  it("should not render GitLab webhook manager in OSS mode", async () => {
    // Arrange
    const getConfigSpy = vi.spyOn(OptionService, "getConfig");
    getConfigSpy.mockResolvedValue(VALID_OSS_CONFIG);

    // Act
    renderGitSettingsScreen();
    await screen.findByTestId("git-settings-screen");

    // Assert
    await waitFor(() => {
      expect(
        screen.queryByText("GITLAB$WEBHOOK_MANAGER_TITLE"),
      ).not.toBeInTheDocument();
    });
  });

  it("should not render GitLab webhook manager in SaaS mode without APP_SLUG", async () => {
    // Arrange
    const getConfigSpy = vi.spyOn(OptionService, "getConfig");
    getConfigSpy.mockResolvedValue(VALID_SAAS_CONFIG);

    // Act
    renderGitSettingsScreen();
    await screen.findByTestId("git-settings-screen");

    // Assert
    await waitFor(() => {
      expect(
        screen.queryByText("GITLAB$WEBHOOK_MANAGER_TITLE"),
      ).not.toBeInTheDocument();
    });
  });

  it("should not render GitLab webhook manager when token is not set", async () => {
    // Arrange
    const getConfigSpy = vi.spyOn(OptionService, "getConfig");
    const getSettingsSpy = vi.spyOn(SettingsService, "getSettings");

    getConfigSpy.mockResolvedValue({
      ...VALID_SAAS_CONFIG,
      APP_SLUG: "test-slug",
    });
    getSettingsSpy.mockResolvedValue({
      ...MOCK_DEFAULT_USER_SETTINGS,
      provider_tokens_set: {},
    });

    // Act
    renderGitSettingsScreen();
    await screen.findByTestId("git-settings-screen");

    // Assert
    await waitFor(() => {
      expect(
        screen.queryByText("GITLAB$WEBHOOK_MANAGER_TITLE"),
      ).not.toBeInTheDocument();
    });
  });

  it("should render GitLab webhook manager when token is set", async () => {
    // Arrange
    const getConfigSpy = vi.spyOn(OptionService, "getConfig");
    const getSettingsSpy = vi.spyOn(SettingsService, "getSettings");
    const getResourcesSpy = vi.spyOn(
      integrationService,
      "getGitLabResources",
    );

    getConfigSpy.mockResolvedValue({
      ...VALID_SAAS_CONFIG,
      APP_SLUG: "test-slug",
    });
    getSettingsSpy.mockResolvedValue({
      ...MOCK_DEFAULT_USER_SETTINGS,
      provider_tokens_set: {
        gitlab: null,
      },
    });
    getResourcesSpy.mockResolvedValue({
      resources: [],
    });

    // Act
    renderGitSettingsScreen();
    await screen.findByTestId("git-settings-screen");

    // Assert
    await waitFor(() => {
      expect(
        screen.getByText("GITLAB$WEBHOOK_MANAGER_TITLE"),
      ).toBeInTheDocument();
      expect(getResourcesSpy).toHaveBeenCalled();
    });
  });
});

@@ -1,9 +1,26 @@
import { test, expect } from "vitest";
import { describe, it, expect, vi, test } from "vitest";
import {
  formatTimestamp,
  getExtension,
  removeApiKey,
} from "../../src/utils/utils";
import { getStatusText } from "#/utils/utils";
import { AgentState } from "#/types/agent-state";
import { I18nKey } from "#/i18n/declaration";

// Mock translations
const t = (key: string) => {
  const translations: { [key: string]: string } = {
    COMMON$WAITING_FOR_SANDBOX: "Waiting For Sandbox",
    COMMON$STOPPING: "Stopping",
    COMMON$STARTING: "Starting",
    COMMON$SERVER_STOPPED: "Server stopped",
    COMMON$RUNNING: "Running",
    CONVERSATION$READY: "Ready",
    CONVERSATION$ERROR_STARTING_CONVERSATION: "Error starting conversation",
  };
  return translations[key] || key;
};

test("removeApiKey", () => {
  const data = [{ args: { LLM_API_KEY: "key", LANGUAGE: "en" } }];
@@ -23,3 +40,143 @@ test("formatTimestamp", () => {
  const eveningDate = new Date("2021-10-10T22:10:10.000").toISOString();
  expect(formatTimestamp(eveningDate)).toBe("10/10/2021, 22:10:10");
});

describe("getStatusText", () => {
  it("returns STOPPING when pausing", () => {
    const result = getStatusText({
      isPausing: true,
      isTask: false,
      taskStatus: null,
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe(t(I18nKey.COMMON$STOPPING));
  });

  it("formats task status when polling a task", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: true,
      taskStatus: "WAITING_FOR_SANDBOX",
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe(t(I18nKey.COMMON$WAITING_FOR_SANDBOX));
  });

  it("returns task detail when task status is ERROR and detail exists", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: true,
      taskStatus: "ERROR",
      taskDetail: "Sandbox failed",
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe("Sandbox failed");
  });

  it("returns translated error when task status is ERROR and no detail", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: true,
      taskStatus: "ERROR",
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe(
      t(I18nKey.CONVERSATION$ERROR_STARTING_CONVERSATION),
    );
  });

  it("returns READY translation when task is ready", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: true,
      taskStatus: "READY",
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe(t(I18nKey.CONVERSATION$READY));
  });

  it("returns STARTING when starting status is true", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: false,
      taskStatus: null,
      taskDetail: null,
      isStartingStatus: true,
      isStopStatus: false,
      curAgentState: AgentState.INIT,
      t,
    });

    expect(result).toBe(t(I18nKey.COMMON$STARTING));
  });

  it("returns SERVER_STOPPED when stop status is true", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: false,
      taskStatus: null,
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: true,
      curAgentState: AgentState.STOPPED,
      t,
    });

    expect(result).toBe(t(I18nKey.COMMON$SERVER_STOPPED));
  });

  it("returns errorMessage when agent state is ERROR", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: false,
      taskStatus: null,
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.ERROR,
      errorMessage: "Something broke",
      t,
    });

    expect(result).toBe("Something broke");
  });

  it("returns default RUNNING status", () => {
    const result = getStatusText({
      isPausing: false,
      isTask: false,
      taskStatus: null,
      taskDetail: null,
      isStartingStatus: false,
      isStopStatus: false,
      curAgentState: AgentState.RUNNING,
      t,
    });

    expect(result).toBe(t(I18nKey.COMMON$RUNNING));
  });
});

frontend/package-lock.json (generated, 37 lines)
@@ -23,14 +23,14 @@
        "clsx": "^2.1.1",
        "downshift": "^9.0.13",
        "eslint-config-airbnb-typescript": "^18.0.0",
        "framer-motion": "^12.23.25",
        "framer-motion": "^12.24.7",
        "i18next": "^25.7.3",
        "i18next-browser-languagedetector": "^8.2.0",
        "i18next-http-backend": "^3.0.2",
        "isbot": "^5.1.32",
        "lucide-react": "^0.562.0",
        "monaco-editor": "^0.55.1",
        "posthog-js": "^1.313.0",
        "posthog-js": "^1.314.0",
        "react": "^19.2.3",
        "react-dom": "^19.2.3",
        "react-hot-toast": "^2.6.0",
@@ -9393,13 +9393,12 @@
      }
    },
    "node_modules/framer-motion": {
      "version": "12.23.26",
      "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.26.tgz",
      "integrity": "sha512-cPcIhgR42xBn1Uj+PzOyheMtZ73H927+uWPDVhUMqxy8UHt6Okavb6xIz9J/phFUHUj0OncR6UvMfJTXoc/LKA==",
      "license": "MIT",
      "version": "12.24.7",
      "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.24.7.tgz",
      "integrity": "sha512-EolFLm7NdEMhWO/VTMZ0LlR4fLHGDiJItTx3i8dlyQooOOBoYAaysK4paGD4PrwqnoDdeDOS+TxnSBIAnNHs3w==",
      "dependencies": {
        "motion-dom": "^12.23.23",
        "motion-utils": "^12.23.6",
        "motion-dom": "^12.24.3",
        "motion-utils": "^12.23.28",
        "tslib": "^2.4.0"
      },
      "peerDependencies": {
@@ -12589,19 +12588,17 @@
      }
    },
    "node_modules/motion-dom": {
      "version": "12.23.23",
      "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.23.23.tgz",
      "integrity": "sha512-n5yolOs0TQQBRUFImrRfs/+6X4p3Q4n1dUEqt/H58Vx7OW6RF+foWEgmTVDhIWJIMXOuNNL0apKH2S16en9eiA==",
      "license": "MIT",
      "version": "12.24.3",
      "resolved": "https://registry.npmjs.org/motion-dom/-/motion-dom-12.24.3.tgz",
      "integrity": "sha512-ZjMZCwhTglim0LM64kC1iFdm4o+2P9IKk3rl/Nb4RKsb5p4O9HJ1C2LWZXOFdsRtp6twpqWRXaFKOduF30ntow==",
      "dependencies": {
        "motion-utils": "^12.23.6"
        "motion-utils": "^12.23.28"
      }
    },
    "node_modules/motion-utils": {
      "version": "12.23.6",
      "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.23.6.tgz",
      "integrity": "sha512-eAWoPgr4eFEOFfg2WjIsMoqJTW6Z8MTUCgn/GZ3VRpClWBdnbjryiA3ZSNLyxCTmCQx4RmYX6jX1iWHbenUPNQ==",
      "license": "MIT"
      "version": "12.23.28",
      "resolved": "https://registry.npmjs.org/motion-utils/-/motion-utils-12.23.28.tgz",
      "integrity": "sha512-0W6cWd5Okoyf8jmessVK3spOmbyE0yTdNKujHctHH9XdAE4QDuZ1/LjSXC68rrhsJU+TkzXURC5OdSWh9ibOwQ=="
    },
    "node_modules/mri": {
      "version": "1.2.0",
@@ -13382,9 +13379,9 @@
      }
    },
    "node_modules/posthog-js": {
      "version": "1.313.0",
      "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.313.0.tgz",
      "integrity": "sha512-CL8RkC7m9BTZrix86w0fdnSCVqC/gxrfs6c4Wfkz/CldFD7f2912S2KqnWFmwRVDGIwm9IR82YhublQ88gdDKw==",
      "version": "1.314.0",
      "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.314.0.tgz",
      "integrity": "sha512-qW1T73UAFpA0g2Ln0blsOUJxRhv0Tn4DrPdhGyTpw+MupW+qvVjzEg/i9jWQ4Al+8AkrNcmZFafJcSWXxWsWqg==",
      "dependencies": {
        "@posthog/core": "1.9.0",
        "core-js": "^3.38.1",

@@ -22,14 +22,14 @@
    "clsx": "^2.1.1",
    "downshift": "^9.0.13",
    "eslint-config-airbnb-typescript": "^18.0.0",
    "framer-motion": "^12.23.25",
    "framer-motion": "^12.24.7",
    "i18next": "^25.7.3",
    "i18next-browser-languagedetector": "^8.2.0",
    "i18next-http-backend": "^3.0.2",
    "isbot": "^5.1.32",
    "lucide-react": "^0.562.0",
    "monaco-editor": "^0.55.1",
    "posthog-js": "^1.313.0",
    "posthog-js": "^1.314.0",
    "react": "^19.2.3",
    "react-dom": "^19.2.3",
    "react-hot-toast": "^2.6.0",

@@ -0,0 +1,38 @@
import { openHands } from "../open-hands-axios";
import {
  GitLabResourcesResponse,
  ReinstallWebhookRequest,
  ResourceIdentifier,
  ResourceInstallationResult,
} from "./integration-service.types";

export const integrationService = {
  /**
   * Get all GitLab projects and groups where the user has admin access
   * @returns Promise with list of resources and their webhook status
   */
  getGitLabResources: async (): Promise<GitLabResourcesResponse> => {
    const { data } = await openHands.get<GitLabResourcesResponse>(
      "/integration/gitlab/resources",
    );
    return data;
  },

  /**
   * Reinstall webhook on a specific GitLab resource
   * @param resource - Resource to reinstall webhook on
   * @returns Promise with installation result
   */
  reinstallGitLabWebhook: async ({
    resource,
  }: {
    resource: ResourceIdentifier;
  }): Promise<ResourceInstallationResult> => {
    const requestBody: ReinstallWebhookRequest = { resource };
    const { data } = await openHands.post<ResourceInstallationResult>(
      "/integration/gitlab/reinstall-webhook",
      requestBody,
    );
    return data;
  },
};
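For orientation, a minimal sketch of how the two calls above compose outside the React Query hooks: list resources, then retry the webhook on anything not yet installed. The loop and the console.warn are illustrative assumptions, not part of this change:

import { integrationService } from "#/api/integration-service/integration-service.api";

// Hypothetical helper, for illustration only: walk the resource list and
// reinstall the webhook wherever it is missing.
async function reinstallMissingWebhooks(): Promise<void> {
  const { resources } = await integrationService.getGitLabResources();
  const missing = resources.filter((r) => !r.webhook_installed);
  for (const resource of missing) {
    const result = await integrationService.reinstallGitLabWebhook({
      resource: { type: resource.type, id: resource.id },
    });
    if (!result.success) {
      // result.error carries the server-side failure message (or null)
      console.warn(
        `Webhook install failed for ${resource.full_path}: ${result.error}`,
      );
    }
  }
}
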
@@ -0,0 +1,29 @@
export interface GitLabResource {
  id: string;
  name: string;
  full_path: string;
  type: "project" | "group";
  webhook_installed: boolean;
  webhook_uuid: string | null;
  last_synced: string | null;
}

export interface GitLabResourcesResponse {
  resources: GitLabResource[];
}

export interface ResourceIdentifier {
  type: "project" | "group";
  id: string;
}

export interface ReinstallWebhookRequest {
  resource: ResourceIdentifier;
}

export interface ResourceInstallationResult {
  resource_id: string;
  resource_type: string;
  success: boolean;
  error: string | null;
}
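For reference, the request and response wire shapes these interfaces describe; the literal values below mirror fixtures already used in the tests above and are illustrative only:

// Request body for POST /integration/gitlab/reinstall-webhook
const exampleRequest: ReinstallWebhookRequest = {
  resource: { type: "project", id: "1" },
};

// A failed installation result, as surfaced in the manager UI
const exampleResult: ResourceInstallationResult = {
  resource_id: "1",
  resource_type: "project",
  success: false,
  error: "Permission denied",
};
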
@@ -21,6 +21,7 @@ import { useAgentState } from "#/hooks/use-agent-state";

import { ScrollToBottomButton } from "#/components/shared/buttons/scroll-to-bottom-button";
import { LoadingSpinner } from "#/components/shared/loading-spinner";
import { ChatMessagesSkeleton } from "./chat-messages-skeleton";
import { displayErrorToast } from "#/utils/custom-toast-handlers";
import { useErrorMessageStore } from "#/stores/error-message-store";
import { useOptimisticUserMessageStore } from "#/stores/optimistic-user-message-store";
@@ -49,6 +50,8 @@ import {
import { useActiveConversation } from "#/hooks/query/use-active-conversation";
import { useTaskPolling } from "#/hooks/query/use-task-polling";
import { useConversationWebSocket } from "#/contexts/conversation-websocket-context";
import ChatStatusIndicator from "./chat-status-indicator";
import { getStatusColor, getStatusText } from "#/utils/utils";

function getEntryPoint(
  hasRepository: boolean | null,
@@ -65,7 +68,7 @@ export function ChatInterface() {
  const { data: conversation } = useActiveConversation();
  const { errorMessage } = useErrorMessageStore();
  const { isLoadingMessages } = useWsClient();
  const { isTask } = useTaskPolling();
  const { isTask, taskStatus, taskDetail } = useTaskPolling();
  const conversationWebSocket = useConversationWebSocket();
  const { send } = useSendMessage();
  const storeEvents = useEventStore((state) => state.events);
@@ -122,6 +125,13 @@ export function ChatInterface() {
    prevV1LoadingRef.current = isLoading;
  }, [conversationWebSocket?.isLoadingHistory]);

  const isReturningToConversation = !!params.conversationId;
  const isHistoryLoading =
    (isLoadingMessages && !isV1Conversation) ||
    (isV1Conversation &&
      (conversationWebSocket?.isLoadingHistory || !showV1Messages));
  const isChatLoading = isHistoryLoading && !isTask;

  // Filter V0 events
  const v0Events = storeEvents
    .filter(isV0Event)
@@ -235,12 +245,38 @@ export function ChatInterface() {
  const v1UserEventsExist = hasV1UserEvent(v1FullEvents);
  const userEventsExist = v0UserEventsExist || v1UserEventsExist;

  // Get server status indicator props
  const isStartingStatus =
    curAgentState === AgentState.LOADING || curAgentState === AgentState.INIT;
  const isStopStatus = curAgentState === AgentState.STOPPED;
  const isPausing = curAgentState === AgentState.PAUSED;
  const serverStatusColor = getStatusColor({
    isPausing,
    isTask,
    taskStatus,
    isStartingStatus,
    isStopStatus,
    curAgentState,
  });
  const serverStatusText = getStatusText({
    isPausing,
    isTask,
    taskStatus,
    taskDetail,
    isStartingStatus,
    isStopStatus,
    curAgentState,
    errorMessage,
    t,
  });

  return (
    <ScrollProvider value={scrollProviderValue}>
      <div className="h-full flex flex-col justify-between pr-0 md:pr-4 relative">
        {!hasSubstantiveAgentActions &&
          !optimisticUserMessage &&
          !userEventsExist && (
          !userEventsExist &&
          !isChatLoading && (
            <ChatSuggestions
              onSuggestionsClick={(message) => setMessageToSend(message)}
            />
@@ -250,22 +286,18 @@ export function ChatInterface() {
        <div
          ref={scrollRef}
          onScroll={(e) => onChatBodyScroll(e.currentTarget)}
          className="custom-scrollbar-always flex flex-col grow overflow-y-auto overflow-x-hidden px-4 pt-4 gap-2 fast-smooth-scroll"
          className="custom-scrollbar-always flex flex-col grow overflow-y-auto overflow-x-hidden px-4 pt-4 gap-2"
        >
          {isLoadingMessages && !isV1Conversation && !isTask && (
            <div className="flex justify-center">
          {isChatLoading && isReturningToConversation && (
            <ChatMessagesSkeleton />
          )}

          {isChatLoading && !isReturningToConversation && (
            <div className="flex justify-center" data-testid="loading-spinner">
              <LoadingSpinner size="small" />
            </div>
          )}

          {(conversationWebSocket?.isLoadingHistory || !showV1Messages) &&
            isV1Conversation &&
            !isTask && (
              <div className="flex justify-center">
                <LoadingSpinner size="small" />
              </div>
            )}

          {!isLoadingMessages && v0UserEventsExist && (
            <V0Messages
              messages={v0Events}
@@ -282,8 +314,14 @@ export function ChatInterface() {

        <div className="flex flex-col gap-[6px]">
          <div className="flex justify-between relative">
            <div className="flex items-center gap-1">
            <div className="flex items-end gap-1">
              <ConfirmationModeEnabled />
              {isStartingStatus && (
                <ChatStatusIndicator
                  statusColor={serverStatusColor}
                  status={serverStatusText}
                />
              )}
              {totalEvents > 0 && !isV1Conversation && (
                <TrajectoryActions
                  onPositiveFeedback={() =>

@@ -0,0 +1,37 @@
import React from "react";

const SKELETON_PATTERN = [
  { width: "w-[25%]", height: "h-4", align: "justify-end" },
  { width: "w-[60%]", height: "h-4", align: "justify-start" },
  { width: "w-[45%]", height: "h-4", align: "justify-start" },
  { width: "w-[65%]", height: "h-20", align: "justify-start" },
  { width: "w-[35%]", height: "h-4", align: "justify-end" },
  { width: "w-[50%]", height: "h-4", align: "justify-start" },
  { width: "w-[30%]", height: "h-4", align: "justify-end" },
  { width: "w-[75%]", height: "h-4", align: "justify-start" },
  { width: "w-[55%]", height: "h-4", align: "justify-start" },
];

function SkeletonBlock({ width, height }: { width: string; height: string }) {
  return (
    <div
      className={`rounded-md bg-foreground/5 animate-pulse ${width} ${height}`}
    />
  );
}

export function ChatMessagesSkeleton() {
  return (
    <div
      className="flex flex-col gap-6 p-4 w-full h-full overflow-hidden"
      data-testid="chat-messages-skeleton"
      aria-label="Loading conversation"
    >
      {SKELETON_PATTERN.map((item, i) => (
        <div key={i} className={`flex w-full ${item.align}`}>
          <SkeletonBlock width={item.width} height={item.height} />
        </div>
      ))}
    </div>
  );
}
@@ -0,0 +1,53 @@
import { cn } from "@heroui/react";
import { motion, AnimatePresence } from "framer-motion";
import DebugStackframeDot from "#/icons/debug-stackframe-dot.svg?react";

interface ChatStatusIndicatorProps {
  status: string;
  statusColor: string;
}

function ChatStatusIndicator({
  status,
  statusColor,
}: ChatStatusIndicatorProps) {
  return (
    <div
      data-testid="chat-status-indicator"
      className={cn(
        "h-[31px] w-fit rounded-[100px] pt-[20px] pr-[16px] pb-[20px] pl-[5px] bg-[#25272D] flex items-center gap-2",
      )}
    >
      <AnimatePresence mode="wait">
        {/* Dot */}
        <motion.span
          key={`dot-${status}`}
          className="animate-[pulse_1.2s_ease-in-out_infinite]"
          initial={{ opacity: 0 }}
          animate={{ opacity: 1 }}
          exit={{ opacity: 0 }}
          transition={{ duration: 0.3 }}
        >
          <DebugStackframeDot
            className="w-6 h-6 shrink-0"
            color={statusColor}
          />
        </motion.span>

        {/* Text */}
        <motion.span
          key={`text-${status}`}
          initial={{ opacity: 0, y: -2 }}
          animate={{ opacity: 1, y: 0 }}
          exit={{ opacity: 0, y: 2 }}
          transition={{ duration: 0.3 }}
          className="font-normal text-[11px] leading-[20px] normal-case"
        >
          {status}
        </motion.span>
      </AnimatePresence>
    </div>
  );
}

export default ChatStatusIndicator;
@@ -1,11 +1,10 @@
import { useTranslation } from "react-i18next";
import DebugStackframeDot from "#/icons/debug-stackframe-dot.svg?react";
import { I18nKey } from "#/i18n/declaration";
import { ConversationStatus } from "#/types/conversation-status";
import { AgentState } from "#/types/agent-state";
import { useAgentState } from "#/hooks/use-agent-state";
import { useTaskPolling } from "#/hooks/query/use-task-polling";
import { getStatusColor } from "#/utils/utils";
import { getStatusColor, getStatusText } from "#/utils/utils";
import { useErrorMessageStore } from "#/stores/error-message-store";

export interface ServerStatusProps {
@@ -20,13 +19,12 @@ export function ServerStatus({
  isPausing = false,
}: ServerStatusProps) {
  const { curAgentState } = useAgentState();
  const { t } = useTranslation();
  const { isTask, taskStatus, taskDetail } = useTaskPolling();
  const { t } = useTranslation();
  const { errorMessage } = useErrorMessageStore();

  const isStartingStatus =
    curAgentState === AgentState.LOADING || curAgentState === AgentState.INIT;

  const isStopStatus = conversationStatus === "STOPPED";

  const statusColor = getStatusColor({
@@ -38,45 +36,17 @@
    curAgentState,
  });

  const getStatusText = (): string => {
    // Show pausing status
    if (isPausing) {
      return t(I18nKey.COMMON$STOPPING);
    }

    // Show task status if we're polling a task
    if (isTask && taskStatus) {
      if (taskStatus === "ERROR") {
        return (
          taskDetail || t(I18nKey.CONVERSATION$ERROR_STARTING_CONVERSATION)
        );
      }
      if (taskStatus === "READY") {
        return t(I18nKey.CONVERSATION$READY);
      }
      // Format status text: "WAITING_FOR_SANDBOX" -> "Waiting for sandbox"
      return (
        taskDetail ||
        taskStatus
          .toLowerCase()
          .replace(/_/g, " ")
          .replace(/\b\w/g, (c) => c.toUpperCase())
      );
    }

    if (isStartingStatus) {
      return t(I18nKey.COMMON$STARTING);
    }
    if (isStopStatus) {
      return t(I18nKey.COMMON$SERVER_STOPPED);
    }
    if (curAgentState === AgentState.ERROR) {
      return errorMessage || t(I18nKey.COMMON$ERROR);
    }
    return t(I18nKey.COMMON$RUNNING);
  };

  const statusText = getStatusText();
  const statusText = getStatusText({
    isPausing,
    isTask,
    taskStatus,
    taskDetail,
    isStartingStatus,
    isStopStatus,
    curAgentState,
    errorMessage,
    t,
  });

  return (
    <div className={className} data-testid="server-status">
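The inline helper deleted above has been promoted to a shared getStatusText in #/utils/utils, so ServerStatus and ChatInterface can render the same text. A sketch of a plausible extracted implementation, reconstructed from the removed code and the new unit tests; the argument type name is an assumption:

import { I18nKey } from "#/i18n/declaration";
import { AgentState } from "#/types/agent-state";

// Assumed shape; the real signature lives in #/utils/utils.
interface GetStatusTextArgs {
  isPausing: boolean;
  isTask: boolean;
  taskStatus: string | null;
  taskDetail: string | null;
  isStartingStatus: boolean;
  isStopStatus: boolean;
  curAgentState: AgentState;
  errorMessage?: string | null;
  t: (key: string) => string;
}

export function getStatusText({
  isPausing,
  isTask,
  taskStatus,
  taskDetail,
  isStartingStatus,
  isStopStatus,
  curAgentState,
  errorMessage,
  t,
}: GetStatusTextArgs): string {
  if (isPausing) return t(I18nKey.COMMON$STOPPING);
  if (isTask && taskStatus) {
    if (taskStatus === "ERROR") {
      return taskDetail || t(I18nKey.CONVERSATION$ERROR_STARTING_CONVERSATION);
    }
    if (taskStatus === "READY") return t(I18nKey.CONVERSATION$READY);
    // Title-case the raw status: "WAITING_FOR_SANDBOX" -> "Waiting For Sandbox"
    return (
      taskDetail ||
      taskStatus
        .toLowerCase()
        .replace(/_/g, " ")
        .replace(/\b\w/g, (c) => c.toUpperCase())
    );
  }
  if (isStartingStatus) return t(I18nKey.COMMON$STARTING);
  if (isStopStatus) return t(I18nKey.COMMON$SERVER_STOPPED);
  if (curAgentState === AgentState.ERROR) {
    return errorMessage || t(I18nKey.COMMON$ERROR);
  }
  return t(I18nKey.COMMON$RUNNING);
}
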
@@ -0,0 +1,12 @@
import { render, screen } from "@testing-library/react";
import { describe, it, expect } from "vitest";
import { ConversationCardSkeleton } from "./conversation-card-skeleton";

describe("ConversationCardSkeleton", () => {
  it("renders skeleton card", () => {
    render(<ConversationCardSkeleton />);
    expect(
      screen.getByTestId("conversation-card-skeleton"),
    ).toBeInTheDocument();
  });
});
@@ -0,0 +1,24 @@
import React from "react";

export function ConversationCardSkeleton() {
  return (
    <div
      data-testid="conversation-card-skeleton"
      className="relative h-auto w-full p-3.5 border-b border-neutral-600"
    >
      <div className="flex items-center justify-between w-full">
        <div className="flex items-center gap-2 w-full">
          <div className="skeleton-round h-1.5 w-1.5" />
          <div className="skeleton h-3 w-2/3 rounded" />
        </div>
      </div>
      <div className="mt-2 flex flex-col gap-1">
        <div className="skeleton h-2 w-1/2 rounded" />
        <div className="flex justify-between">
          <div className="skeleton h-2 w-1/4 rounded" />
          <div className="skeleton h-2 w-8 rounded" />
        </div>
      </div>
    </div>
  );
}
@@ -17,6 +17,7 @@ import { useUpdateConversation } from "#/hooks/mutation/use-update-conversation"
import { displaySuccessToast } from "#/utils/custom-toast-handlers";
import { ConversationCard } from "./conversation-card/conversation-card";
import { StartTaskCard } from "./start-task-card/start-task-card";
import { ConversationCardSkeleton } from "./conversation-card/conversation-card-skeleton";

interface ConversationPanelProps {
  onClose: () => void;
@@ -140,10 +141,13 @@ export function ConversationPanel({ onClose }: ConversationPanelProps) {
      className="w-full md:w-[400px] h-full border border-[#525252] bg-[#25272D] rounded-lg overflow-y-auto absolute custom-scrollbar-always"
    >
      {isFetching && conversations.length === 0 && (
        <div className="w-full h-full absolute flex justify-center items-center">
          <LoadingSpinner size="small" />
        <div className="space-y-2">
          {Array.from({ length: 5 }).map((_, index) => (
            <ConversationCardSkeleton key={index} />
          ))}
        </div>
      )}

      {error && (
        <div className="flex flex-col items-center justify-center h-full">
          <p className="text-danger">{error.message}</p>

@@ -0,0 +1,16 @@
import { useTranslation } from "react-i18next";
import { FaCodeCompare } from "react-icons/fa6";
import { I18nKey } from "#/i18n/declaration";

export function EmptyChangesMessage() {
  const { t } = useTranslation();

  return (
    <div className="flex flex-col items-center justify-center w-full h-full p-10 gap-4">
      <FaCodeCompare size={100} className="text-[#A1A1A1]" />
      <span className="text-[#8D95A9] text-[19px] font-normal leading-5">
        {t(I18nKey.DIFF_VIEWER$NO_CHANGES)}
      </span>
    </div>
  );
}
@@ -0,0 +1,31 @@
import { useTranslation } from "react-i18next";
import { I18nKey } from "#/i18n/declaration";
import { cn } from "#/utils/utils";
import { Typography } from "#/ui/typography";

interface GitLabWebhookManagerStateProps {
  className?: string;
  titleKey: I18nKey;
  messageKey: I18nKey;
  messageColor?: string;
}

export function GitLabWebhookManagerState({
  className,
  titleKey,
  messageKey,
  messageColor = "text-gray-400",
}: GitLabWebhookManagerStateProps) {
  const { t } = useTranslation();

  return (
    <div className={cn("flex flex-col gap-4", className)}>
      <Typography.H3 className="text-lg font-medium text-white">
        {t(titleKey)}
      </Typography.H3>
      <Typography.Text className={cn("text-sm", messageColor)}>
        {t(messageKey)}
      </Typography.Text>
    </div>
  );
}
@@ -0,0 +1,199 @@
import React, { useState } from "react";
import { useTranslation } from "react-i18next";
import { I18nKey } from "#/i18n/declaration";
import { useGitLabResources } from "#/hooks/query/use-gitlab-resources-list";
import { useReinstallGitLabWebhook } from "#/hooks/mutation/use-reinstall-gitlab-webhook";
import { BrandButton } from "#/components/features/settings/brand-button";
import type { GitLabResource } from "#/api/integration-service/integration-service.types";
import { cn } from "#/utils/utils";
import { Typography } from "#/ui/typography";
import { WebhookStatusBadge } from "./webhook-status-badge";
import { GitLabWebhookManagerState } from "./gitlab-webhook-manager-state";

interface GitLabWebhookManagerProps {
  className?: string;
}

export function GitLabWebhookManager({ className }: GitLabWebhookManagerProps) {
  const { t } = useTranslation();
  const [installingResource, setInstallingResource] = useState<string | null>(
    null,
  );
  const [installationResults, setInstallationResults] = useState<
    Map<string, { success: boolean; error: string | null }>
  >(new Map());

  const { data, isLoading, isError } = useGitLabResources(true);
  const reinstallMutation = useReinstallGitLabWebhook();

  const resources = data?.resources || [];

  const handleReinstall = async (resource: GitLabResource) => {
    const key = `${resource.type}:${resource.id}`;
    setInstallingResource(key);

    // Clear previous result for this resource
    const newResults = new Map(installationResults);
    newResults.delete(key);
    setInstallationResults(newResults);

    try {
      const result = await reinstallMutation.mutateAsync({
        type: resource.type,
        id: resource.id,
      });

      // Store result for display
      const resultsMap = new Map(installationResults);
      resultsMap.set(key, {
        success: result.success,
        error: result.error,
      });
      setInstallationResults(resultsMap);
    } catch (error: unknown) {
      // Store error result
      const resultsMap = new Map(installationResults);
      const errorMessage =
        error instanceof Error
          ? error.message
          : t(I18nKey.GITLAB$WEBHOOK_REINSTALL_FAILED);
      resultsMap.set(key, {
        success: false,
        error: errorMessage,
      });
      setInstallationResults(resultsMap);
    } finally {
      setInstallingResource(null);
    }
  };

  const getResourceKey = (resource: GitLabResource) =>
    `${resource.type}:${resource.id}`;

  if (isLoading) {
    return (
      <GitLabWebhookManagerState
        className={className}
        titleKey={I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE}
        messageKey={I18nKey.GITLAB$WEBHOOK_MANAGER_LOADING}
      />
    );
  }

  if (isError) {
    return (
      <GitLabWebhookManagerState
        className={className}
        titleKey={I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE}
        messageKey={I18nKey.GITLAB$WEBHOOK_MANAGER_ERROR}
        messageColor="text-red-400"
      />
    );
  }

  if (resources.length === 0) {
    return (
      <GitLabWebhookManagerState
        className={className}
        titleKey={I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE}
        messageKey={I18nKey.GITLAB$WEBHOOK_MANAGER_NO_RESOURCES}
      />
    );
  }

  return (
    <div className={cn("flex flex-col gap-4", className)}>
      <div className="flex items-center justify-between">
        <Typography.H3 className="text-lg font-medium text-white">
          {t(I18nKey.GITLAB$WEBHOOK_MANAGER_TITLE)}
        </Typography.H3>
      </div>

      <Typography.Text className="text-sm text-gray-400">
        {t(I18nKey.GITLAB$WEBHOOK_MANAGER_DESCRIPTION)}
      </Typography.Text>

      <div className="border border-neutral-700 rounded-lg overflow-hidden">
        <table className="w-full">
          <thead className="bg-neutral-800">
            <tr>
              <th className="px-4 py-3 text-left text-xs font-medium text-gray-400 uppercase tracking-wider">
                {t(I18nKey.GITLAB$WEBHOOK_COLUMN_RESOURCE)}
              </th>
              <th className="px-4 py-3 text-left text-xs font-medium text-gray-400 uppercase tracking-wider">
                {t(I18nKey.GITLAB$WEBHOOK_COLUMN_TYPE)}
              </th>
              <th className="px-4 py-3 text-left text-xs font-medium text-gray-400 uppercase tracking-wider">
                {t(I18nKey.GITLAB$WEBHOOK_COLUMN_STATUS)}
              </th>
              <th className="px-4 py-3 text-left text-xs font-medium text-gray-400 uppercase tracking-wider">
                {t(I18nKey.GITLAB$WEBHOOK_COLUMN_ACTION)}
              </th>
            </tr>
          </thead>
          <tbody className="divide-y divide-neutral-700">
            {resources.map((resource) => {
              const key = getResourceKey(resource);
              const result = installationResults.get(key);
              const isInstalling = installingResource === key;

              return (
                <tr
                  key={key}
                  className="hover:bg-neutral-800/50 transition-colors"
                >
                  <td className="px-4 py-3">
                    <div className="flex flex-col">
                      <Typography.Text className="text-sm font-medium text-white">
                        {resource.name}
                      </Typography.Text>
                      <Typography.Text className="text-xs text-gray-400">
                        {resource.full_path}
                      </Typography.Text>
                    </div>
                  </td>
                  <td className="px-4 py-3">
                    <Typography.Text className="text-sm text-gray-300 capitalize">
                      {resource.type}
                    </Typography.Text>
                  </td>
                  <td className="px-4 py-3">
                    <div className="flex flex-col gap-1">
                      <WebhookStatusBadge
                        webhookInstalled={resource.webhook_installed}
                        installationResult={result}
                      />
                      {result?.error && (
                        <Typography.Text className="text-xs text-red-400">
                          {result.error}
                        </Typography.Text>
                      )}
                    </div>
                  </td>
                  <td className="px-4 py-3">
                    <BrandButton
                      type="button"
                      variant="primary"
                      onClick={() => handleReinstall(resource)}
                      isDisabled={
                        installingResource !== null ||
                        resource.webhook_installed ||
                        result?.success === true
                      }
                      className="cursor-pointer"
                      testId={`reinstall-webhook-button-${key}`}
                    >
                      {isInstalling
                        ? t(I18nKey.GITLAB$WEBHOOK_REINSTALLING)
                        : t(I18nKey.GITLAB$WEBHOOK_REINSTALL)}
                    </BrandButton>
                  </td>
                </tr>
              );
            })}
          </tbody>
        </table>
      </div>
    </div>
  );
}
@@ -0,0 +1,46 @@
import { useTranslation } from "react-i18next";
import { I18nKey } from "#/i18n/declaration";
import { Typography } from "#/ui/typography";

export interface WebhookStatusBadgeProps {
  webhookInstalled: boolean;
  installationResult?: { success: boolean; error: string | null } | null;
}

export function WebhookStatusBadge({
  webhookInstalled,
  installationResult,
}: WebhookStatusBadgeProps) {
  const { t } = useTranslation();

  if (installationResult) {
    if (installationResult.success) {
      return (
        <Typography.Text className="px-2 py-1 text-xs rounded bg-green-500/20 text-green-400">
          {t(I18nKey.GITLAB$WEBHOOK_STATUS_INSTALLED)}
        </Typography.Text>
      );
    }
    return (
      <span title={installationResult.error || undefined}>
        <Typography.Text className="px-2 py-1 text-xs rounded bg-red-500/20 text-red-400">
          {t(I18nKey.GITLAB$WEBHOOK_STATUS_FAILED)}
        </Typography.Text>
      </span>
    );
  }

  if (webhookInstalled) {
    return (
      <Typography.Text className="px-2 py-1 text-xs rounded bg-green-500/20 text-green-400">
        {t(I18nKey.GITLAB$WEBHOOK_STATUS_INSTALLED)}
      </Typography.Text>
    );
  }

  return (
    <Typography.Text className="px-2 py-1 text-xs rounded bg-gray-500/20 text-gray-400">
      {t(I18nKey.GITLAB$WEBHOOK_STATUS_NOT_INSTALLED)}
    </Typography.Text>
  );
}
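A fresh installation attempt outranks the persisted flag here. A comment-only sketch of the precedence, derived from the branches above:

// Badge precedence for WebhookStatusBadge:
// installationResult.success === true  -> "Installed" (green)
// installationResult.success === false -> "Failed" (red, error surfaced via the title tooltip)
// webhookInstalled === true            -> "Installed" (green)
// otherwise                            -> "Not Installed" (gray)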
@@ -13,22 +13,24 @@ export function RandomTip() {
  }, []);

  return (
    <p>
    <div>
      <h4 className="font-bold">{t(I18nKey.TIPS$PROTIP)}:</h4>
      {t(randomTip.key)}
      {randomTip.link && (
        <>
          {" "}
          <a
            href={randomTip.link}
            target="_blank"
            rel="noopener noreferrer"
            className="underline"
          >
            {t(I18nKey.TIPS$LEARN_MORE)}
          </a>
        </>
      )}
    </p>
      <p>
        {t(randomTip.key)}
        {randomTip.link && (
          <>
            {" "}
            <a
              href={randomTip.link}
              target="_blank"
              rel="noopener noreferrer"
              className="underline"
            >
              {t(I18nKey.TIPS$LEARN_MORE)}
            </a>
          </>
        )}
      </p>
    </div>
  );
}

47 frontend/src/hooks/mutation/use-reinstall-gitlab-webhook.ts Normal file
@@ -0,0 +1,47 @@
import { useMutation, useQueryClient } from "@tanstack/react-query";
import { useTranslation } from "react-i18next";
import { integrationService } from "#/api/integration-service/integration-service.api";
import type {
  ResourceIdentifier,
  ResourceInstallationResult,
} from "#/api/integration-service/integration-service.types";
import { I18nKey } from "#/i18n/declaration";
import {
  displayErrorToast,
  displaySuccessToast,
} from "#/utils/custom-toast-handlers";

/**
 * Hook to reinstall webhook on a specific resource
 */
export function useReinstallGitLabWebhook() {
  const queryClient = useQueryClient();
  const { t } = useTranslation();

  return useMutation<
    ResourceInstallationResult,
    Error,
    ResourceIdentifier,
    unknown
  >({
    mutationFn: (resource: ResourceIdentifier) =>
      integrationService.reinstallGitLabWebhook({ resource }),
    onSuccess: (data) => {
      // Invalidate and refetch the resources list
      queryClient.invalidateQueries({ queryKey: ["gitlab-resources"] });

      if (data.success) {
        displaySuccessToast(t(I18nKey.GITLAB$WEBHOOK_REINSTALL_SUCCESS));
      } else if (data.error) {
        displayErrorToast(data.error);
      } else {
        displayErrorToast(t(I18nKey.GITLAB$WEBHOOK_REINSTALL_FAILED));
      }
    },
    onError: (error) => {
      const errorMessage =
        error?.message || t(I18nKey.GITLAB$WEBHOOK_REINSTALL_FAILED);
      displayErrorToast(errorMessage);
    },
  });
}
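A minimal usage sketch for the hook above. The wrapper component and its props are illustrative, not part of this commit; `isPending` assumes TanStack Query v5 (v4 exposes `isLoading` instead):

import type { ResourceIdentifier } from "#/api/integration-service/integration-service.types";
import { useReinstallGitLabWebhook } from "#/hooks/mutation/use-reinstall-gitlab-webhook";

// Hypothetical caller: fires the mutation for a single resource.
// Success/error toasts are already handled inside the hook's callbacks.
function ReinstallWebhookButton({ resource }: { resource: ResourceIdentifier }) {
  const { mutate, isPending } = useReinstallGitLabWebhook();
  return (
    <button type="button" disabled={isPending} onClick={() => mutate(resource)}>
      {isPending ? "Reinstalling..." : "Reinstall"}
    </button>
  );
}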
@@ -9,7 +9,6 @@ import {
  pauseV1ConversationSandbox,
  stopV0Conversation,
  updateConversationStatusInCache,
  invalidateConversationQueries,
} from "./conversation-mutation-utils";

/**
@@ -76,9 +75,6 @@ export const useUnifiedPauseConversationSandbox = () => {
        );
      }
    },
    onSettled: (_, __, variables) => {
      invalidateConversationQueries(queryClient, variables.conversationId);
    },
    onSuccess: (_, variables, context) => {
      if (context?.toastId) {
        toast.dismiss(context.toastId);

16 frontend/src/hooks/query/use-gitlab-resources-list.ts Normal file
@@ -0,0 +1,16 @@
import { useQuery } from "@tanstack/react-query";
import { integrationService } from "#/api/integration-service/integration-service.api";
import type { GitLabResourcesResponse } from "#/api/integration-service/integration-service.types";

/**
 * Hook to fetch GitLab resources with webhook status
 */
export function useGitLabResources(enabled: boolean = true) {
  return useQuery<GitLabResourcesResponse>({
    queryKey: ["gitlab-resources"],
    queryFn: () => integrationService.getGitLabResources(),
    enabled,
    staleTime: 1000 * 60 * 2, // 2 minutes
    gcTime: 1000 * 60 * 10, // 10 minutes
  });
}
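A sketch of a consumer for this query. The component is hypothetical, and the `resources` field on the response is an assumption, since GitLabResourcesResponse is not shown in this diff:

import { useGitLabResources } from "#/hooks/query/use-gitlab-resources-list";

// Hypothetical consumer: only fetch once a GitLab token is configured,
// by passing the flag through to useQuery's `enabled` option.
function GitLabResourceCount({ isGitLabTokenSet }: { isGitLabTokenSet: boolean }) {
  const { data, isLoading } = useGitLabResources(isGitLabTokenSet);
  if (isLoading) return <span>Loading...</span>;
  return <span>{data?.resources?.length ?? 0} resources</span>; // field name assumed
}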
105 frontend/src/hooks/use-app-title.test.tsx Normal file
@@ -0,0 +1,105 @@
import { renderHook, waitFor } from "@testing-library/react";
import { beforeEach, describe, expect, it, vi } from "vitest";
import { QueryClient, QueryClientProvider } from "@tanstack/react-query";
import { useParams } from "react-router";
import OptionService from "#/api/option-service/option-service.api";
import { useUserConversation } from "./query/use-user-conversation";
import { useAppTitle } from "./use-app-title";

const renderAppTitleHook = () =>
  renderHook(() => useAppTitle(), {
    wrapper: ({ children }) => (
      <QueryClientProvider client={new QueryClient()}>
        {children}
      </QueryClientProvider>
    ),
  });

vi.mock("./query/use-user-conversation");
vi.mock("react-router", async () => {
  const actual = await vi.importActual("react-router");
  return {
    ...actual,
    useParams: vi.fn(),
  };
});

describe("useAppTitle", () => {
  const getConfigSpy = vi.spyOn(OptionService, "getConfig");
  const mockUseUserConversation = vi.mocked(useUserConversation);
  const mockUseParams = vi.mocked(useParams);

  beforeEach(() => {
    // @ts-expect-error - only returning partial config for test
    mockUseUserConversation.mockReturnValue({ data: null });
    mockUseParams.mockReturnValue({});
  });

  it("should return 'OpenHands' if is OSS and NOT in /conversations", async () => {
    // @ts-expect-error - only returning partial config for test
    getConfigSpy.mockResolvedValue({
      APP_MODE: "oss",
    });

    const { result } = renderAppTitleHook();

    await waitFor(() => expect(result.current).toBe("OpenHands"));
  });

  it("should return 'OpenHands Cloud' if is SaaS and NOT in /conversations", async () => {
    // @ts-expect-error - only returning partial config for test
    getConfigSpy.mockResolvedValue({
      APP_MODE: "saas",
    });

    const { result } = renderAppTitleHook();

    await waitFor(() => expect(result.current).toBe("OpenHands Cloud"));
  });

  it("should return '{some title} | OpenHands' if is OSS and in /conversations", async () => {
    // @ts-expect-error - only returning partial config for test
    getConfigSpy.mockResolvedValue({ APP_MODE: "oss" });
    mockUseParams.mockReturnValue({ conversationId: "123" });
    mockUseUserConversation.mockReturnValue({
      // @ts-expect-error - only returning partial config for test
      data: { title: "My Conversation" },
    });

    const { result } = renderAppTitleHook();

    await waitFor(() =>
      expect(result.current).toBe("My Conversation | OpenHands"),
    );
  });

  it("should return '{some title} | OpenHands Cloud' if is SaaS and in /conversations", async () => {
    // @ts-expect-error - only returning partial config for test
    getConfigSpy.mockResolvedValue({ APP_MODE: "saas" });
    mockUseParams.mockReturnValue({ conversationId: "456" });
    mockUseUserConversation.mockReturnValue({
      // @ts-expect-error - only returning partial config for test
      data: { title: "Another Conversation Title" },
    });

    const { result } = renderAppTitleHook();

    await waitFor(() =>
      expect(result.current).toBe(
        "Another Conversation Title | OpenHands Cloud",
      ),
    );
  });

  it("should return app name while conversation is loading", async () => {
    // @ts-expect-error - only returning partial config for test
    getConfigSpy.mockResolvedValue({ APP_MODE: "oss" });
    mockUseParams.mockReturnValue({ conversationId: "123" });
    // @ts-expect-error - only returning partial config for test
    mockUseUserConversation.mockReturnValue({ data: undefined });

    const { result } = renderAppTitleHook();

    await waitFor(() => expect(result.current).toBe("OpenHands"));
  });
});
26 frontend/src/hooks/use-app-title.ts Normal file
@@ -0,0 +1,26 @@
import { useParams } from "react-router";
import { useConfig } from "#/hooks/query/use-config";
import { useUserConversation } from "#/hooks/query/use-user-conversation";

const APP_TITLE_OSS = "OpenHands";
const APP_TITLE_SAAS = "OpenHands Cloud";

/**
 * Hook that returns the appropriate document title based on APP_MODE and current route.
 * - For conversation pages: "Conversation Title | OpenHands" or "Conversation Title | OpenHands Cloud"
 * - For other pages: "OpenHands" or "OpenHands Cloud"
 */
export const useAppTitle = () => {
  const { data: config } = useConfig();
  const { conversationId } = useParams<{ conversationId: string }>();
  const { data: conversation } = useUserConversation(conversationId ?? null);

  const appTitle = config?.APP_MODE === "oss" ? APP_TITLE_OSS : APP_TITLE_SAAS;
  const conversationTitle = conversation?.title;

  if (conversationId && conversationTitle) {
    return `${conversationTitle} | ${appTitle}`;
  }

  return appTitle;
};
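For reference, the four title shapes this hook produces, following directly from the logic above and the tests earlier in this commit:

// APP_MODE "oss",  no conversation          -> "OpenHands"
// APP_MODE "saas", no conversation          -> "OpenHands Cloud"
// APP_MODE "oss",  title "My Conversation"  -> "My Conversation | OpenHands"
// APP_MODE "saas", title "My Conversation"  -> "My Conversation | OpenHands Cloud"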
@@ -1,26 +0,0 @@
import { useEffect, useRef } from "react";
import { useActiveConversation } from "./query/use-active-conversation";

/**
 * Hook that updates the document title based on the current conversation.
 * This ensures that any changes to the conversation title are reflected in the document title.
 *
 * @param suffix Optional suffix to append to the title (default: "OpenHands")
 */
export function useDocumentTitleFromState(suffix = "OpenHands") {
  const { data: conversation } = useActiveConversation();
  const lastValidTitleRef = useRef<string | null>(null);

  useEffect(() => {
    if (conversation?.title) {
      lastValidTitleRef.current = conversation.title;
      document.title = `${conversation.title} | ${suffix}`;
    } else {
      document.title = suffix;
    }

    return () => {
      document.title = suffix;
    };
  }, [conversation?.title, suffix]);
}
@@ -61,11 +61,7 @@ export function useScrollToBottom(scrollRef: RefObject<HTMLDivElement | null>) {
      setAutoscroll(true);
      setHitBottom(true);

      // Use smooth scrolling but with a fast duration
      dom.scrollTo({
        top: dom.scrollHeight,
        behavior: "smooth",
      });
      dom.scrollTop = dom.scrollHeight;
    });
  }
}, [scrollRef]);
@@ -77,11 +73,7 @@ export function useScrollToBottom(scrollRef: RefObject<HTMLDivElement | null>) {
  if (autoscroll) {
    const dom = scrollRef.current;
    if (dom) {
      // Scroll to bottom - this will trigger on any DOM change
      dom.scrollTo({
        top: dom.scrollHeight,
        behavior: "smooth",
      });
      dom.scrollTop = dom.scrollHeight;
    }
  }
}); // No dependency array - runs after every render to follow new content

@@ -126,6 +126,11 @@ export enum I18nKey {
  SETTINGS$GITHUB = "SETTINGS$GITHUB",
  SETTINGS$AZURE_DEVOPS = "SETTINGS$AZURE_DEVOPS",
  SETTINGS$SLACK = "SETTINGS$SLACK",
  COMMON$STATUS = "COMMON$STATUS",
  SETTINGS$GITLAB_NOT_CONNECTED = "SETTINGS$GITLAB_NOT_CONNECTED",
  SETTINGS$GITLAB_REINSTALL_WEBHOOK = "SETTINGS$GITLAB_REINSTALL_WEBHOOK",
  SETTINGS$GITLAB_INSTALLING_WEBHOOK = "SETTINGS$GITLAB_INSTALLING_WEBHOOK",
  SETTINGS$GITLAB = "SETTINGS$GITLAB",
  SETTINGS$NAV_LLM = "SETTINGS$NAV_LLM",
  GIT$MERGE_REQUEST = "GIT$MERGE_REQUEST",
  GIT$GITLAB_API = "GIT$GITLAB_API",
@@ -618,6 +623,22 @@ export enum I18nKey {
  GITLAB$TOKEN_HELP_TEXT = "GITLAB$TOKEN_HELP_TEXT",
  GITLAB$TOKEN_LINK_TEXT = "GITLAB$TOKEN_LINK_TEXT",
  GITLAB$INSTRUCTIONS_LINK_TEXT = "GITLAB$INSTRUCTIONS_LINK_TEXT",
  GITLAB$WEBHOOK_MANAGER_TITLE = "GITLAB$WEBHOOK_MANAGER_TITLE",
  GITLAB$WEBHOOK_MANAGER_DESCRIPTION = "GITLAB$WEBHOOK_MANAGER_DESCRIPTION",
  GITLAB$WEBHOOK_MANAGER_LOADING = "GITLAB$WEBHOOK_MANAGER_LOADING",
  GITLAB$WEBHOOK_MANAGER_ERROR = "GITLAB$WEBHOOK_MANAGER_ERROR",
  GITLAB$WEBHOOK_MANAGER_NO_RESOURCES = "GITLAB$WEBHOOK_MANAGER_NO_RESOURCES",
  GITLAB$WEBHOOK_REINSTALL = "GITLAB$WEBHOOK_REINSTALL",
  GITLAB$WEBHOOK_REINSTALLING = "GITLAB$WEBHOOK_REINSTALLING",
  GITLAB$WEBHOOK_REINSTALL_SUCCESS = "GITLAB$WEBHOOK_REINSTALL_SUCCESS",
  GITLAB$WEBHOOK_COLUMN_RESOURCE = "GITLAB$WEBHOOK_COLUMN_RESOURCE",
  GITLAB$WEBHOOK_COLUMN_TYPE = "GITLAB$WEBHOOK_COLUMN_TYPE",
  GITLAB$WEBHOOK_COLUMN_STATUS = "GITLAB$WEBHOOK_COLUMN_STATUS",
  GITLAB$WEBHOOK_COLUMN_ACTION = "GITLAB$WEBHOOK_COLUMN_ACTION",
  GITLAB$WEBHOOK_STATUS_INSTALLED = "GITLAB$WEBHOOK_STATUS_INSTALLED",
  GITLAB$WEBHOOK_STATUS_NOT_INSTALLED = "GITLAB$WEBHOOK_STATUS_NOT_INSTALLED",
  GITLAB$WEBHOOK_STATUS_FAILED = "GITLAB$WEBHOOK_STATUS_FAILED",
  GITLAB$WEBHOOK_REINSTALL_FAILED = "GITLAB$WEBHOOK_REINSTALL_FAILED",
  BITBUCKET$TOKEN_LABEL = "BITBUCKET$TOKEN_LABEL",
  BITBUCKET$HOST_LABEL = "BITBUCKET$HOST_LABEL",
  BITBUCKET$GET_TOKEN = "BITBUCKET$GET_TOKEN",
@@ -908,6 +929,7 @@ export enum I18nKey {
  COMMON$RECENT_PROJECTS = "COMMON$RECENT_PROJECTS",
  COMMON$RUN = "COMMON$RUN",
  COMMON$RUNNING = "COMMON$RUNNING",
  COMMON$WAITING_FOR_SANDBOX = "COMMON$WAITING_FOR_SANDBOX",
  COMMON$SELECT_GIT_PROVIDER = "COMMON$SELECT_GIT_PROVIDER",
  COMMON$SERVER_STATUS = "COMMON$SERVER_STATUS",
  COMMON$SERVER_STOPPED = "COMMON$SERVER_STOPPED",

@@ -2015,6 +2015,86 @@
    "de": "Slack",
    "uk": "Slack"
  },
  "COMMON$STATUS": {
    "en": "Status",
    "ja": "ステータス",
    "zh-CN": "状态",
    "zh-TW": "狀態",
    "ko-KR": "상태",
    "no": "Status",
    "it": "Stato",
    "pt": "Status",
    "es": "Estado",
    "ar": "الحالة",
    "fr": "Statut",
    "tr": "Durum",
    "de": "Status",
    "uk": "Статус"
  },
  "SETTINGS$GITLAB_NOT_CONNECTED": {
    "en": "Not Connected",
    "ja": "未接続",
    "zh-CN": "未连接",
    "zh-TW": "未連接",
    "ko-KR": "연결되지 않음",
    "no": "Ikke tilkoblet",
    "it": "Non connesso",
    "pt": "Não conectado",
    "es": "No conectado",
    "ar": "غير متصل",
    "fr": "Non connecté",
    "tr": "Bağlı değil",
    "de": "Nicht verbunden",
    "uk": "Не підключено"
  },
  "SETTINGS$GITLAB_REINSTALL_WEBHOOK": {
    "en": "Reinstall Webhook",
    "ja": "Webhookを再インストール",
    "zh-CN": "重新安装 Webhook",
    "zh-TW": "重新安裝 Webhook",
    "ko-KR": "Webhook 재설치",
    "no": "Installer Webhook på nytt",
    "it": "Reinstalla Webhook",
    "pt": "Reinstalar Webhook",
    "es": "Reinstalar Webhook",
    "ar": "إعادة تثبيت Webhook",
    "fr": "Réinstaller le Webhook",
    "tr": "Webhook'u Yeniden Kur",
    "de": "Webhook neu installieren",
    "uk": "Перевстановити Webhook"
  },
  "SETTINGS$GITLAB_INSTALLING_WEBHOOK": {
    "en": "Installing GitLab webhook, please wait a few minutes.",
    "ja": "GitLabのWebhookをインストールしています。数分お待ちください。",
    "zh-CN": "正在安装 GitLab webhook,请稍等几分钟。",
    "zh-TW": "正在安裝 GitLab webhook,請稍候幾分鐘。",
    "ko-KR": "GitLab webhook을 설치 중입니다. 잠시만 기다려주세요.",
    "no": "Installerer GitLab-webhook, vennligst vent noen minutter.",
    "it": "Installazione del webhook GitLab in corso, attendi alcuni minuti.",
    "pt": "Instalando o webhook do GitLab, por favor aguarde alguns minutos.",
    "es": "Instalando el webhook de GitLab, por favor espera unos minutos.",
    "ar": "يتم تثبيت Webhook الخاص بـ GitLab، يرجى الانتظار لبضع دقائق.",
    "fr": "Installation du webhook GitLab, veuillez patienter quelques minutes.",
    "tr": "GitLab webhook'u yükleniyor, lütfen birkaç dakika bekleyin.",
    "de": "GitLab-Webhook wird installiert. Bitte warten Sie einige Minuten.",
    "uk": "Встановлення GitLab webhook, зачекайте кілька хвилин."
  },
  "SETTINGS$GITLAB": {
    "en": "GitLab",
    "ja": "GitLab",
    "zh-CN": "GitLab",
    "zh-TW": "GitLab",
    "ko-KR": "GitLab",
    "no": "GitLab",
    "it": "GitLab",
    "pt": "GitLab",
    "es": "GitLab",
    "ar": "GitLab",
    "fr": "GitLab",
    "tr": "GitLab",
    "de": "GitLab",
    "uk": "GitLab"
  },
  "SETTINGS$NAV_LLM": {
    "en": "LLM",
    "ja": "LLM",
@@ -9887,6 +9967,262 @@
    "de": "klicken Sie hier für Anweisungen",
    "uk": "натисніть тут, щоб отримати інструкції"
  },
  "GITLAB$WEBHOOK_MANAGER_TITLE": {
    "en": "Webhook Management",
    "ja": "Webhook管理",
    "zh-CN": "Webhook管理",
    "zh-TW": "Webhook管理",
    "ko-KR": "웹훅 관리",
    "no": "Webhook-administrasjon",
    "it": "Gestione Webhook",
    "pt": "Gerenciamento de Webhook",
    "es": "Gestión de Webhook",
    "ar": "إدارة Webhook",
    "fr": "Gestion des Webhooks",
    "tr": "Webhook Yönetimi",
    "de": "Webhook-Verwaltung",
    "uk": "Керування Webhook"
  },
  "GITLAB$WEBHOOK_MANAGER_DESCRIPTION": {
    "en": "Manage webhooks for your GitLab projects and groups. Webhooks enable OpenHands to receive notifications from GitLab. Note: If a webhook is already installed, you must first delete it through the GitLab UI before reinstalling.",
    "ja": "GitLabプロジェクトとグループのWebhookを管理します。WebhookによりOpenHandsはGitLabから通知を受け取ることができます。注:Webhookが既にインストールされている場合は、再インストールする前にGitLab UIから削除する必要があります。",
    "zh-CN": "管理您的GitLab项目和组的Webhook。Webhook使OpenHands能够接收来自GitLab的通知。注意:如果Webhook已安装,您必须先通过GitLab UI删除它,然后才能重新安装。",
    "zh-TW": "管理您的GitLab專案和群組的Webhook。Webhook使OpenHands能夠接收來自GitLab的通知。注意:如果Webhook已安裝,您必須先透過GitLab UI刪除它,然後才能重新安裝。",
    "ko-KR": "GitLab 프로젝트 및 그룹의 웹훅을 관리합니다. 웹훅을 통해 OpenHands가 GitLab에서 알림을 받을 수 있습니다. 참고: 웹훅이 이미 설치되어 있는 경우 재설치하기 전에 GitLab UI를 통해 먼저 삭제해야 합니다.",
    "no": "Administrer webhooks for dine GitLab-prosjekter og grupper. Webhooks gjør det mulig for OpenHands å motta varsler fra GitLab. Merk: Hvis en webhook allerede er installert, må du først slette den via GitLab-grensesnittet før du installerer den på nytt.",
    "it": "Gestisci i webhook per i tuoi progetti e gruppi GitLab. I webhook consentono a OpenHands di ricevere notifiche da GitLab. Nota: se un webhook è già installato, devi prima eliminarlo tramite l'interfaccia utente di GitLab prima di reinstallarlo.",
    "pt": "Gerencie webhooks para seus projetos e grupos do GitLab. Os webhooks permitem que o OpenHands receba notificações do GitLab. Nota: Se um webhook já estiver instalado, você deve primeiro excluí-lo através da interface do GitLab antes de reinstalá-lo.",
    "es": "Administre webhooks para sus proyectos y grupos de GitLab. Los webhooks permiten que OpenHands reciba notificaciones de GitLab. Nota: Si un webhook ya está instalado, primero debe eliminarlo a través de la interfaz de GitLab antes de reinstalarlo.",
    "ar": "إدارة webhooks لمشاريعك ومجموعاتك في GitLab. تمكن Webhooks OpenHands من تلقي الإشعارات من GitLab. ملاحظة: إذا كان webhook مثبتًا بالفعل، يجب عليك أولاً حذفه من خلال واجهة GitLab قبل إعادة التثبيت.",
    "fr": "Gérez les webhooks pour vos projets et groupes GitLab. Les webhooks permettent à OpenHands de recevoir des notifications de GitLab. Remarque : Si un webhook est déjà installé, vous devez d'abord le supprimer via l'interface GitLab avant de le réinstaller.",
    "tr": "GitLab projeleriniz ve gruplarınız için webhook'ları yönetin. Webhook'lar OpenHands'in GitLab'dan bildirim almasını sağlar. Not: Bir webhook zaten yüklüyse, yeniden yüklemeden önce GitLab arayüzü üzerinden silmeniz gerekir.",
    "de": "Verwalten Sie Webhooks für Ihre GitLab-Projekte und -Gruppen. Webhooks ermöglichen es OpenHands, Benachrichtigungen von GitLab zu empfangen. Hinweis: Wenn ein Webhook bereits installiert ist, müssen Sie ihn zuerst über die GitLab-Benutzeroberfläche löschen, bevor Sie ihn neu installieren.",
    "uk": "Керуйте вебхуками для ваших проектів та груп GitLab. Вебхуки дозволяють OpenHands отримувати сповіщення від GitLab. Примітка: Якщо вебхук вже встановлено, ви повинні спочатку видалити його через інтерфейс GitLab перед повторним встановленням."
  },
  "GITLAB$WEBHOOK_MANAGER_LOADING": {
    "en": "Loading resources...",
    "ja": "リソースを読み込み中...",
    "zh-CN": "正在加载资源...",
    "zh-TW": "正在載入資源...",
    "ko-KR": "리소스 로드 중...",
    "no": "Laster ressurser...",
    "it": "Caricamento risorse...",
    "pt": "Carregando recursos...",
    "es": "Cargando recursos...",
    "ar": "جارٍ تحميل الموارد...",
    "fr": "Chargement des ressources...",
    "tr": "Kaynaklar yükleniyor...",
    "de": "Ressourcen werden geladen...",
    "uk": "Завантаження ресурсів..."
  },
  "GITLAB$WEBHOOK_MANAGER_ERROR": {
    "en": "Failed to load resources. Please try again.",
    "ja": "リソースの読み込みに失敗しました。もう一度お試しください。",
    "zh-CN": "加载资源失败。请重试。",
    "zh-TW": "載入資源失敗。請重試。",
    "ko-KR": "리소스 로드에 실패했습니다. 다시 시도해주세요.",
    "no": "Kunne ikke laste ressurser. Vennligst prøv igjen.",
    "it": "Impossibile caricare le risorse. Riprova.",
    "pt": "Falha ao carregar recursos. Por favor, tente novamente.",
    "es": "Error al cargar recursos. Por favor, inténtelo de nuevo.",
    "ar": "فشل تحميل الموارد. يرجى المحاولة مرة أخرى.",
    "fr": "Échec du chargement des ressources. Veuillez réessayer.",
    "tr": "Kaynaklar yüklenemedi. Lütfen tekrar deneyin.",
    "de": "Ressourcen konnten nicht geladen werden. Bitte versuchen Sie es erneut.",
    "uk": "Не вдалося завантажити ресурси. Будь ласка, спробуйте ще раз."
  },
  "GITLAB$WEBHOOK_MANAGER_NO_RESOURCES": {
    "en": "No projects or groups found where you have admin access.",
    "ja": "管理者アクセス権を持つプロジェクトまたはグループが見つかりませんでした。",
    "zh-CN": "未找到您具有管理员访问权限的项目或组。",
    "zh-TW": "未找到您具有管理員存取權限的專案或群組。",
    "ko-KR": "관리자 액세스 권한이 있는 프로젝트 또는 그룹을 찾을 수 없습니다.",
    "no": "Ingen prosjekter eller grupper funnet der du har administratortilgang.",
    "it": "Nessun progetto o gruppo trovato in cui hai accesso amministratore.",
    "pt": "Nenhum projeto ou grupo encontrado onde você tem acesso de administrador.",
    "es": "No se encontraron proyectos o grupos donde tenga acceso de administrador.",
    "ar": "لم يتم العثور على مشاريع أو مجموعات لديك فيها وصول المسؤول.",
    "fr": "Aucun projet ou groupe trouvé où vous avez un accès administrateur.",
    "tr": "Yönetici erişiminizin olduğu proje veya grup bulunamadı.",
    "de": "Keine Projekte oder Gruppen gefunden, auf die Sie Administratorzugriff haben.",
    "uk": "Не знайдено проектів або груп, де ви маєте адміністраторський доступ."
  },
  "GITLAB$WEBHOOK_REINSTALL": {
    "en": "Reinstall",
    "ja": "再インストール",
    "zh-CN": "重新安装",
    "zh-TW": "重新安裝",
    "ko-KR": "재설치",
    "no": "Installer på nytt",
    "it": "Reinstalla",
    "pt": "Reinstalar",
    "es": "Reinstalar",
    "ar": "إعادة التثبيت",
    "fr": "Réinstaller",
    "tr": "Yeniden Yükle",
    "de": "Neu installieren",
    "uk": "Перевстановити"
  },
  "GITLAB$WEBHOOK_REINSTALLING": {
    "en": "Reinstalling...",
    "ja": "再インストール中...",
    "zh-CN": "正在重新安装...",
    "zh-TW": "正在重新安裝...",
    "ko-KR": "재설치 중...",
    "no": "Installerer på nytt...",
    "it": "Reinstallazione...",
    "pt": "Reinstalando...",
    "es": "Reinstalando...",
    "ar": "جارٍ إعادة التثبيت...",
    "fr": "Réinstallation...",
    "tr": "Yeniden yükleniyor...",
    "de": "Wird neu installiert...",
    "uk": "Перевстановлення..."
  },
  "GITLAB$WEBHOOK_REINSTALL_SUCCESS": {
    "en": "Webhook reinstalled successfully",
    "ja": "ウェブフックの再インストールが完了しました",
    "zh-CN": "Webhook 重新安装成功",
    "zh-TW": "Webhook 重新安裝成功",
    "ko-KR": "웹훅 재설치 성공",
    "no": "Webhook ble installert på nytt",
    "it": "Webhook reinstallato con successo",
    "pt": "Webhook reinstalado com sucesso",
    "es": "Webhook reinstalado correctamente",
    "ar": "تم إعادة تثبيت الخطاف بنجاح",
    "fr": "Webhook réinstallé avec succès",
    "tr": "Webhook başarıyla yeniden yüklendi",
    "de": "Webhook erfolgreich neu installiert",
    "uk": "Вебхук успішно перевстановлено"
  },
  "GITLAB$WEBHOOK_COLUMN_RESOURCE": {
    "en": "Resource",
    "ja": "リソース",
    "zh-CN": "资源",
    "zh-TW": "資源",
    "ko-KR": "리소스",
    "no": "Ressurs",
    "it": "Risorsa",
    "pt": "Recurso",
    "es": "Recurso",
    "ar": "المورد",
    "fr": "Ressource",
    "tr": "Kaynak",
    "de": "Ressource",
    "uk": "Ресурс"
  },
  "GITLAB$WEBHOOK_COLUMN_TYPE": {
    "en": "Type",
    "ja": "タイプ",
    "zh-CN": "类型",
    "zh-TW": "類型",
    "ko-KR": "유형",
    "no": "Type",
    "it": "Tipo",
    "pt": "Tipo",
    "es": "Tipo",
    "ar": "النوع",
    "fr": "Type",
    "tr": "Tür",
    "de": "Typ",
    "uk": "Тип"
  },
  "GITLAB$WEBHOOK_COLUMN_STATUS": {
    "en": "Status",
    "ja": "ステータス",
    "zh-CN": "状态",
    "zh-TW": "狀態",
    "ko-KR": "상태",
    "no": "Status",
    "it": "Stato",
    "pt": "Status",
    "es": "Estado",
    "ar": "الحالة",
    "fr": "Statut",
    "tr": "Durum",
    "de": "Status",
    "uk": "Статус"
  },
  "GITLAB$WEBHOOK_COLUMN_ACTION": {
    "en": "Action",
    "ja": "アクション",
    "zh-CN": "操作",
    "zh-TW": "操作",
    "ko-KR": "작업",
    "no": "Handling",
    "it": "Azione",
    "pt": "Ação",
    "es": "Acción",
    "ar": "الإجراء",
    "fr": "Action",
    "tr": "Eylem",
    "de": "Aktion",
    "uk": "Дія"
  },
  "GITLAB$WEBHOOK_STATUS_INSTALLED": {
    "en": "Installed",
    "ja": "インストール済み",
    "zh-CN": "已安装",
    "zh-TW": "已安裝",
    "ko-KR": "설치됨",
    "no": "Installert",
    "it": "Installato",
    "pt": "Instalado",
    "es": "Instalado",
    "ar": "مثبت",
    "fr": "Installé",
    "tr": "Yüklü",
    "de": "Installiert",
    "uk": "Встановлено"
  },
  "GITLAB$WEBHOOK_STATUS_NOT_INSTALLED": {
    "en": "Not Installed",
    "ja": "未インストール",
    "zh-CN": "未安装",
    "zh-TW": "未安裝",
    "ko-KR": "설치되지 않음",
    "no": "Ikke installert",
    "it": "Non installato",
    "pt": "Não instalado",
    "es": "No instalado",
    "ar": "غير مثبت",
    "fr": "Non installé",
    "tr": "Yüklü değil",
    "de": "Nicht installiert",
    "uk": "Не встановлено"
  },
  "GITLAB$WEBHOOK_STATUS_FAILED": {
    "en": "Failed",
    "ja": "失敗",
    "zh-CN": "失败",
    "zh-TW": "失敗",
    "ko-KR": "실패",
    "no": "Mislyktes",
    "it": "Fallito",
    "pt": "Falhou",
    "es": "Fallido",
    "ar": "فشل",
    "fr": "Échoué",
    "tr": "Başarısız",
    "de": "Fehlgeschlagen",
    "uk": "Помилка"
  },
  "GITLAB$WEBHOOK_REINSTALL_FAILED": {
    "en": "Failed to reinstall webhook",
    "ja": "ウェブフックの再インストールに失敗しました",
    "zh-CN": "重新安装 Webhook 失败",
    "zh-TW": "重新安裝 Webhook 失敗",
    "ko-KR": "웹훅 재설치 실패",
    "no": "Kunne ikke installere webhook på nytt",
    "it": "Reinstallazione webhook non riuscita",
    "pt": "Falha ao reinstalar webhook",
    "es": "Error al reinstalar webhook",
    "ar": "فشل في إعادة تثبيت الخطاف",
    "fr": "Échec de la réinstallation du webhook",
    "tr": "Webhook yeniden yüklenemedi",
    "de": "Webhook konnte nicht neu installiert werden",
    "uk": "Не вдалося перевстановити вебхук"
  },
  "BITBUCKET$TOKEN_LABEL": {
    "en": "Bitbucket Token",
    "ja": "Bitbucketトークン",
@@ -10096,20 +10432,20 @@
    "uk": "Попросіть OpenHands ініціалізувати git-репозиторій, щоб активувати цей інтерфейс користувача."
  },
  "DIFF_VIEWER$NO_CHANGES": {
    "en": "OpenHands hasn't made any changes yet...",
    "ja": "OpenHandsはまだ変更を加えていません...",
    "zh-CN": "OpenHands尚未进行任何更改...",
    "zh-TW": "OpenHands尚未進行任何更改...",
    "ko-KR": "OpenHands는 아직 변경하지 않았습니다...",
    "no": "OpenHands har ikke gjort noen endringer ennå...",
    "it": "OpenHands non ha ancora apportato modifiche...",
    "pt": "O OpenHands ainda não fez nenhuma alteração...",
    "es": "OpenHands aún no ha realizado ningún cambio...",
    "ar": "لم يقم OpenHands بإجراء أي تغييرات بعد ...",
    "fr": "OpenHands n'a pas encore apporté de modifications ...",
    "tr": "OpenHands henüz herhangi bir değişiklik yapmadı ...",
    "de": "OpenHands hat noch keine Änderungen vorgenommen...",
    "uk": "OpenHands ще не вніс жодних змін..."
    "en": "OpenHands hasn't made any changes yet",
    "ja": "OpenHandsはまだ変更を加えていません",
    "zh-CN": "OpenHands尚未进行任何更改",
    "zh-TW": "OpenHands尚未進行任何更改",
    "ko-KR": "OpenHands는 아직 변경하지 않았습니다",
    "no": "OpenHands har ikke gjort noen endringer ennå",
    "it": "OpenHands non ha ancora apportato modifiche",
    "pt": "O OpenHands ainda não fez nenhuma alteração",
    "es": "OpenHands aún no ha realizado ningún cambio",
    "ar": "لم يقم OpenHands بإجراء أي تغييرات بعد",
    "fr": "OpenHands n'a pas encore apporté de modifications",
    "tr": "OpenHands henüz herhangi bir değişiklik yapmadı",
    "de": "OpenHands hat noch keine Änderungen vorgenommen",
    "uk": "OpenHands ще не вніс жодних змін"
  },
  "DIFF_VIEWER$WAITING_FOR_RUNTIME": {
    "en": "Waiting for runtime to start...",
@@ -14527,6 +14863,22 @@
    "de": "Läuft",
    "uk": "Працює"
  },
  "COMMON$WAITING_FOR_SANDBOX": {
    "en": "Waiting for sandbox",
    "ja": "サンドボックスを待機中",
    "zh-CN": "等待沙盒",
    "zh-TW": "等待沙盒",
    "ko-KR": "샌드박스를 기다리는 중",
    "no": "Venter på sandkasse",
    "it": "In attesa del sandbox",
    "pt": "Aguardando sandbox",
    "es": "Esperando el sandbox",
    "ar": "في انتظار البيئة المعزولة",
    "fr": "En attente du bac à sable",
    "tr": "Sandbox bekleniyor",
    "de": "Warten auf Sandbox",
    "uk": "Очікування пісочниці"
  },
  "COMMON$SELECT_GIT_PROVIDER": {
    "en": "Select Git provider",
    "ja": "Gitプロバイダーを選択",

@@ -1,6 +1,7 @@
import { useTranslation } from "react-i18next";
import React from "react";
import { FileDiffViewer } from "#/components/features/diff-viewer/file-diff-viewer";
import { EmptyChangesMessage } from "#/components/features/diff-viewer/empty-changes-message";
import { retrieveAxiosErrorMessage } from "#/utils/retrieve-axios-error-message";
import { useUnifiedGetGitChanges } from "#/hooks/query/use-unified-get-git-changes";
import { I18nKey } from "#/i18n/declaration";
@@ -77,6 +78,9 @@ function GitChanges() {
          ))}
        </StatusMessage>
      )}
      {!statusMessage && isSuccess && gitChanges.length === 0 && (
        <EmptyChangesMessage />
      )}
    </div>

    <div className="absolute inset-x-0 bottom-0">

@@ -16,7 +16,6 @@ import { useActiveConversation } from "#/hooks/query/use-active-conversation";
import { useTaskPolling } from "#/hooks/query/use-task-polling";

import { displayErrorToast } from "#/utils/custom-toast-handlers";
import { useDocumentTitleFromState } from "#/hooks/use-document-title-from-state";
import { useIsAuthed } from "#/hooks/query/use-is-authed";
import { ConversationSubscriptionsProvider } from "#/context/conversation-subscriptions-provider";
import { useUserProviders } from "#/hooks/use-user-providers";
@@ -33,7 +32,6 @@ import { useEventStore } from "#/stores/use-event-store";

function AppContent() {
  useConversationConfig();

  const { t } = useTranslation();
  const { conversationId } = useConversationId();
  const clearEvents = useEventStore((state) => state.clearEvents);
@@ -62,9 +60,6 @@ function AppContent() {
  // Fetch batch feedback data when conversation is loaded
  useBatchFeedback();

  // Set the document title to the conversation title when available
  useDocumentTitleFromState();

  // 1. Cleanup Effect - runs when navigating to a different conversation
  React.useEffect(() => {
    clearTerminal();

@@ -6,11 +6,13 @@ import { BrandButton } from "#/components/features/settings/brand-button";
import { useLogout } from "#/hooks/mutation/use-logout";
import { GitHubTokenInput } from "#/components/features/settings/git-settings/github-token-input";
import { GitLabTokenInput } from "#/components/features/settings/git-settings/gitlab-token-input";
import { GitLabWebhookManager } from "#/components/features/settings/git-settings/gitlab-webhook-manager";
import { BitbucketTokenInput } from "#/components/features/settings/git-settings/bitbucket-token-input";
import { AzureDevOpsTokenInput } from "#/components/features/settings/git-settings/azure-devops-token-input";
import { ForgejoTokenInput } from "#/components/features/settings/git-settings/forgejo-token-input";
import { ConfigureGitHubRepositoriesAnchor } from "#/components/features/settings/git-settings/configure-github-repositories-anchor";
import { InstallSlackAppAnchor } from "#/components/features/settings/git-settings/install-slack-app-anchor";
import DebugStackframeDot from "#/icons/debug-stackframe-dot.svg?react";
import { I18nKey } from "#/i18n/declaration";
import {
  displayErrorToast,
@@ -21,6 +23,7 @@ import { GitSettingInputsSkeleton } from "#/components/features/settings/git-set
import { useAddGitProviders } from "#/hooks/mutation/use-add-git-providers";
import { useUserProviders } from "#/hooks/use-user-providers";
import { ProjectManagementIntegration } from "#/components/features/settings/project-management/project-management-integration";
import { Typography } from "#/ui/typography";

function GitSettingsScreen() {
  const { t } = useTranslation();
@@ -182,6 +185,33 @@ function GitSettingsScreen() {
        </>
      )}

      {shouldRenderExternalConfigureButtons && !isLoading && (
        <>
          <div className="mt-6 flex flex-col gap-4 pb-8">
            <Typography.H3 className="text-xl">
              {t(I18nKey.SETTINGS$GITLAB)}
            </Typography.H3>
            <div className="flex items-center">
              <DebugStackframeDot
                className="w-6 h-6 shrink-0"
                color={isGitLabTokenSet ? "#BCFF8C" : "#FF684E"}
              />
              <Typography.Text
                className="text-sm text-gray-400"
                testId="gitlab-status-text"
              >
                {t(I18nKey.COMMON$STATUS)}:{" "}
                {isGitLabTokenSet
                  ? t(I18nKey.STATUS$CONNECTED)
                  : t(I18nKey.SETTINGS$GITLAB_NOT_CONNECTED)}
              </Typography.Text>
            </div>
            {isGitLabTokenSet && <GitLabWebhookManager />}
          </div>
          <div className="w-1/2 border-b border-gray-200" />
        </>
      )}

      {shouldRenderExternalConfigureButtons && !isLoading && (
        <>
          <div className="pb-1 mt-6 flex flex-col">

@@ -32,6 +32,7 @@ import { LOCAL_STORAGE_KEYS } from "#/utils/local-storage";
import { EmailVerificationGuard } from "#/components/features/guards/email-verification-guard";
import { MaintenanceBanner } from "#/components/features/maintenance/maintenance-banner";
import { cn, isMobileDevice } from "#/utils/utils";
import { useAppTitle } from "#/hooks/use-app-title";

export function ErrorBoundary() {
  const error = useRouteError();
@@ -67,6 +68,7 @@ export function ErrorBoundary() {
}

export default function MainApp() {
  const appTitle = useAppTitle();
  const navigate = useNavigate();
  const { pathname } = useLocation();
  const isOnTosPage = useIsOnTosPage();
@@ -223,6 +225,7 @@ export default function MainApp() {
        isMobileDevice() && "overflow-hidden",
      )}
    >
      <title>{appTitle}</title>
      <Sidebar />

      <div className="flex flex-col w-full h-[calc(100%-50px)] md:h-full gap-3">

@@ -15,7 +15,7 @@ export const DEFAULT_SETTINGS: Settings = {
  remote_runtime_resource_factor: 1,
  provider_tokens_set: {},
  enable_default_condenser: true,
  condenser_max_size: 120,
  condenser_max_size: 240,
  enable_sound_notifications: false,
  user_consents_to_analytics: false,
  enable_proactive_conversation_starters: false,

@@ -27,7 +27,7 @@ export const hasAdvancedSettingsSet = (
  settings.agent !== undefined && settings.agent !== DEFAULT_SETTINGS.agent;
// Default is true, so only check if explicitly disabled
const hasDisabledCondenser = settings.enable_default_condenser === false;
// Check if condenser size differs from default (default is 120)
// Check if condenser size differs from default (default is 240)
const hasCustomCondenserSize =
  settings.condenser_max_size !== undefined &&
  settings.condenser_max_size !== null &&

@@ -7,6 +7,7 @@ import { GitRepository } from "#/types/git";
import { sanitizeQuery } from "#/utils/sanitize-query";
import { PRODUCT_URL } from "#/utils/constants";
import { AgentState } from "#/types/agent-state";
import { I18nKey } from "#/i18n/declaration";

export function cn(...inputs: ClassValue[]) {
  return twMerge(clsx(inputs));
@@ -746,3 +747,91 @@ export const getStatusColor = (options: {
  }
  return "#BCFF8C";
};

interface GetStatusTextArgs {
  isPausing: boolean;
  isTask: boolean;
  taskStatus?: string | null;
  taskDetail?: string | null;
  isStartingStatus: boolean;
  isStopStatus: boolean;
  curAgentState: AgentState;
  errorMessage?: string | null;
  t: (t: string) => string;
}

/**
 * Get the server status text based on agent and task state
 *
 * @param options Configuration object for status text calculation
 * @param options.isPausing Whether the agent is currently pausing
 * @param options.isTask Whether we're polling a task
 * @param options.taskStatus The task status string (e.g., "ERROR", "READY")
 * @param options.taskDetail Optional task-specific detail text
 * @param options.isStartingStatus Whether the conversation is in STARTING state
 * @param options.isStopStatus Whether the conversation is STOPPED
 * @param options.curAgentState The current agent state
 * @param options.errorMessage Optional agent error message
 * @returns Localized human-readable status text
 *
 * @example
 * getStatusText({
 *   isPausing: false,
 *   isTask: true,
 *   taskStatus: "WAITING_FOR_SANDBOX",
 *   taskDetail: null,
 *   isStartingStatus: false,
 *   isStopStatus: false,
 *   curAgentState: AgentState.RUNNING
 * }) // Returns "Waiting For Sandbox"
 */
export function getStatusText({
  isPausing = false,
  isTask,
  taskStatus,
  taskDetail,
  isStartingStatus,
  isStopStatus,
  curAgentState,
  errorMessage,
  t,
}: GetStatusTextArgs): string {
  // Show pausing status
  if (isPausing) {
    return t(I18nKey.COMMON$STOPPING);
  }

  // Show task status if we're polling a task
  if (isTask && taskStatus) {
    if (taskStatus === "ERROR") {
      return taskDetail || t(I18nKey.CONVERSATION$ERROR_STARTING_CONVERSATION);
    }

    if (taskStatus === "READY") {
      return t(I18nKey.CONVERSATION$READY);
    }

    // Format status text: "WAITING_FOR_SANDBOX" -> "Waiting For Sandbox"
    return (
      taskDetail ||
      taskStatus
        .toLowerCase()
        .replace(/_/g, " ")
        .replace(/\b\w/g, (c) => c.toUpperCase())
    );
  }

  if (isStartingStatus) {
    return t(I18nKey.COMMON$STARTING);
  }

  if (isStopStatus) {
    return t(I18nKey.COMMON$SERVER_STOPPED);
  }

  if (curAgentState === AgentState.ERROR) {
    return errorMessage || t(I18nKey.COMMON$ERROR);
  }

  return t(I18nKey.COMMON$RUNNING);
}

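A quick standalone sketch of the fallback formatting step above; this is plain string manipulation with no project APIs involved:

// "WAITING_FOR_SANDBOX" -> "waiting for sandbox" -> "Waiting For Sandbox"
const pretty = "WAITING_FOR_SANDBOX"
  .toLowerCase()
  .replace(/_/g, " ")
  .replace(/\b\w/g, (c) => c.toUpperCase());
console.log(pretty); // "Waiting For Sandbox"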
@@ -8,13 +8,17 @@ import i18n from "i18next";
import { vi } from "vitest";
import { AxiosError } from "axios";

export const useParamsMock = vi.fn(() => ({
  conversationId: "test-conversation-id",
}));

// Mock useParams before importing components
vi.mock("react-router", async () => {
  const actual =
    await vi.importActual<typeof import("react-router")>("react-router");
  return {
    ...actual,
    useParams: () => ({ conversationId: "test-conversation-id" }),
    useParams: useParamsMock,
  };
});

@@ -14,6 +14,7 @@ from openhands.app_server.sandbox.sandbox_models import SandboxStatus
from openhands.integrations.service_types import ProviderType
from openhands.sdk.conversation.state import ConversationExecutionStatus
from openhands.sdk.llm import MetricsSnapshot
from openhands.sdk.utils.models import OpenHandsModel
from openhands.storage.data_models.conversation_metadata import ConversationTrigger


@@ -91,7 +92,7 @@ class AppConversationPage(BaseModel):
    next_page_id: str | None = None


class AppConversationStartRequest(BaseModel):
class AppConversationStartRequest(OpenHandsModel):
    """Start conversation request object.

    Although a user can go directly to the sandbox and start conversations, they
@@ -142,7 +143,7 @@ class AppConversationStartTaskSortOrder(Enum):
    UPDATED_AT_DESC = 'UPDATED_AT_DESC'


class AppConversationStartTask(BaseModel):
class AppConversationStartTask(OpenHandsModel):
    """Object describing the start process for an app conversation.

    Because starting an app conversation can be slow (And can involve starting a sandbox),
@@ -167,7 +168,7 @@ class AppConversationStartTask(BaseModel):
    updated_at: datetime = Field(default_factory=utc_now)


class AppConversationStartTaskPage(BaseModel):
class AppConversationStartTaskPage(OpenHandsModel):
    items: list[AppConversationStartTask]
    next_page_id: str | None = None

@@ -1,4 +1,4 @@
"""Sandboxed Conversation router for OpenHands Server."""
"""Sandboxed Conversation router for OpenHands App Server."""

import asyncio
import logging

@@ -380,7 +380,7 @@ class AppConversationServiceBase(AppConversationService, ABC):
        Returns:
            Configured LLMSummarizingCondenser instance
        """
        # LLMSummarizingCondenser has defaults: max_size=120, keep_first=4
        # LLMSummarizingCondenser SDK defaults: max_size=240, keep_first=2
        condenser_kwargs = {
            'llm': llm.model_copy(
                update={

@@ -72,6 +72,10 @@ from openhands.app_server.user.user_models import UserInfo
from openhands.app_server.utils.docker_utils import (
    replace_localhost_hostname_for_docker,
)
from openhands.app_server.utils.llm_metadata import (
    get_llm_metadata,
    should_set_litellm_extra_body,
)
from openhands.experiments.experiment_manager import ExperimentManagerImpl
from openhands.integrations.provider import ProviderType
from openhands.sdk import Agent, AgentContext, LocalWorkspace
@@ -892,6 +896,63 @@ class LiveStatusAppConversationService(AppConversationServiceBase):

        return agent

    def _update_agent_with_llm_metadata(
        self,
        agent: Agent,
        conversation_id: UUID,
        user_id: str | None,
    ) -> Agent:
        """Update agent's LLM and condenser LLM with litellm_extra_body metadata.

        This adds tracing metadata (conversation_id, user_id, etc.) to the LLM
        for analytics and debugging purposes. Only applies to openhands/ models.

        Args:
            agent: The agent to update
            conversation_id: The conversation ID
            user_id: The user ID (can be None)

        Returns:
            Updated agent with LLM metadata
        """
        updates: dict[str, Any] = {}

        # Update main LLM if it's an openhands model
        if should_set_litellm_extra_body(agent.llm.model):
            llm_metadata = get_llm_metadata(
                model_name=agent.llm.model,
                llm_type=agent.llm.usage_id or 'agent',
                conversation_id=conversation_id,
                user_id=user_id,
            )
            updated_llm = agent.llm.model_copy(
                update={'litellm_extra_body': {'metadata': llm_metadata}}
            )
            updates['llm'] = updated_llm

        # Update condenser LLM if it exists and is an openhands model
        if agent.condenser and hasattr(agent.condenser, 'llm'):
            condenser_llm = agent.condenser.llm
            if should_set_litellm_extra_body(condenser_llm.model):
                condenser_metadata = get_llm_metadata(
                    model_name=condenser_llm.model,
                    llm_type=condenser_llm.usage_id or 'condenser',
                    conversation_id=conversation_id,
                    user_id=user_id,
                )
                updated_condenser_llm = condenser_llm.model_copy(
                    update={'litellm_extra_body': {'metadata': condenser_metadata}}
                )
                updated_condenser = agent.condenser.model_copy(
                    update={'llm': updated_condenser_llm}
                )
                updates['condenser'] = updated_condenser

        # Return updated agent if there are changes
        if updates:
            return agent.model_copy(update=updates)
        return agent

    async def _finalize_conversation_request(
        self,
        agent: Agent,
@@ -930,6 +991,10 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
            user.id, conversation_id, agent
        )

        # Update agent's LLM with litellm_extra_body metadata for tracing
        # This is done after experiment variants to ensure the final LLM config is used
        agent = self._update_agent_with_llm_metadata(agent, conversation_id, user.id)

        # Load and merge skills if remote workspace is available
        if remote_workspace:
            try:

@@ -135,7 +135,7 @@ class SQLAppConversationStartTaskService(AppConversationStartTaskService):
        if has_more:
            rows = rows[:limit]

        items = [AppConversationStartTask(**row2dict(row)) for row in rows]
        items = [AppConversationStartTask.model_validate(row2dict(row)) for row in rows]

        # Calculate next page ID
        next_page_id = None
@@ -196,7 +196,7 @@ class SQLAppConversationStartTaskService(AppConversationStartTaskService):
        # Return tasks in the same order as requested, with None for missing ones
        return [
            (
                AppConversationStartTask(**row2dict(tasks_by_id[task_id]))
                AppConversationStartTask.model_validate(row2dict(tasks_by_id[task_id]))
                if task_id in tasks_by_id
                else None
            )
@@ -218,7 +218,7 @@ class SQLAppConversationStartTaskService(AppConversationStartTaskService):
        result = await self.session.execute(query)
        stored_task = result.scalar_one_or_none()
        if stored_task:
            return AppConversationStartTask(**row2dict(stored_task))
            return AppConversationStartTask.model_validate(row2dict(stored_task))
        return None

    async def save_app_conversation_start_task(

@@ -82,7 +82,7 @@ def get_openhands_provider_base_url() -> str | None:

def _get_default_lifespan():
    # Check legacy parameters for saas mode. If we are in SAAS mode do not apply
    # OSS alembic migrations
    # OpenHands alembic migrations
    if 'saas' in (os.getenv('OPENHANDS_CONFIG_CLS') or '').lower():
        return None
    return OssAppLifespanService()
@@ -133,6 +133,9 @@ def config_from_env() -> AppServerConfig:
    from openhands.app_server.event.filesystem_event_service import (
        FilesystemEventServiceInjector,
    )
    from openhands.app_server.event.google_cloud_event_service import (
        GoogleCloudEventServiceInjector,
    )
    from openhands.app_server.event_callback.sql_event_callback_service import (
        SQLEventCallbackServiceInjector,
    )
@@ -161,7 +164,13 @@ def config_from_env() -> AppServerConfig:
    config: AppServerConfig = from_env(AppServerConfig, 'OH')  # type: ignore

    if config.event is None:
        config.event = FilesystemEventServiceInjector()
        if os.environ.get('FILE_STORE') == 'google_cloud':
            # Legacy V0 google cloud storage configuration
            config.event = GoogleCloudEventServiceInjector(
                bucket_name=os.environ.get('FILE_STORE_PATH')
            )
        else:
            config.event = FilesystemEventServiceInjector()

    if config.event_callback is None:
        config.event_callback = SQLEventCallbackServiceInjector()

@@ -1,4 +1,4 @@
"""Event router for OpenHands Server."""
"""Event router for OpenHands App Server."""

from datetime import datetime
from typing import Annotated
@@ -12,7 +12,7 @@ from openhands.app_server.event.event_service import EventService
from openhands.app_server.event_callback.event_callback_models import EventKind
from openhands.sdk import Event

router = APIRouter(prefix='/events', tags=['Events'])
router = APIRouter(prefix='/conversation/{conversation_id}/events', tags=['Events'])
event_service_dependency = depends_event_service()

@@ -21,10 +21,7 @@ event_service_dependency = depends_event_service()

@router.get('/search')
async def search_events(
    conversation_id__eq: Annotated[
        str | None,
        Query(title='Optional filter by conversation ID'),
    ] = None,
    conversation_id: str,
    kind__eq: Annotated[
        EventKind | None,
        Query(title='Optional filter by event kind'),
@@ -55,7 +52,7 @@ async def search_events(
    assert limit > 0
    assert limit <= 100
    return await event_service.search_events(
        conversation_id__eq=UUID(conversation_id__eq) if conversation_id__eq else None,
        conversation_id=UUID(conversation_id),
        kind__eq=kind__eq,
        timestamp__gte=timestamp__gte,
        timestamp__lt=timestamp__lt,
@@ -67,10 +64,7 @@ async def search_events(

@router.get('/count')
async def count_events(
    conversation_id__eq: Annotated[
        str | None,
        Query(title='Optional filter by conversation ID'),
    ] = None,
    conversation_id: str,
    kind__eq: Annotated[
        EventKind | None,
        Query(title='Optional filter by event kind'),
@@ -83,28 +77,25 @@ async def count_events(
        datetime | None,
        Query(title='Optional filter by timestamp less than'),
    ] = None,
    sort_order: Annotated[
        EventSortOrder,
        Query(title='Sort order for results'),
    ] = EventSortOrder.TIMESTAMP,
    event_service: EventService = event_service_dependency,
) -> int:
    """Count events matching the given filters."""
    return await event_service.count_events(
        conversation_id__eq=UUID(conversation_id__eq) if conversation_id__eq else None,
        conversation_id=UUID(conversation_id),
        kind__eq=kind__eq,
        timestamp__gte=timestamp__gte,
        timestamp__lt=timestamp__lt,
        sort_order=sort_order,
    )


@router.get('')
async def batch_get_events(
    conversation_id: str,
    id: Annotated[list[str], Query()],
    event_service: EventService = event_service_dependency,
) -> list[Event | None]:
    """Get a batch of events given their ids, returning null for any missing event."""
    event_ids = [UUID(id_) for id_ in id]
    assert len(id) <= 100
    events = await event_service.batch_get_events(id)
    events = await event_service.batch_get_events(UUID(conversation_id), event_ids)
    return events

|
||||
"""Event Service for getting events."""
|
||||
|
||||
@abstractmethod
|
||||
async def get_event(self, event_id: str) -> Event | None:
|
||||
async def get_event(self, conversation_id: UUID, event_id: UUID) -> Event | None:
|
||||
"""Given an id, retrieve an event."""
|
||||
|
||||
@abstractmethod
|
||||
async def search_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
conversation_id: UUID,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
@@ -36,11 +36,10 @@ class EventService(ABC):
|
||||
@abstractmethod
|
||||
async def count_events(
|
||||
self,
|
||||
conversation_id__eq: UUID | None = None,
|
||||
conversation_id: UUID,
|
||||
kind__eq: EventKind | None = None,
|
||||
timestamp__gte: datetime | None = None,
|
||||
timestamp__lt: datetime | None = None,
|
||||
sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
|
||||
) -> int:
|
||||
"""Count events matching the given filters."""
|
||||
|
||||
@@ -48,10 +47,12 @@ class EventService(ABC):
|
||||
async def save_event(self, conversation_id: UUID, event: Event):
|
||||
"""Save an event. Internal method intended not be part of the REST api."""
|
||||
|
||||
async def batch_get_events(self, event_ids: list[str]) -> list[Event | None]:
|
||||
async def batch_get_events(
|
||||
self, conversation_id: UUID, event_ids: list[UUID]
|
||||
) -> list[Event | None]:
|
||||
"""Given a list of ids, get events (Or none for any which were not found)."""
|
||||
return await asyncio.gather(
|
||||
*[self.get_event(event_id) for event_id in event_ids]
|
||||
*[self.get_event(conversation_id, event_id) for event_id in event_ids]
|
||||
)
|
||||
|
||||
|
||||
|
||||
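
Note: to see the shape of the new contract in isolation, here is a toy in-memory implementation of the conversation-scoped methods. Everything below is a stand-in (the Event class is not the SDK model); it only illustrates the (conversation_id, event_id) keying and the gather-based batch default.

# Toy in-memory EventService mirroring the new conversation-scoped signatures.
import asyncio
from uuid import UUID, uuid4

class Event:
    # Stand-in for openhands.sdk.Event, for illustration only.
    def __init__(self, id: UUID):
        self.id = id

class InMemoryEventService:
    def __init__(self) -> None:
        # Events are keyed by (conversation_id, event_id).
        self._events: dict[tuple[UUID, UUID], Event] = {}

    async def save_event(self, conversation_id: UUID, event: Event) -> None:
        self._events[(conversation_id, event.id)] = event

    async def get_event(self, conversation_id: UUID, event_id: UUID) -> Event | None:
        return self._events.get((conversation_id, event_id))

    async def batch_get_events(
        self, conversation_id: UUID, event_ids: list[UUID]
    ) -> list[Event | None]:
        # Same fan-out the ABC's default implementation performs.
        return await asyncio.gather(
            *[self.get_event(conversation_id, eid) for eid in event_ids]
        )

async def _demo() -> None:
    svc = InMemoryEventService()
    cid, eid = uuid4(), uuid4()
    await svc.save_event(cid, Event(eid))
    found, missing = await svc.batch_get_events(cid, [eid, uuid4()])
    assert found is not None and missing is None

asyncio.run(_demo())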
openhands/app_server/event/event_service_base.py (new file, 165 lines)
@@ -0,0 +1,165 @@
import asyncio
from abc import ABC, abstractmethod
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from uuid import UUID

from openhands.agent_server.models import EventPage, EventSortOrder
from openhands.agent_server.sockets import page_iterator
from openhands.app_server.app_conversation.app_conversation_info_service import (
    AppConversationInfoService,
)
from openhands.app_server.app_conversation.app_conversation_models import (
    AppConversationInfo,
)
from openhands.app_server.event.event_service import EventService
from openhands.app_server.event_callback.event_callback_models import EventKind
from openhands.sdk import Event


@dataclass
class EventServiceBase(EventService, ABC):
    """Event Service for getting events - the only permission check on events
    is the strict prefix used for storage.
    """

    prefix: Path
    user_id: str | None
    app_conversation_info_service: AppConversationInfoService | None
    app_conversation_info_load_tasks: dict[
        UUID, asyncio.Task[AppConversationInfo | None]
    ]

    @abstractmethod
    def _load_event(self, path: Path) -> Event | None:
        """Get the event at the path given."""

    @abstractmethod
    def _store_event(self, path: Path, event: Event):
        """Store the event given at the path given."""

    @abstractmethod
    def _search_paths(self, prefix: Path) -> list[Path]:
        """Search paths."""

    async def get_conversation_path(self, conversation_id: UUID) -> Path:
        """Get a path for a conversation. Ensure user_id is included if possible."""
        path = self.prefix
        if self.user_id:
            path /= self.user_id
        elif self.app_conversation_info_service:
            task = self.app_conversation_info_load_tasks.get(conversation_id)
            if task is None:
                task = asyncio.create_task(
                    self.app_conversation_info_service.get_app_conversation_info(
                        conversation_id
                    )
                )
                self.app_conversation_info_load_tasks[conversation_id] = task
            conversation_info = await task
            if conversation_info and conversation_info.created_by_user_id:
                path /= conversation_info.created_by_user_id
        path = path / 'v1_conversations' / conversation_id.hex
        return path

    async def get_event(self, conversation_id: UUID, event_id: UUID) -> Event | None:
        """Get the event with the given id, or None if not found."""
        conversation_path = await self.get_conversation_path(conversation_id)
        path = conversation_path / f'{event_id.hex}.json'
        loop = asyncio.get_running_loop()
        event: Event | None = await loop.run_in_executor(None, self._load_event, path)
        return event

    async def search_events(
        self,
        conversation_id: UUID,
        kind__eq: EventKind | None = None,
        timestamp__gte: datetime | None = None,
        timestamp__lt: datetime | None = None,
        sort_order: EventSortOrder = EventSortOrder.TIMESTAMP,
        page_id: str | None = None,
        limit: int = 100,
    ) -> EventPage:
        """Search events matching the given filters."""
        loop = asyncio.get_running_loop()
        prefix = await self.get_conversation_path(conversation_id)
        paths = await loop.run_in_executor(None, self._search_paths, prefix)
        events = await asyncio.gather(
            *[loop.run_in_executor(None, self._load_event, path) for path in paths]
        )
        items = []
        for event in events:
            if not event:
                continue
            if kind__eq and event.kind != kind__eq:
                continue
            if timestamp__gte and event.timestamp < timestamp__gte:
                continue
            if timestamp__lt and event.timestamp >= timestamp__lt:
                continue
            items.append(event)

        if sort_order:
            items.sort(
                key=lambda e: e.timestamp,
                reverse=(sort_order == EventSortOrder.TIMESTAMP_DESC),
            )

        # Paginate the filtered items; next_page_id stays None on the last page.
        next_page_id = None
        start_offset = 0
        if page_id:
            start_offset = int(page_id)
        items = items[start_offset:]
        if len(items) > limit:
            items = items[:limit]
            next_page_id = str(start_offset + limit)

        return EventPage(items=items, next_page_id=next_page_id)

    async def count_events(
        self,
        conversation_id: UUID,
        kind__eq: EventKind | None = None,
        timestamp__gte: datetime | None = None,
        timestamp__lt: datetime | None = None,
    ) -> int:
        """Count events matching the given filters."""
        # If we are not filtering, we can simply count the paths
        if not (kind__eq or timestamp__gte or timestamp__lt):
            conversation_path = await self.get_conversation_path(conversation_id)
            result = await self._count_events_no_filter(conversation_path)
            return result

        events = page_iterator(
            self.search_events,
            conversation_id=conversation_id,
            kind__eq=kind__eq,
            timestamp__gte=timestamp__gte,
            timestamp__lt=timestamp__lt,
        )
        result = 0
        async for _ in events:
            result += 1
        return result

    async def _count_events_no_filter(self, conversation_path: Path) -> int:
        paths = page_iterator(self._search_paths, conversation_path)
        result = 0
        async for _ in paths:
            result += 1
        return result

    async def save_event(self, conversation_id: UUID, event: Event):
        if isinstance(event.id, str):
            id_hex = event.id.replace('-', '')
        else:
            id_hex = event.id.hex
        path = (await self.get_conversation_path(conversation_id)) / f'{id_hex}.json'
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, self._store_event, path, event)

    async def batch_get_events(
        self, conversation_id: UUID, event_ids: list[UUID]
    ) -> list[Event | None]:
        """Given a list of ids, get events (Or none for any which were not found)."""
        return await asyncio.gather(
            *[self.get_event(conversation_id, event_id) for event_id in event_ids]
        )
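
Note: concretely, when a user_id is known the service reads and writes paths shaped like the following. The prefix and the ids are made-up values for illustration.

# Illustration of the path get_conversation_path builds when user_id is set.
from pathlib import Path
from uuid import UUID

prefix = Path('/var/openhands')  # assumed persistence_dir
user_id = 'user-123'
conversation_id = UUID('0123456789abcdef0123456789abcdef')
event_id = UUID('fedcba9876543210fedcba9876543210')

conversation_path = prefix / user_id / 'v1_conversations' / conversation_id.hex
event_path = conversation_path / f'{event_id.hex}.json'
print(event_path)
# /var/openhands/user-123/v1_conversations/0123456789abcdef0123456789abcdef/fedcba9876543210fedcba9876543210.json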
openhands/app_server/event/event_store.py (new file, 0 lines)
@@ -1,87 +1,44 @@
 """Filesystem-based EventService implementation."""
 
-import json
+import glob
 import logging
 from dataclasses import dataclass
 from pathlib import Path
 from typing import AsyncGenerator
 from uuid import UUID
 
 from fastapi import Request
 
-from openhands.app_server.app_conversation.app_conversation_info_service import (
-    AppConversationInfoService,
-)
-from openhands.app_server.errors import OpenHandsError
 from openhands.app_server.event.event_service import EventService, EventServiceInjector
-from openhands.app_server.event.filesystem_event_service_base import (
-    FilesystemEventServiceBase,
-)
+from openhands.app_server.event.event_service_base import EventServiceBase
 from openhands.app_server.services.injector import InjectorState
 from openhands.sdk import Event
 
 _logger = logging.getLogger(__name__)
 
 
 @dataclass
-class FilesystemEventService(FilesystemEventServiceBase, EventService):
-    """Filesystem-based implementation of EventService.
-
-    Events are stored in files with the naming format:
-    {conversation_id}/{YYYYMMDDHHMMSS}_{kind}_{id.hex}
-
-    Uses an AppConversationInfoService to lookup conversations
-    """
-
-    app_conversation_info_service: AppConversationInfoService
-    events_dir: Path
-
-    def _ensure_events_dir(self, conversation_id: UUID | None = None) -> Path:
-        """Ensure the events directory exists."""
-        if conversation_id:
-            events_path = self.events_dir / str(conversation_id)
-        else:
-            events_path = self.events_dir
-        events_path.mkdir(parents=True, exist_ok=True)
-        return events_path
-
-    def _save_event_to_file(self, conversation_id: UUID, event: Event) -> None:
-        """Save an event to a file."""
-        events_path = self._ensure_events_dir(conversation_id)
-        filename = self._get_event_filename(conversation_id, event)
-        filepath = events_path / filename
-
-        with open(filepath, 'w') as f:
-            # Use model_dump with mode='json' to handle UUID serialization
-            data = event.model_dump(mode='json')
-            f.write(json.dumps(data, indent=2))
-
-    async def save_event(self, conversation_id: UUID, event: Event):
-        """Save an event. Internal method intended not to be part of the REST api."""
-        conversation = (
-            await self.app_conversation_info_service.get_app_conversation_info(
-                conversation_id
-            )
-        )
-        if not conversation:
-            # This is either an illegal state or somebody is trying to hack
-            raise OpenHandsError('No such conversation: {conversaiont_id}')
-        self._save_event_to_file(conversation_id, event)
-
-    async def _filter_files_by_conversation(self, files: list[Path]) -> list[Path]:
-        conversation_ids = list(self._get_conversation_ids(files))
-        conversations = (
-            await self.app_conversation_info_service.batch_get_app_conversation_info(
-                conversation_ids
-            )
-        )
-        permitted_conversation_ids = set()
-        for conversation in conversations:
-            if conversation:
-                permitted_conversation_ids.add(conversation.id)
-        result = [
-            file
-            for file in files
-            if self._get_conversation_id(file) in permitted_conversation_ids
-        ]
-        return result
+class FilesystemEventService(EventServiceBase):
+    """Event service based on file system"""
+
+    limit: int = 500
+
+    def _load_event(self, path: Path) -> Event | None:
+        try:
+            content = path.read_text()
+            event = Event.model_validate_json(content)
+            return event
+        except Exception:
+            _logger.exception('Error reading event', stack_info=True)
+            return None
+
+    def _store_event(self, path: Path, event: Event):
+        path.parent.mkdir(parents=True, exist_ok=True)
+        content = event.model_dump_json(indent=2)
+        path.write_text(content)
+
+    def _search_paths(self, prefix: Path, page_id: str | None = None) -> list[Path]:
+        search_path = f'{prefix}*'
+        files = glob.glob(str(search_path))
+        paths = [Path(file) for file in files]
+        return paths
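
Note: as a self-contained sanity check on the same layout, the round trip below uses plain dicts in place of the SDK Event model; directory names are made up.

# Round trip over a per-conversation JSON event directory.
import glob
import json
from pathlib import Path
from tempfile import TemporaryDirectory

with TemporaryDirectory() as tmp:
    conv_dir = Path(tmp) / 'v1_conversations' / 'abc123'
    conv_dir.mkdir(parents=True)
    for i in range(3):
        (conv_dir / f'{i:032x}.json').write_text(json.dumps({'n': i}, indent=2))

    # Prefix search in the spirit of _search_paths.
    paths = sorted(Path(p) for p in glob.glob(f'{conv_dir}/*'))
    events = [json.loads(p.read_text()) for p in paths]
    print(events)  # [{'n': 0}, {'n': 1}, {'n': 2}]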
@@ -91,14 +48,22 @@ class FilesystemEventServiceInjector(EventServiceInjector):
         from openhands.app_server.config import (
             get_app_conversation_info_service,
             get_global_config,
+            get_user_context,
         )
 
-        async with get_app_conversation_info_service(
-            state, request
-        ) as app_conversation_info_service:
-            persistence_dir = get_global_config().persistence_dir
+        async with (
+            get_user_context(state, request) as user_context,
+            get_app_conversation_info_service(
+                state, request
+            ) as app_conversation_info_service,
+        ):
+            # Set up a service with a path {persistence_dir}/{user_id}/v1_conversations
+            prefix = get_global_config().persistence_dir
+            user_id = await user_context.get_user_id()
+
             yield FilesystemEventService(
+                prefix=prefix,
+                user_id=user_id,
                 app_conversation_info_service=app_conversation_info_service,
-                events_dir=persistence_dir / 'v1' / 'events',
+                app_conversation_info_load_tasks={},
             )
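
Note: the injector rewrite relies on parenthesized multi-context async with (Python 3.10+). A standalone sketch of the same composition pattern follows; every name in it is a stand-in, not the project's API.

# Composing two async context managers in one parenthesized async-with.
import asyncio
from contextlib import asynccontextmanager

class _UserContext:
    async def get_user_id(self) -> str | None:
        return 'user-123'

@asynccontextmanager
async def get_user_context():
    yield _UserContext()

@asynccontextmanager
async def get_info_service():
    yield object()

async def main() -> None:
    async with (
        get_user_context() as user_context,
        get_info_service() as info_service,
    ):
        user_id = await user_context.get_user_id()
        print(user_id, info_service is not None)

asyncio.run(main())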
Some files were not shown because too many files have changed in this diff.