From d57880f84981829421f66fbd74944ff95c77b739 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Sat, 13 Dec 2025 08:16:41 -0700 Subject: [PATCH 01/80] Agent server image from env (#12003) Co-authored-by: openhands --- .../app_server/sandbox/docker_sandbox_spec_service.py | 4 ++-- .../app_server/sandbox/process_sandbox_spec_service.py | 4 ++-- .../app_server/sandbox/remote_sandbox_spec_service.py | 4 ++-- openhands/app_server/sandbox/sandbox_spec_service.py | 9 +++++++++ 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/openhands/app_server/sandbox/docker_sandbox_spec_service.py b/openhands/app_server/sandbox/docker_sandbox_spec_service.py index b7a9553e7e..063b4e8a96 100644 --- a/openhands/app_server/sandbox/docker_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/docker_sandbox_spec_service.py @@ -14,9 +14,9 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -34,7 +34,7 @@ def get_docker_client() -> docker.DockerClient: def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['--port', '8000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', diff --git a/openhands/app_server/sandbox/process_sandbox_spec_service.py b/openhands/app_server/sandbox/process_sandbox_spec_service.py index b5476669f7..4e2e88a2f9 100644 --- a/openhands/app_server/sandbox/process_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/process_sandbox_spec_service.py @@ -10,9 +10,9 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,7 +20,7 @@ from openhands.app_server.services.injector import InjectorState def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['python', '-m', 'openhands.agent_server'], initial_env={ # VSCode disabled for now diff --git a/openhands/app_server/sandbox/remote_sandbox_spec_service.py b/openhands/app_server/sandbox/remote_sandbox_spec_service.py index a2a7c58099..6228338d72 100644 --- a/openhands/app_server/sandbox/remote_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_spec_service.py @@ -10,9 +10,9 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( SandboxSpecInfo, ) from openhands.app_server.sandbox.sandbox_spec_service import ( - AGENT_SERVER_IMAGE, SandboxSpecService, SandboxSpecServiceInjector, + get_default_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,7 +20,7 @@ from openhands.app_server.services.injector import InjectorState def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=AGENT_SERVER_IMAGE, + id=get_default_agent_server_image(), command=['/usr/local/bin/openhands-agent-server', '--port', '60000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', diff --git a/openhands/app_server/sandbox/sandbox_spec_service.py b/openhands/app_server/sandbox/sandbox_spec_service.py index d9de63adbb..edaecc1b76 100644 --- a/openhands/app_server/sandbox/sandbox_spec_service.py +++ b/openhands/app_server/sandbox/sandbox_spec_service.py @@ -1,4 +1,5 @@ import asyncio +import os from abc import ABC, abstractmethod from openhands.app_server.errors import SandboxError @@ -57,3 +58,11 @@ class SandboxSpecServiceInjector( DiscriminatedUnionMixin, Injector[SandboxSpecService], ABC ): pass + + +def get_default_agent_server_image(): + agent_server_image_repository = os.getenv('AGENT_SERVER_IMAGE_REPOSITORY') + agent_server_image_tag = os.getenv('AGENT_SERVER_IMAGE_TAG') + if agent_server_image_repository and agent_server_image_tag: + return f'{agent_server_image_repository}:{agent_server_image_tag}' + return AGENT_SERVER_IMAGE From eb9a22ef7e13057c686919e62f919108e0b6feea Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Sun, 14 Dec 2025 23:30:49 +0700 Subject: [PATCH 02/80] fix(backend): unable to use custom mcp servers (v1 conversations) (#12038) --- .../live_status_app_conversation_service.py | 254 ++++++++-- ...st_live_status_app_conversation_service.py | 466 +++++++++++++++++- 2 files changed, 650 insertions(+), 70 deletions(-) diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index dd4b2dd499..e7641ad487 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -585,6 +585,204 @@ class LiveStatusAppConversationService(AppConversationServiceBase): return secrets + def _configure_llm(self, user: UserInfo, llm_model: str | None) -> LLM: + """Configure LLM settings. + + Args: + user: User information containing LLM preferences + llm_model: Optional specific model to use, falls back to user default + + Returns: + Configured LLM instance + """ + model = llm_model or user.llm_model + base_url = user.llm_base_url + if model and model.startswith('openhands/'): + base_url = user.llm_base_url or self.openhands_provider_base_url + + return LLM( + model=model, + base_url=base_url, + api_key=user.llm_api_key, + usage_id='agent', + ) + + async def _get_tavily_api_key(self, user: UserInfo) -> str | None: + """Get Tavily search API key, prioritizing user's key over service key. + + Args: + user: User information + + Returns: + Tavily API key if available, None otherwise + """ + # Get the actual API key values, prioritizing user's key over service key + user_search_key = None + if user.search_api_key: + key_value = user.search_api_key.get_secret_value() + if key_value and key_value.strip(): + user_search_key = key_value + + service_tavily_key = None + if self.tavily_api_key: + # tavily_api_key is already a string (extracted in the factory method) + if self.tavily_api_key.strip(): + service_tavily_key = self.tavily_api_key + + return user_search_key or service_tavily_key + + async def _add_system_mcp_servers( + self, mcp_servers: dict[str, Any], user: UserInfo + ) -> None: + """Add system-generated MCP servers (default OpenHands server and Tavily). + + Args: + mcp_servers: Dictionary to add servers to + user: User information for API keys + """ + if not self.web_url: + return + + # Add default OpenHands MCP server + mcp_url = f'{self.web_url}/mcp/mcp' + mcp_servers['default'] = {'url': mcp_url} + + # Add API key if available + mcp_api_key = await self.user_context.get_mcp_api_key() + if mcp_api_key: + mcp_servers['default']['headers'] = { + 'X-Session-API-Key': mcp_api_key, + } + + # Add Tavily search if API key is available + tavily_api_key = await self._get_tavily_api_key(user) + if tavily_api_key: + _logger.info('Adding search engine to MCP config') + mcp_servers['tavily'] = { + 'url': f'https://mcp.tavily.com/mcp/?tavilyApiKey={tavily_api_key}' + } + else: + _logger.info('No search engine API key found, skipping search engine') + + def _add_custom_sse_servers( + self, mcp_servers: dict[str, Any], sse_servers: list + ) -> None: + """Add custom SSE MCP servers from user configuration. + + Args: + mcp_servers: Dictionary to add servers to + sse_servers: List of SSE server configurations + """ + for sse_server in sse_servers: + server_config = { + 'url': sse_server.url, + 'transport': 'sse', + } + if sse_server.api_key: + server_config['headers'] = { + 'Authorization': f'Bearer {sse_server.api_key}' + } + + # Generate unique server name using UUID + # TODO: Let the users specify the server name + server_name = f'sse_{uuid4().hex[:8]}' + mcp_servers[server_name] = server_config + _logger.debug( + f'Added custom SSE server: {server_name} for {sse_server.url}' + ) + + def _add_custom_shttp_servers( + self, mcp_servers: dict[str, Any], shttp_servers: list + ) -> None: + """Add custom SHTTP MCP servers from user configuration. + + Args: + mcp_servers: Dictionary to add servers to + shttp_servers: List of SHTTP server configurations + """ + for shttp_server in shttp_servers: + server_config = { + 'url': shttp_server.url, + 'transport': 'streamable-http', + } + if shttp_server.api_key: + server_config['headers'] = { + 'Authorization': f'Bearer {shttp_server.api_key}' + } + if shttp_server.timeout: + server_config['timeout'] = shttp_server.timeout + + # Generate unique server name using UUID + # TODO: Let the users specify the server name + server_name = f'shttp_{uuid4().hex[:8]}' + mcp_servers[server_name] = server_config + _logger.debug( + f'Added custom SHTTP server: {server_name} for {shttp_server.url}' + ) + + def _add_custom_stdio_servers( + self, mcp_servers: dict[str, Any], stdio_servers: list + ) -> None: + """Add custom STDIO MCP servers from user configuration. + + Args: + mcp_servers: Dictionary to add servers to + stdio_servers: List of STDIO server configurations + """ + for stdio_server in stdio_servers: + server_config = { + 'command': stdio_server.command, + 'args': stdio_server.args, + } + if stdio_server.env: + server_config['env'] = stdio_server.env + + # STDIO servers have an explicit name field + mcp_servers[stdio_server.name] = server_config + _logger.debug(f'Added custom STDIO server: {stdio_server.name}') + + def _merge_custom_mcp_config( + self, mcp_servers: dict[str, Any], user: UserInfo + ) -> None: + """Merge custom MCP configuration from user settings. + + Args: + mcp_servers: Dictionary to add servers to + user: User information containing custom MCP config + """ + if not user.mcp_config: + return + + try: + sse_count = len(user.mcp_config.sse_servers) + shttp_count = len(user.mcp_config.shttp_servers) + stdio_count = len(user.mcp_config.stdio_servers) + + _logger.info( + f'Loading custom MCP config from user settings: ' + f'{sse_count} SSE, {shttp_count} SHTTP, {stdio_count} STDIO servers' + ) + + # Add each type of custom server + self._add_custom_sse_servers(mcp_servers, user.mcp_config.sse_servers) + self._add_custom_shttp_servers(mcp_servers, user.mcp_config.shttp_servers) + self._add_custom_stdio_servers(mcp_servers, user.mcp_config.stdio_servers) + + _logger.info( + f'Successfully merged custom MCP config: added {sse_count} SSE, ' + f'{shttp_count} SHTTP, and {stdio_count} STDIO servers' + ) + + except Exception as e: + _logger.error( + f'Error loading custom MCP config from user settings: {e}', + exc_info=True, + ) + # Continue with system config only, don't fail conversation startup + _logger.warning( + 'Continuing with system-generated MCP config only due to custom config error' + ) + async def _configure_llm_and_mcp( self, user: UserInfo, llm_model: str | None ) -> tuple[LLM, dict]: @@ -598,56 +796,20 @@ class LiveStatusAppConversationService(AppConversationServiceBase): Tuple of (configured LLM instance, MCP config dictionary) """ # Configure LLM - model = llm_model or user.llm_model - base_url = user.llm_base_url - if model and model.startswith('openhands/'): - base_url = user.llm_base_url or self.openhands_provider_base_url - llm = LLM( - model=model, - base_url=base_url, - api_key=user.llm_api_key, - usage_id='agent', - ) + llm = self._configure_llm(user, llm_model) - # Configure MCP - mcp_config: dict[str, Any] = {} - if self.web_url: - mcp_url = f'{self.web_url}/mcp/mcp' - mcp_config = { - 'default': { - 'url': mcp_url, - } - } + # Configure MCP - SDK expects format: {'mcpServers': {'server_name': {...}}} + mcp_servers: dict[str, Any] = {} - # Add API key if available - mcp_api_key = await self.user_context.get_mcp_api_key() - if mcp_api_key: - mcp_config['default']['headers'] = { - 'X-Session-API-Key': mcp_api_key, - } + # Add system-generated servers (default + tavily) + await self._add_system_mcp_servers(mcp_servers, user) - # Get the actual API key values, prioritizing user's key over service key - user_search_key = None - if user.search_api_key: - key_value = user.search_api_key.get_secret_value() - if key_value and key_value.strip(): - user_search_key = key_value + # Merge custom servers from user settings + self._merge_custom_mcp_config(mcp_servers, user) - service_tavily_key = None - if self.tavily_api_key: - # tavily_api_key is already a string (extracted in the factory method) - if self.tavily_api_key.strip(): - service_tavily_key = self.tavily_api_key - - tavily_api_key = user_search_key or service_tavily_key - - if tavily_api_key: - _logger.info('Adding search engine to MCP config') - mcp_config['tavily'] = { - 'url': f'https://mcp.tavily.com/mcp/?tavilyApiKey={tavily_api_key}' - } - else: - _logger.info('No search engine API key found, skipping search engine') + # Wrap in the mcpServers structure required by the SDK + mcp_config = {'mcpServers': mcp_servers} if mcp_servers else {} + _logger.info(f'Final MCP configuration: {mcp_config}') return llm, mcp_config diff --git a/tests/unit/app_server/test_live_status_app_conversation_service.py b/tests/unit/app_server/test_live_status_app_conversation_service.py index 273d79ca25..6a9821b9f3 100644 --- a/tests/unit/app_server/test_live_status_app_conversation_service.py +++ b/tests/unit/app_server/test_live_status_app_conversation_service.py @@ -68,6 +68,7 @@ class TestLiveStatusAppConversationService: self.mock_user.search_api_key = None # Default to None self.mock_user.condenser_max_size = None # Default to None self.mock_user.llm_base_url = 'https://api.openai.com/v1' + self.mock_user.mcp_config = None # Default to None to avoid error handling path # Mock sandbox self.mock_sandbox = Mock(spec=SandboxInfo) @@ -239,9 +240,16 @@ class TestLiveStatusAppConversationService: assert llm.api_key.get_secret_value() == self.mock_user.llm_api_key assert llm.usage_id == 'agent' - assert 'default' in mcp_config - assert mcp_config['default']['url'] == 'https://test.example.com/mcp/mcp' - assert mcp_config['default']['headers']['X-Session-API-Key'] == 'mcp_api_key' + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert ( + mcp_config['mcpServers']['default']['url'] + == 'https://test.example.com/mcp/mcp' + ) + assert ( + mcp_config['mcpServers']['default']['headers']['X-Session-API-Key'] + == 'mcp_api_key' + ) @pytest.mark.asyncio async def test_configure_llm_and_mcp_openhands_model_prefers_user_base_url(self): @@ -320,8 +328,9 @@ class TestLiveStatusAppConversationService: # Assert assert llm.model == self.mock_user.llm_model - assert 'default' in mcp_config - assert 'headers' not in mcp_config['default'] + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'headers' not in mcp_config['mcpServers']['default'] @pytest.mark.asyncio async def test_configure_llm_and_mcp_without_web_url(self): @@ -354,10 +363,11 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'default' in mcp_config - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' ) @@ -375,10 +385,11 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'default' in mcp_config - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' ) @@ -399,9 +410,10 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' ) @@ -420,8 +432,9 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'default' in mcp_config - assert 'tavily' not in mcp_config + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' not in mcp_config['mcpServers'] @pytest.mark.asyncio async def test_configure_llm_and_mcp_saas_mode_no_tavily_without_user_key(self): @@ -443,8 +456,9 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'default' in mcp_config - assert 'tavily' not in mcp_config + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' not in mcp_config['mcpServers'] @pytest.mark.asyncio async def test_configure_llm_and_mcp_saas_mode_with_user_search_key(self): @@ -467,10 +481,11 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'default' in mcp_config - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'default' in mcp_config['mcpServers'] + assert 'tavily' in mcp_config['mcpServers'] assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=user_search_key' ) @@ -491,10 +506,11 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] # Should fall back to env key since user key is empty assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' ) @@ -515,10 +531,11 @@ class TestLiveStatusAppConversationService: # Assert assert isinstance(llm, LLM) - assert 'tavily' in mcp_config + assert 'mcpServers' in mcp_config + assert 'tavily' in mcp_config['mcpServers'] # Should fall back to env key since user key is whitespace only assert ( - mcp_config['tavily']['url'] + mcp_config['mcpServers']['tavily']['url'] == 'https://mcp.tavily.com/mcp/?tavilyApiKey=env_tavily_key' ) @@ -824,3 +841,404 @@ class TestLiveStatusAppConversationService: secrets=mock_secrets, ) self.service._finalize_conversation_request.assert_called_once() + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_sse_servers(self): + """Test _configure_llm_and_mcp merges custom SSE servers with UUID-based names.""" + # Arrange + + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://linear.app/sse', api_key='linear_key'), + MCPSSEServerConfig(url='https://notion.com/sse'), + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + assert 'mcpServers' in mcp_config + + # Should have default server + 2 custom SSE servers + mcp_servers = mcp_config['mcpServers'] + assert 'default' in mcp_servers + + # Find SSE servers (they have sse_ prefix) + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + assert len(sse_servers) == 2 + + # Verify SSE server configurations + for server_name, server_config in sse_servers.items(): + assert server_name.startswith('sse_') + assert len(server_name) > 4 # Has UUID suffix + assert 'url' in server_config + assert 'transport' in server_config + assert server_config['transport'] == 'sse' + + # Check if this is the Linear server (has headers) + if 'headers' in server_config: + assert server_config['headers']['Authorization'] == 'Bearer linear_key' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_shttp_servers(self): + """Test _configure_llm_and_mcp merges custom SHTTP servers with timeout.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSHTTPServerConfig + + self.mock_user.mcp_config = MCPConfig( + shttp_servers=[ + MCPSHTTPServerConfig( + url='https://example.com/mcp', + api_key='test_key', + timeout=120, + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + + # Find SHTTP servers + shttp_servers = {k: v for k, v in mcp_servers.items() if k.startswith('shttp_')} + assert len(shttp_servers) == 1 + + server_config = list(shttp_servers.values())[0] + assert server_config['url'] == 'https://example.com/mcp' + assert server_config['transport'] == 'streamable-http' + assert server_config['headers']['Authorization'] == 'Bearer test_key' + assert server_config['timeout'] == 120 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_with_custom_stdio_servers(self): + """Test _configure_llm_and_mcp merges custom STDIO servers with explicit names.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPStdioServerConfig + + self.mock_user.mcp_config = MCPConfig( + stdio_servers=[ + MCPStdioServerConfig( + name='my-custom-server', + command='npx', + args=['-y', 'my-package'], + env={'API_KEY': 'secret'}, + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + + # STDIO server should use its explicit name + assert 'my-custom-server' in mcp_servers + server_config = mcp_servers['my-custom-server'] + assert server_config['command'] == 'npx' + assert server_config['args'] == ['-y', 'my-package'] + assert server_config['env'] == {'API_KEY': 'secret'} + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_merges_system_and_custom_servers(self): + """Test _configure_llm_and_mcp merges both system and custom MCP servers.""" + # Arrange + from pydantic import SecretStr + + from openhands.core.config.mcp_config import ( + MCPConfig, + MCPSSEServerConfig, + MCPStdioServerConfig, + ) + + self.mock_user.search_api_key = SecretStr('tavily_key') + self.mock_user.mcp_config = MCPConfig( + sse_servers=[MCPSSEServerConfig(url='https://custom.com/sse')], + stdio_servers=[ + MCPStdioServerConfig( + name='custom-stdio', command='node', args=['app.js'] + ) + ], + ) + self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + + # Should have system servers + assert 'default' in mcp_servers + assert 'tavily' in mcp_servers + + # Should have custom SSE server with UUID name + sse_servers = [k for k in mcp_servers if k.startswith('sse_')] + assert len(sse_servers) == 1 + + # Should have custom STDIO server with explicit name + assert 'custom-stdio' in mcp_servers + + # Total: default + tavily + 1 SSE + 1 STDIO = 4 servers + assert len(mcp_servers) == 4 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_custom_config_error_handling(self): + """Test _configure_llm_and_mcp handles errors in custom MCP config gracefully.""" + # Arrange + self.mock_user.mcp_config = Mock() + # Simulate error when accessing sse_servers + self.mock_user.mcp_config.sse_servers = property( + lambda self: (_ for _ in ()).throw(Exception('Config error')) + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert - should still return valid config with system servers only + assert isinstance(llm, LLM) + mcp_servers = mcp_config['mcpServers'] + assert 'default' in mcp_servers + # Custom servers should not be added due to error + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_sdk_format_with_mcpservers_wrapper(self): + """Test _configure_llm_and_mcp returns SDK-required format with mcpServers key.""" + # Arrange + self.mock_user_context.get_mcp_api_key.return_value = 'mcp_key' + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert - SDK expects {'mcpServers': {...}} format + assert 'mcpServers' in mcp_config + assert isinstance(mcp_config['mcpServers'], dict) + + # Verify structure matches SDK expectations + for server_name, server_config in mcp_config['mcpServers'].items(): + assert isinstance(server_name, str) + assert isinstance(server_config, dict) + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_empty_custom_config(self): + """Test _configure_llm_and_mcp handles empty custom MCP config.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[], stdio_servers=[], shttp_servers=[] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + # Should only have system default server + assert 'default' in mcp_servers + assert len(mcp_servers) == 1 + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_sse_server_without_api_key(self): + """Test _configure_llm_and_mcp handles SSE servers without API keys.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[MCPSSEServerConfig(url='https://public.com/sse')] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + + # Server should exist but without headers + assert len(sse_servers) == 1 + server_config = list(sse_servers.values())[0] + assert 'headers' not in server_config + assert server_config['url'] == 'https://public.com/sse' + assert server_config['transport'] == 'sse' + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_shttp_server_without_timeout(self): + """Test _configure_llm_and_mcp handles SHTTP servers without timeout.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSHTTPServerConfig + + self.mock_user.mcp_config = MCPConfig( + shttp_servers=[MCPSHTTPServerConfig(url='https://example.com/mcp')] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + shttp_servers = {k: v for k, v in mcp_servers.items() if k.startswith('shttp_')} + + assert len(shttp_servers) == 1 + server_config = list(shttp_servers.values())[0] + # Timeout should be included even if None (defaults to 60) + assert 'timeout' in server_config + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_stdio_server_without_env(self): + """Test _configure_llm_and_mcp handles STDIO servers without environment variables.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPStdioServerConfig + + self.mock_user.mcp_config = MCPConfig( + stdio_servers=[ + MCPStdioServerConfig( + name='simple-server', command='node', args=['app.js'] + ) + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + assert 'simple-server' in mcp_servers + server_config = mcp_servers['simple-server'] + + # Should not have env key if not provided + assert 'env' not in server_config + assert server_config['command'] == 'node' + assert server_config['args'] == ['app.js'] + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_multiple_servers_same_type(self): + """Test _configure_llm_and_mcp handles multiple custom servers of the same type.""" + # Arrange + from openhands.core.config.mcp_config import MCPConfig, MCPSSEServerConfig + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://server1.com/sse'), + MCPSSEServerConfig(url='https://server2.com/sse'), + MCPSSEServerConfig(url='https://server3.com/sse'), + ] + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + sse_servers = {k: v for k, v in mcp_servers.items() if k.startswith('sse_')} + + # All 3 servers should be present with unique UUID-based names + assert len(sse_servers) == 3 + + # Verify all have unique names + server_names = list(sse_servers.keys()) + assert len(set(server_names)) == 3 # All names are unique + + # Verify all URLs are preserved + urls = [v['url'] for v in sse_servers.values()] + assert 'https://server1.com/sse' in urls + assert 'https://server2.com/sse' in urls + assert 'https://server3.com/sse' in urls + + @pytest.mark.asyncio + async def test_configure_llm_and_mcp_mixed_server_types(self): + """Test _configure_llm_and_mcp handles all three server types together.""" + # Arrange + from openhands.core.config.mcp_config import ( + MCPConfig, + MCPSHTTPServerConfig, + MCPSSEServerConfig, + MCPStdioServerConfig, + ) + + self.mock_user.mcp_config = MCPConfig( + sse_servers=[ + MCPSSEServerConfig(url='https://sse.example.com/sse', api_key='sse_key') + ], + shttp_servers=[ + MCPSHTTPServerConfig(url='https://shttp.example.com/mcp', timeout=90) + ], + stdio_servers=[ + MCPStdioServerConfig( + name='stdio-server', + command='npx', + args=['mcp-server'], + env={'TOKEN': 'value'}, + ) + ], + ) + self.mock_user_context.get_mcp_api_key.return_value = None + + # Act + llm, mcp_config = await self.service._configure_llm_and_mcp( + self.mock_user, None + ) + + # Assert + mcp_servers = mcp_config['mcpServers'] + + # Check all server types are present + sse_count = len([k for k in mcp_servers if k.startswith('sse_')]) + shttp_count = len([k for k in mcp_servers if k.startswith('shttp_')]) + stdio_count = 1 if 'stdio-server' in mcp_servers else 0 + + assert sse_count == 1 + assert shttp_count == 1 + assert stdio_count == 1 + + # Verify each type has correct configuration + sse_server = next(v for k, v in mcp_servers.items() if k.startswith('sse_')) + assert sse_server['transport'] == 'sse' + assert sse_server['headers']['Authorization'] == 'Bearer sse_key' + + shttp_server = next(v for k, v in mcp_servers.items() if k.startswith('shttp_')) + assert shttp_server['transport'] == 'streamable-http' + assert shttp_server['timeout'] == 90 + + stdio_server = mcp_servers['stdio-server'] + assert stdio_server['command'] == 'npx' + assert stdio_server['env'] == {'TOKEN': 'value'} From f2def8fd7f94746926f5ad044c6d211e1eafebb5 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Sun, 14 Dec 2025 23:31:07 +0700 Subject: [PATCH 03/80] fix(backend): organizational skills do not trigger (v1 conversations) (#12037) --- .../app_conversation_service_base.py | 10 +- .../app_conversation/skill_loader.py | 335 ++++++++ .../test_app_conversation_service_base.py | 348 ++++++++ tests/unit/app_server/test_skill_loader.py | 782 ++++++++++++++++++ 4 files changed, 1474 insertions(+), 1 deletion(-) diff --git a/openhands/app_server/app_conversation/app_conversation_service_base.py b/openhands/app_server/app_conversation/app_conversation_service_base.py index d5d34bd109..fb1eb0001e 100644 --- a/openhands/app_server/app_conversation/app_conversation_service_base.py +++ b/openhands/app_server/app_conversation/app_conversation_service_base.py @@ -22,6 +22,7 @@ from openhands.app_server.app_conversation.app_conversation_service import ( ) from openhands.app_server.app_conversation.skill_loader import ( load_global_skills, + load_org_skills, load_repo_skills, load_sandbox_skills, merge_skills, @@ -94,13 +95,20 @@ class AppConversationServiceBase(AppConversationService, ABC): except Exception as e: _logger.warning(f'Failed to load user skills: {str(e)}') user_skills = [] + + # Load organization-level skills + org_skills = await load_org_skills( + remote_workspace, selected_repository, working_dir, self.user_context + ) + repo_skills = await load_repo_skills( remote_workspace, selected_repository, working_dir ) # Merge all skills (later lists override earlier ones) + # Precedence: sandbox < global < user < org < repo all_skills = merge_skills( - [sandbox_skills, global_skills, user_skills, repo_skills] + [sandbox_skills, global_skills, user_skills, org_skills, repo_skills] ) _logger.info( diff --git a/openhands/app_server/app_conversation/skill_loader.py b/openhands/app_server/app_conversation/skill_loader.py index d8fca7cfc3..d237ff0542 100644 --- a/openhands/app_server/app_conversation/skill_loader.py +++ b/openhands/app_server/app_conversation/skill_loader.py @@ -14,6 +14,9 @@ from pathlib import Path import openhands from openhands.app_server.sandbox.sandbox_models import SandboxInfo +from openhands.app_server.user.user_context import UserContext +from openhands.integrations.provider import ProviderType +from openhands.integrations.service_types import AuthenticationError from openhands.sdk.context.skills import Skill from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace @@ -119,6 +122,96 @@ def _determine_repo_root(working_dir: str, selected_repository: str | None) -> s return working_dir +async def _is_gitlab_repository(repo_name: str, user_context: UserContext) -> bool: + """Check if a repository is hosted on GitLab. + + Args: + repo_name: Repository name (e.g., "gitlab.com/org/repo" or "org/repo") + user_context: UserContext to access provider handler + + Returns: + True if the repository is hosted on GitLab, False otherwise + """ + try: + provider_handler = await user_context.get_provider_handler() # type: ignore[attr-defined] + repository = await provider_handler.verify_repo_provider(repo_name) + return repository.git_provider == ProviderType.GITLAB + except Exception: + # If we can't determine the provider, assume it's not GitLab + # This is a safe fallback since we'll just use the default .openhands + return False + + +async def _is_azure_devops_repository( + repo_name: str, user_context: UserContext +) -> bool: + """Check if a repository is hosted on Azure DevOps. + + Args: + repo_name: Repository name (e.g., "org/project/repo") + user_context: UserContext to access provider handler + + Returns: + True if the repository is hosted on Azure DevOps, False otherwise + """ + try: + provider_handler = await user_context.get_provider_handler() # type: ignore[attr-defined] + repository = await provider_handler.verify_repo_provider(repo_name) + return repository.git_provider == ProviderType.AZURE_DEVOPS + except Exception: + # If we can't determine the provider, assume it's not Azure DevOps + return False + + +async def _determine_org_repo_path( + selected_repository: str, user_context: UserContext +) -> tuple[str, str]: + """Determine the organization repository path and organization name. + + Args: + selected_repository: Repository name (e.g., 'owner/repo' or 'org/project/repo') + user_context: UserContext to access provider handler + + Returns: + Tuple of (org_repo_path, org_name) where: + - org_repo_path: Full path to org-level config repo + - org_name: Organization name extracted from repository + + Examples: + - GitHub/Bitbucket: ('owner/.openhands', 'owner') + - GitLab: ('owner/openhands-config', 'owner') + - Azure DevOps: ('org/openhands-config/openhands-config', 'org') + """ + repo_parts = selected_repository.split('/') + + # Determine repository type + is_azure_devops = await _is_azure_devops_repository( + selected_repository, user_context + ) + is_gitlab = await _is_gitlab_repository(selected_repository, user_context) + + # Extract the org/user name + # Azure DevOps format: org/project/repo (3 parts) - extract org (first part) + # GitHub/GitLab/Bitbucket format: owner/repo (2 parts) - extract owner (first part) + if is_azure_devops and len(repo_parts) >= 3: + org_name = repo_parts[0] # Get org from org/project/repo + else: + org_name = repo_parts[-2] # Get owner from owner/repo + + # For GitLab and Azure DevOps, use openhands-config (since .openhands is not a valid repo name) + # For other providers, use .openhands + if is_gitlab: + org_openhands_repo = f'{org_name}/openhands-config' + elif is_azure_devops: + # Azure DevOps format: org/project/repo + # For org-level config, use: org/openhands-config/openhands-config + org_openhands_repo = f'{org_name}/openhands-config/openhands-config' + else: + org_openhands_repo = f'{org_name}/.openhands' + + return org_openhands_repo, org_name + + async def _read_file_from_workspace( workspace: AsyncRemoteWorkspace, file_path: str, working_dir: str ) -> str | None: @@ -322,6 +415,248 @@ async def load_repo_skills( return [] +def _validate_repository_for_org_skills(selected_repository: str) -> bool: + """Validate that the repository path has sufficient parts for org skills. + + Args: + selected_repository: Repository name (e.g., 'owner/repo') + + Returns: + True if repository is valid for org skills loading, False otherwise + """ + repo_parts = selected_repository.split('/') + if len(repo_parts) < 2: + _logger.warning( + f'Repository path has insufficient parts ({len(repo_parts)} < 2), skipping org-level skills' + ) + return False + return True + + +async def _get_org_repository_url( + org_openhands_repo: str, user_context: UserContext +) -> str | None: + """Get authenticated Git URL for organization repository. + + Args: + org_openhands_repo: Organization repository path + user_context: UserContext to access authentication + + Returns: + Authenticated Git URL if successful, None otherwise + """ + try: + remote_url = await user_context.get_authenticated_git_url(org_openhands_repo) + return remote_url + except AuthenticationError as e: + _logger.debug( + f'org-level skill directory {org_openhands_repo} not found: {str(e)}' + ) + return None + except Exception as e: + _logger.debug( + f'Failed to get authenticated URL for {org_openhands_repo}: {str(e)}' + ) + return None + + +async def _clone_org_repository( + workspace: AsyncRemoteWorkspace, + remote_url: str, + org_repo_dir: str, + working_dir: str, + org_openhands_repo: str, +) -> bool: + """Clone organization repository to temporary directory. + + Args: + workspace: AsyncRemoteWorkspace to execute commands + remote_url: Authenticated Git URL + org_repo_dir: Temporary directory path for cloning + working_dir: Working directory for command execution + org_openhands_repo: Organization repository path (for logging) + + Returns: + True if clone successful, False otherwise + """ + _logger.debug(f'Creating temporary directory for org repo: {org_repo_dir}') + + # Clone the repo (shallow clone for efficiency) + clone_cmd = f'GIT_TERMINAL_PROMPT=0 git clone --depth 1 {remote_url} {org_repo_dir}' + _logger.info('Executing clone command for org-level repo') + + result = await workspace.execute_command(clone_cmd, working_dir, timeout=120.0) + + if result.exit_code != 0: + _logger.info( + f'No org-level skills found at {org_openhands_repo} (exit_code: {result.exit_code})' + ) + _logger.debug(f'Clone command output: {result.stderr}') + return False + + _logger.info(f'Successfully cloned org-level skills from {org_openhands_repo}') + return True + + +async def _load_skills_from_org_directories( + workspace: AsyncRemoteWorkspace, org_repo_dir: str, working_dir: str +) -> tuple[list[Skill], list[Skill]]: + """Load skills from both skills/ and microagents/ directories in org repo. + + Args: + workspace: AsyncRemoteWorkspace to execute commands + org_repo_dir: Path to cloned organization repository + working_dir: Working directory for command execution + + Returns: + Tuple of (skills_dir_skills, microagents_dir_skills) + """ + skills_dir = f'{org_repo_dir}/skills' + skills_dir_skills = await _find_and_load_skill_md_files( + workspace, skills_dir, working_dir + ) + + microagents_dir = f'{org_repo_dir}/microagents' + microagents_dir_skills = await _find_and_load_skill_md_files( + workspace, microagents_dir, working_dir + ) + + return skills_dir_skills, microagents_dir_skills + + +def _merge_org_skills_with_precedence( + skills_dir_skills: list[Skill], microagents_dir_skills: list[Skill] +) -> list[Skill]: + """Merge skills from skills/ and microagents/ with proper precedence. + + Precedence: skills/ > microagents/ (skills/ overrides microagents/ for same name) + + Args: + skills_dir_skills: Skills loaded from skills/ directory + microagents_dir_skills: Skills loaded from microagents/ directory + + Returns: + Merged list of skills with proper precedence applied + """ + skills_by_name = {} + for skill in microagents_dir_skills + skills_dir_skills: + # Later sources (skills/) override earlier ones (microagents/) + if skill.name not in skills_by_name: + skills_by_name[skill.name] = skill + else: + _logger.debug( + f'Overriding org skill "{skill.name}" from microagents/ with skills/' + ) + skills_by_name[skill.name] = skill + + return list(skills_by_name.values()) + + +async def _cleanup_org_repository( + workspace: AsyncRemoteWorkspace, org_repo_dir: str, working_dir: str +) -> None: + """Clean up cloned organization repository directory. + + Args: + workspace: AsyncRemoteWorkspace to execute commands + org_repo_dir: Path to cloned organization repository + working_dir: Working directory for command execution + """ + cleanup_cmd = f'rm -rf {org_repo_dir}' + await workspace.execute_command(cleanup_cmd, working_dir, timeout=10.0) + + +async def load_org_skills( + workspace: AsyncRemoteWorkspace, + selected_repository: str | None, + working_dir: str, + user_context: UserContext, +) -> list[Skill]: + """Load organization-level skills from the organization repository. + + For example, if the repository is github.com/acme-co/api, this will check if + github.com/acme-co/.openhands exists. If it does, it will clone it and load + the skills from both the ./skills/ and ./microagents/ folders. + + For GitLab repositories, it will use openhands-config instead of .openhands + since GitLab doesn't support repository names starting with non-alphanumeric + characters. + + For Azure DevOps repositories, it will use org/openhands-config/openhands-config + format to match Azure DevOps's three-part repository structure (org/project/repo). + + Args: + workspace: AsyncRemoteWorkspace to execute commands in the sandbox + selected_repository: Repository name (e.g., 'owner/repo') or None + working_dir: Working directory path + user_context: UserContext to access provider handler and authentication + + Returns: + List of Skill objects loaded from organization repository. + Returns empty list if no repository selected or on errors. + """ + if not selected_repository: + return [] + + try: + _logger.debug( + f'Starting org-level skill loading for repository: {selected_repository}' + ) + + # Validate repository path + if not _validate_repository_for_org_skills(selected_repository): + return [] + + # Determine organization repository path + org_openhands_repo, org_name = await _determine_org_repo_path( + selected_repository, user_context + ) + + _logger.info(f'Checking for org-level skills at {org_openhands_repo}') + + # Get authenticated URL for org repository + remote_url = await _get_org_repository_url(org_openhands_repo, user_context) + if not remote_url: + return [] + + # Clone the organization repository + org_repo_dir = f'{working_dir}/_org_openhands_{org_name}' + clone_success = await _clone_org_repository( + workspace, remote_url, org_repo_dir, working_dir, org_openhands_repo + ) + if not clone_success: + return [] + + # Load skills from both skills/ and microagents/ directories + ( + skills_dir_skills, + microagents_dir_skills, + ) = await _load_skills_from_org_directories( + workspace, org_repo_dir, working_dir + ) + + # Merge skills with proper precedence + loaded_skills = _merge_org_skills_with_precedence( + skills_dir_skills, microagents_dir_skills + ) + + _logger.info( + f'Loaded {len(loaded_skills)} skills from org-level repository {org_openhands_repo}: {[s.name for s in loaded_skills]}' + ) + + # Clean up the org repo directory + await _cleanup_org_repository(workspace, org_repo_dir, working_dir) + + return loaded_skills + + except AuthenticationError as e: + _logger.debug(f'org-level skill directory not found: {str(e)}') + return [] + except Exception as e: + _logger.warning(f'Failed to load org-level skills: {str(e)}') + return [] + + def merge_skills(skill_lists: list[list[Skill]]) -> list[Skill]: """Merge multiple skill lists, avoiding duplicates by name. diff --git a/tests/unit/app_server/test_app_conversation_service_base.py b/tests/unit/app_server/test_app_conversation_service_base.py index 356c454fcf..01fc63bc5d 100644 --- a/tests/unit/app_server/test_app_conversation_service_base.py +++ b/tests/unit/app_server/test_app_conversation_service_base.py @@ -15,6 +15,7 @@ from openhands.app_server.app_conversation.app_conversation_models import AgentT from openhands.app_server.app_conversation.app_conversation_service_base import ( AppConversationServiceBase, ) +from openhands.app_server.sandbox.sandbox_models import SandboxInfo from openhands.app_server.user.user_context import UserContext @@ -916,3 +917,350 @@ async def test_configure_git_user_settings_special_characters_in_name(mock_works mock_workspace.execute_command.assert_any_call( 'git config --global user.name "Test O\'Brien"', '/workspace/project' ) + + +# ============================================================================= +# Tests for _load_and_merge_all_skills with org skills +# ============================================================================= + + +class TestLoadAndMergeAllSkillsWithOrgSkills: + """Test _load_and_merge_all_skills includes organization skills.""" + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_includes_org_skills( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that _load_and_merge_all_skills loads and merges org skills.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create distinct mock skills for each source + sandbox_skill = Mock() + sandbox_skill.name = 'sandbox_skill' + global_skill = Mock() + global_skill.name = 'global_skill' + user_skill = Mock() + user_skill.name = 'user_skill' + org_skill = Mock() + org_skill.name = 'org_skill' + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_sandbox.return_value = [sandbox_skill] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service._load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + assert len(result) == 5 + names = {s.name for s in result} + assert names == { + 'sandbox_skill', + 'global_skill', + 'user_skill', + 'org_skill', + 'repo_skill', + } + mock_load_org.assert_called_once_with( + remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_org_skills_precedence( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that org skills have correct precedence (higher than user, lower than repo).""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create skills with same name but different sources + user_skill = Mock() + user_skill.name = 'common_skill' + user_skill.source = 'user' + + org_skill = Mock() + org_skill.name = 'common_skill' + org_skill.source = 'org' + + repo_skill = Mock() + repo_skill.name = 'common_skill' + repo_skill.source = 'repo' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service._load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + # Should have only one skill with repo source (highest precedence) + assert len(result) == 1 + assert result[0].source == 'repo' + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_org_skills_override_user_skills( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that org skills override user skills for same name.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + # Create skills with same name + user_skill = Mock() + user_skill.name = 'shared_skill' + user_skill.priority = 'low' + + org_skill = Mock() + org_skill.name = 'shared_skill' + org_skill.priority = 'high' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [] + + # Act + result = await service._load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + assert len(result) == 1 + assert result[0].priority == 'high' # Org skill should win + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_handles_org_skills_failure( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test that failure to load org skills doesn't break the overall process.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + global_skill = Mock() + global_skill.name = 'global_skill' + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [] + mock_load_org.return_value = [] # Org skills failed/empty + mock_load_repo.return_value = [repo_skill] + + # Act + result = await service._load_and_merge_all_skills( + sandbox, remote_workspace, 'owner/repo', '/workspace' + ) + + # Assert + # Should still have skills from other sources + assert len(result) == 2 + names = {s.name for s in result} + assert names == {'global_skill', 'repo_skill'} + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_sandbox_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_global_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_user_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.app_conversation_service_base.load_repo_skills' + ) + async def test_load_and_merge_no_selected_repository( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_load_sandbox, + ): + """Test skill loading when no repository is selected.""" + # Arrange + mock_user_context = Mock(spec=UserContext) + with patch.object( + AppConversationServiceBase, + '__abstractmethods__', + set(), + ): + service = AppConversationServiceBase( + init_git_in_empty_workspace=True, + user_context=mock_user_context, + ) + + sandbox = Mock(spec=SandboxInfo) + sandbox.exposed_urls = [] + remote_workspace = AsyncMock() + + global_skill = Mock() + global_skill.name = 'global_skill' + + mock_load_sandbox.return_value = [] + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [] + mock_load_org.return_value = [] + mock_load_repo.return_value = [] + + # Act + result = await service._load_and_merge_all_skills( + sandbox, remote_workspace, None, '/workspace' + ) + + # Assert + assert len(result) == 1 + # Org skills should be called even with None repository + mock_load_org.assert_called_once_with( + remote_workspace, None, '/workspace', mock_user_context + ) diff --git a/tests/unit/app_server/test_skill_loader.py b/tests/unit/app_server/test_skill_loader.py index c9e54ba5a1..e4daadfa14 100644 --- a/tests/unit/app_server/test_skill_loader.py +++ b/tests/unit/app_server/test_skill_loader.py @@ -11,15 +11,27 @@ from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from openhands.app_server.app_conversation.skill_loader import ( + _cleanup_org_repository, + _clone_org_repository, + _determine_org_repo_path, _determine_repo_root, _find_and_load_global_skill_files, _find_and_load_skill_md_files, + _get_org_repository_url, + _is_azure_devops_repository, + _is_gitlab_repository, + _load_skills_from_org_directories, _load_special_files, + _merge_org_skills_with_precedence, _read_file_from_workspace, + _validate_repository_for_org_skills, load_global_skills, + load_org_skills, load_repo_skills, merge_skills, ) +from openhands.integrations.provider import ProviderType +from openhands.integrations.service_types import AuthenticationError # ===== Test Fixtures ===== @@ -667,6 +679,669 @@ class TestMergeSkills: assert len(result) == 2 +# ===== Tests for Organization Skills Functions ===== + + +class TestIsGitlabRepository: + """Test _is_gitlab_repository helper function.""" + + @pytest.mark.asyncio + async def test_is_gitlab_repository_true(self): + """Test GitLab repository detection returns True.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITLAB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is True + mock_provider_handler.verify_repo_provider.assert_called_once_with('owner/repo') + + @pytest.mark.asyncio + async def test_is_gitlab_repository_false(self): + """Test non-GitLab repository detection returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITHUB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_is_gitlab_repository_exception_handling(self): + """Test exception handling returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_provider_handler.side_effect = Exception('API error') + + # Act + result = await _is_gitlab_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + +class TestIsAzureDevOpsRepository: + """Test _is_azure_devops_repository helper function.""" + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_true(self): + """Test Azure DevOps repository detection returns True.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.AZURE_DEVOPS + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_azure_devops_repository( + 'org/project/repo', mock_user_context + ) + + # Assert + assert result is True + mock_provider_handler.verify_repo_provider.assert_called_once_with( + 'org/project/repo' + ) + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_false(self): + """Test non-Azure DevOps repository detection returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_provider_handler = AsyncMock() + mock_repository = Mock() + mock_repository.git_provider = ProviderType.GITHUB + + mock_user_context.get_provider_handler.return_value = mock_provider_handler + mock_provider_handler.verify_repo_provider.return_value = mock_repository + + # Act + result = await _is_azure_devops_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_is_azure_devops_repository_exception_handling(self): + """Test exception handling returns False.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_provider_handler.side_effect = Exception('Network error') + + # Act + result = await _is_azure_devops_repository('owner/repo', mock_user_context) + + # Assert + assert result is False + + +class TestDetermineOrgRepoPath: + """Test _determine_org_repo_path helper function.""" + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_github_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for GitHub repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = False + mock_is_azure.return_value = False + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'owner/repo', mock_user_context + ) + + # Assert + assert org_repo == 'owner/.openhands' + assert org_name == 'owner' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_gitlab_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for GitLab repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = True + mock_is_azure.return_value = False + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'owner/repo', mock_user_context + ) + + # Assert + assert org_repo == 'owner/openhands-config' + assert org_name == 'owner' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader._is_gitlab_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._is_azure_devops_repository' + ) + async def test_azure_devops_repository_path(self, mock_is_azure, mock_is_gitlab): + """Test org path for Azure DevOps repository.""" + # Arrange + mock_user_context = AsyncMock() + mock_is_gitlab.return_value = False + mock_is_azure.return_value = True + + # Act + org_repo, org_name = await _determine_org_repo_path( + 'org/project/repo', mock_user_context + ) + + # Assert + assert org_repo == 'org/openhands-config/openhands-config' + assert org_name == 'org' + + +class TestValidateRepositoryForOrgSkills: + """Test _validate_repository_for_org_skills helper function.""" + + def test_valid_repository_two_parts(self): + """Test validation passes for repository with two parts.""" + # Act + result = _validate_repository_for_org_skills('owner/repo') + + # Assert + assert result is True + + def test_valid_repository_three_parts(self): + """Test validation passes for repository with three parts (Azure DevOps).""" + # Act + result = _validate_repository_for_org_skills('org/project/repo') + + # Assert + assert result is True + + def test_invalid_repository_one_part(self): + """Test validation fails for repository with only one part.""" + # Act + result = _validate_repository_for_org_skills('repo') + + # Assert + assert result is False + + def test_invalid_repository_empty_string(self): + """Test validation fails for empty string.""" + # Act + result = _validate_repository_for_org_skills('') + + # Assert + assert result is False + + +class TestGetOrgRepositoryUrl: + """Test _get_org_repository_url helper function.""" + + @pytest.mark.asyncio + async def test_successful_url_retrieval(self): + """Test successfully retrieving authenticated URL.""" + # Arrange + mock_user_context = AsyncMock() + expected_url = 'https://token@github.com/owner/.openhands.git' + mock_user_context.get_authenticated_git_url.return_value = expected_url + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result == expected_url + mock_user_context.get_authenticated_git_url.assert_called_once_with( + 'owner/.openhands' + ) + + @pytest.mark.asyncio + async def test_authentication_error(self): + """Test handling of authentication error returns None.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_authenticated_git_url.side_effect = AuthenticationError( + 'Not found' + ) + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result is None + + @pytest.mark.asyncio + async def test_general_exception(self): + """Test handling of general exception returns None.""" + # Arrange + mock_user_context = AsyncMock() + mock_user_context.get_authenticated_git_url.side_effect = Exception( + 'Network error' + ) + + # Act + result = await _get_org_repository_url('owner/.openhands', mock_user_context) + + # Assert + assert result is None + + +class TestCloneOrgRepository: + """Test _clone_org_repository helper function.""" + + @pytest.mark.asyncio + async def test_successful_clone(self, mock_async_remote_workspace): + """Test successful repository clone.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 0 + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + success = await _clone_org_repository( + mock_async_remote_workspace, + 'https://github.com/owner/.openhands.git', + '/workspace/_org_openhands_owner', + '/workspace', + 'owner/.openhands', + ) + + # Assert + assert success is True + mock_async_remote_workspace.execute_command.assert_called_once() + call_args = mock_async_remote_workspace.execute_command.call_args + assert 'git clone' in call_args[0][0] + assert '--depth 1' in call_args[0][0] + + @pytest.mark.asyncio + async def test_failed_clone(self, mock_async_remote_workspace): + """Test failed repository clone.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 1 + result_obj.stderr = 'Repository not found' + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + success = await _clone_org_repository( + mock_async_remote_workspace, + 'https://github.com/owner/.openhands.git', + '/workspace/_org_openhands_owner', + '/workspace', + 'owner/.openhands', + ) + + # Assert + assert success is False + + +class TestLoadSkillsFromOrgDirectories: + """Test _load_skills_from_org_directories helper function.""" + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._find_and_load_skill_md_files' + ) + async def test_load_from_both_directories( + self, mock_find_and_load, mock_async_remote_workspace, mock_skills_list + ): + """Test loading skills from both skills/ and microagents/ directories.""" + # Arrange + skills_dir_skills = [mock_skills_list[0]] + microagents_dir_skills = [mock_skills_list[1], mock_skills_list[2]] + mock_find_and_load.side_effect = [skills_dir_skills, microagents_dir_skills] + + # Act + result_skills, result_microagents = await _load_skills_from_org_directories( + mock_async_remote_workspace, '/workspace/_org_openhands_owner', '/workspace' + ) + + # Assert + assert result_skills == skills_dir_skills + assert result_microagents == microagents_dir_skills + assert mock_find_and_load.call_count == 2 + + # Verify correct directories were checked + first_call = mock_find_and_load.call_args_list[0] + second_call = mock_find_and_load.call_args_list[1] + assert '/skills' in first_call[0][1] + assert '/microagents' in second_call[0][1] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._find_and_load_skill_md_files' + ) + async def test_load_with_empty_directories( + self, mock_find_and_load, mock_async_remote_workspace + ): + """Test loading when both directories are empty.""" + # Arrange + mock_find_and_load.side_effect = [[], []] + + # Act + result_skills, result_microagents = await _load_skills_from_org_directories( + mock_async_remote_workspace, '/workspace/_org_openhands_owner', '/workspace' + ) + + # Assert + assert result_skills == [] + assert result_microagents == [] + + +class TestMergeOrgSkillsWithPrecedence: + """Test _merge_org_skills_with_precedence helper function.""" + + def test_merge_no_duplicates(self, mock_skills_list): + """Test merging skills with no name conflicts.""" + # Arrange + skills_dir_skills = [mock_skills_list[0]] + microagents_dir_skills = [mock_skills_list[1], mock_skills_list[2]] + + # Act + result = _merge_org_skills_with_precedence( + skills_dir_skills, microagents_dir_skills + ) + + # Assert + assert len(result) == 3 + names = {s.name for s in result} + assert names == {'skill_0', 'skill_1', 'skill_2'} + + def test_merge_with_duplicate_skills_dir_wins(self): + """Test skills/ directory takes precedence over microagents/.""" + # Arrange + skill_from_microagents = Mock() + skill_from_microagents.name = 'common_skill' + skill_from_microagents.source = 'microagents' + + skill_from_skills = Mock() + skill_from_skills.name = 'common_skill' + skill_from_skills.source = 'skills' + + # Act + result = _merge_org_skills_with_precedence( + [skill_from_skills], [skill_from_microagents] + ) + + # Assert + assert len(result) == 1 + assert result[0].source == 'skills' + + def test_merge_with_empty_lists(self): + """Test merging with empty skill lists.""" + # Act + result = _merge_org_skills_with_precedence([], []) + + # Assert + assert result == [] + + def test_merge_with_only_skills_dir(self, mock_skills_list): + """Test merging with only skills/ directory populated.""" + # Act + result = _merge_org_skills_with_precedence([mock_skills_list[0]], []) + + # Assert + assert len(result) == 1 + assert result[0] == mock_skills_list[0] + + def test_merge_with_only_microagents_dir(self, mock_skills_list): + """Test merging with only microagents/ directory populated.""" + # Act + result = _merge_org_skills_with_precedence([], [mock_skills_list[0]]) + + # Assert + assert len(result) == 1 + assert result[0] == mock_skills_list[0] + + +class TestCleanupOrgRepository: + """Test _cleanup_org_repository helper function.""" + + @pytest.mark.asyncio + async def test_cleanup_successful(self, mock_async_remote_workspace): + """Test successful cleanup of org repository directory.""" + # Arrange + result_obj = Mock() + result_obj.exit_code = 0 + mock_async_remote_workspace.execute_command.return_value = result_obj + + # Act + await _cleanup_org_repository( + mock_async_remote_workspace, + '/workspace/_org_openhands_owner', + '/workspace', + ) + + # Assert + mock_async_remote_workspace.execute_command.assert_called_once() + call_args = mock_async_remote_workspace.execute_command.call_args + assert 'rm -rf' in call_args[0][0] + assert '/workspace/_org_openhands_owner' in call_args[0][0] + + +class TestLoadOrgSkills: + """Test load_org_skills main function.""" + + @pytest.mark.asyncio + async def test_load_org_skills_no_selected_repository( + self, mock_async_remote_workspace + ): + """Test load_org_skills returns empty list when no repository selected.""" + # Arrange + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, None, '/workspace', mock_user_context + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_invalid_repository( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills returns empty list for invalid repository.""" + # Arrange + mock_validate.return_value = False + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, 'invalid', '/workspace', mock_user_context + ) + + # Assert + assert result == [] + mock_validate.assert_called_once_with('invalid') + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + async def test_load_org_skills_no_url_available( + self, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + ): + """Test load_org_skills returns empty list when URL cannot be retrieved.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = None + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + @patch('openhands.app_server.app_conversation.skill_loader._clone_org_repository') + async def test_load_org_skills_clone_fails( + self, + mock_clone, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + ): + """Test load_org_skills returns empty list when clone fails.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = 'https://github.com/owner/.openhands.git' + mock_clone.return_value = False + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + @patch( + 'openhands.app_server.app_conversation.skill_loader._determine_org_repo_path' + ) + @patch('openhands.app_server.app_conversation.skill_loader._get_org_repository_url') + @patch('openhands.app_server.app_conversation.skill_loader._clone_org_repository') + @patch( + 'openhands.app_server.app_conversation.skill_loader._load_skills_from_org_directories' + ) + @patch('openhands.app_server.app_conversation.skill_loader._cleanup_org_repository') + async def test_load_org_skills_success( + self, + mock_cleanup, + mock_load_skills, + mock_clone, + mock_get_url, + mock_determine_path, + mock_validate, + mock_async_remote_workspace, + mock_skills_list, + ): + """Test successful org skills loading.""" + # Arrange + mock_validate.return_value = True + mock_determine_path.return_value = ('owner/.openhands', 'owner') + mock_get_url.return_value = 'https://github.com/owner/.openhands.git' + mock_clone.return_value = True + mock_load_skills.return_value = ([mock_skills_list[0]], [mock_skills_list[1]]) + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert len(result) == 2 + mock_cleanup.assert_called_once() + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_handles_authentication_error( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills handles AuthenticationError gracefully.""" + # Arrange + mock_validate.side_effect = AuthenticationError('Auth failed') + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.skill_loader._validate_repository_for_org_skills' + ) + async def test_load_org_skills_handles_general_exception( + self, mock_validate, mock_async_remote_workspace + ): + """Test load_org_skills handles general exceptions gracefully.""" + # Arrange + mock_validate.side_effect = Exception('Unexpected error') + mock_user_context = AsyncMock() + + # Act + result = await load_org_skills( + mock_async_remote_workspace, + 'owner/repo', + '/workspace', + mock_user_context, + ) + + # Assert + assert result == [] + + # ===== Integration Tests ===== @@ -754,3 +1429,110 @@ class TestSkillLoaderIntegration: # Should have only one skill with repo source (highest precedence) assert len(all_skills) == 1 assert all_skills[0].source == 'repo' + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader.load_global_skills') + @patch('openhands.sdk.context.skills.load_user_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_org_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_repo_skills') + async def test_loading_with_org_skills_precedence( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_async_remote_workspace, + ): + """Test that org skills fit correctly in precedence order.""" + # Arrange + # Create skills with same name but different sources + global_skill = Mock() + global_skill.name = 'shared_skill' + global_skill.priority = 'low' + + user_skill = Mock() + user_skill.name = 'shared_skill' + user_skill.priority = 'medium' + + org_skill = Mock() + org_skill.name = 'shared_skill' + org_skill.priority = 'high' + + repo_skill = Mock() + repo_skill.name = 'shared_skill' + repo_skill.priority = 'highest' + + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + mock_user_context = AsyncMock() + + # Act + global_skills = mock_load_global() + user_skills = mock_load_user() + org_skills = await mock_load_org( + mock_async_remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + repo_skills = await mock_load_repo( + mock_async_remote_workspace, 'owner/repo', '/workspace' + ) + + # Merge with correct precedence: global < user < org < repo + all_skills = merge_skills([global_skills, user_skills, org_skills, repo_skills]) + + # Assert + assert len(all_skills) == 1 + assert all_skills[0].priority == 'highest' # Repo has highest precedence + + @pytest.mark.asyncio + @patch('openhands.app_server.app_conversation.skill_loader.load_global_skills') + @patch('openhands.sdk.context.skills.load_user_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_org_skills') + @patch('openhands.app_server.app_conversation.skill_loader.load_repo_skills') + async def test_loading_org_skills_with_unique_names( + self, + mock_load_repo, + mock_load_org, + mock_load_user, + mock_load_global, + mock_async_remote_workspace, + ): + """Test loading org skills with unique names alongside other sources.""" + # Arrange + global_skill = Mock() + global_skill.name = 'global_skill' + + user_skill = Mock() + user_skill.name = 'user_skill' + + org_skill = Mock() + org_skill.name = 'org_skill' + + repo_skill = Mock() + repo_skill.name = 'repo_skill' + + mock_load_global.return_value = [global_skill] + mock_load_user.return_value = [user_skill] + mock_load_org.return_value = [org_skill] + mock_load_repo.return_value = [repo_skill] + + mock_user_context = AsyncMock() + + # Act + global_skills = mock_load_global() + user_skills = mock_load_user() + org_skills = await mock_load_org( + mock_async_remote_workspace, 'owner/repo', '/workspace', mock_user_context + ) + repo_skills = await mock_load_repo( + mock_async_remote_workspace, 'owner/repo', '/workspace' + ) + + all_skills = merge_skills([global_skills, user_skills, org_skills, repo_skills]) + + # Assert + assert len(all_skills) == 4 + names = {s.name for s in all_skills} + assert names == {'global_skill', 'user_skill', 'org_skill', 'repo_skill'} From b937d344db235e68cd9909505e42e90b723f9472 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 15 Dec 2025 00:39:32 +0700 Subject: [PATCH 04/80] fix(backend): initial titles show full uuids instead of shortened uuids (v1 conversations) (#12020) --- .../live_status_app_conversation_service.py | 2 +- ...st_live_status_app_conversation_service.py | 116 +++++++++++++++++- 2 files changed, 115 insertions(+), 3 deletions(-) diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index e7641ad487..a8d490489c 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -270,7 +270,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase): user_id = await self.user_context.get_user_id() app_conversation_info = AppConversationInfo( id=info.id, - title=f'Conversation {info.id.hex}', + title=f'Conversation {info.id.hex[:5]}', sandbox_id=sandbox.id, created_by_user_id=user_id, llm_model=start_conversation_request.agent.llm.model, diff --git a/tests/unit/app_server/test_live_status_app_conversation_service.py b/tests/unit/app_server/test_live_status_app_conversation_service.py index 6a9821b9f3..f662f33146 100644 --- a/tests/unit/app_server/test_live_status_app_conversation_service.py +++ b/tests/unit/app_server/test_live_status_app_conversation_service.py @@ -6,11 +6,20 @@ from uuid import UUID, uuid4 import pytest from openhands.agent_server.models import SendMessageRequest, StartConversationRequest -from openhands.app_server.app_conversation.app_conversation_models import AgentType +from openhands.app_server.app_conversation.app_conversation_models import ( + AgentType, + AppConversationStartRequest, +) from openhands.app_server.app_conversation.live_status_app_conversation_service import ( LiveStatusAppConversationService, ) -from openhands.app_server.sandbox.sandbox_models import SandboxInfo, SandboxStatus +from openhands.app_server.sandbox.sandbox_models import ( + AGENT_SERVER, + ExposedUrl, + SandboxInfo, + SandboxStatus, +) +from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo from openhands.app_server.user.user_context import UserContext from openhands.integrations.provider import ProviderType from openhands.sdk import Agent @@ -842,6 +851,109 @@ class TestLiveStatusAppConversationService: ) self.service._finalize_conversation_request.assert_called_once() + @pytest.mark.asyncio + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.AsyncRemoteWorkspace' + ) + @patch( + 'openhands.app_server.app_conversation.live_status_app_conversation_service.ConversationInfo' + ) + async def test_start_app_conversation_default_title_uses_first_five_characters( + self, mock_conversation_info_class, mock_remote_workspace_class + ): + """Test that v1 conversations use first 5 characters of conversation ID for default title.""" + # Arrange + conversation_id = uuid4() + conversation_id_hex = conversation_id.hex + expected_title = f'Conversation {conversation_id_hex[:5]}' + + # Mock user context + self.mock_user_context.get_user_id = AsyncMock(return_value='test_user_123') + self.mock_user_context.get_user_info = AsyncMock(return_value=self.mock_user) + + # Mock sandbox and sandbox spec + mock_sandbox_spec = Mock(spec=SandboxSpecInfo) + mock_sandbox_spec.working_dir = '/test/workspace' + self.mock_sandbox.sandbox_spec_id = str(uuid4()) + self.mock_sandbox.id = str(uuid4()) # Ensure sandbox.id is a string + self.mock_sandbox.session_api_key = 'test_session_key' + exposed_url = ExposedUrl( + name=AGENT_SERVER, url='http://agent-server:8000', port=60000 + ) + self.mock_sandbox.exposed_urls = [exposed_url] + + self.mock_sandbox_service.get_sandbox = AsyncMock( + return_value=self.mock_sandbox + ) + self.mock_sandbox_spec_service.get_sandbox_spec = AsyncMock( + return_value=mock_sandbox_spec + ) + + # Mock remote workspace + mock_remote_workspace = Mock() + mock_remote_workspace_class.return_value = mock_remote_workspace + + # Mock the wait for sandbox and setup scripts + async def mock_wait_for_sandbox(task): + task.sandbox_id = self.mock_sandbox.id + yield task + + async def mock_run_setup_scripts(task, sandbox, workspace): + yield task + + self.service._wait_for_sandbox_start = mock_wait_for_sandbox + self.service.run_setup_scripts = mock_run_setup_scripts + + # Mock build start conversation request + mock_agent = Mock(spec=Agent) + mock_agent.llm = Mock(spec=LLM) + mock_agent.llm.model = 'gpt-4' + mock_start_request = Mock(spec=StartConversationRequest) + mock_start_request.agent = mock_agent + mock_start_request.model_dump.return_value = {'test': 'data'} + + self.service._build_start_conversation_request_for_user = AsyncMock( + return_value=mock_start_request + ) + + # Mock ConversationInfo returned from agent server + mock_conversation_info = Mock() + mock_conversation_info.id = conversation_id + mock_conversation_info_class.model_validate.return_value = ( + mock_conversation_info + ) + + # Mock HTTP response from agent server + mock_response = Mock() + mock_response.json.return_value = {'id': str(conversation_id)} + mock_response.raise_for_status = Mock() + self.mock_httpx_client.post = AsyncMock(return_value=mock_response) + + # Mock event callback service + self.mock_event_callback_service.save_event_callback = AsyncMock() + + # Create request + request = AppConversationStartRequest() + + # Act + async for task in self.service._start_app_conversation(request): + # Consume all tasks to reach the point where title is set + pass + + # Assert + # Verify that save_app_conversation_info was called with the correct title format + self.mock_app_conversation_info_service.save_app_conversation_info.assert_called_once() + call_args = ( + self.mock_app_conversation_info_service.save_app_conversation_info.call_args + ) + saved_info = call_args[0][0] # First positional argument + + assert saved_info.title == expected_title, ( + f'Expected title to be "{expected_title}" (first 5 chars), ' + f'but got "{saved_info.title}"' + ) + assert saved_info.id == conversation_id + @pytest.mark.asyncio async def test_configure_llm_and_mcp_with_custom_sse_servers(self): """Test _configure_llm_and_mcp merges custom SSE servers with UUID-based names.""" From 67c9b6cf86c0c40197bc7481d8f8c7a4ff7f615c Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 15 Dec 2025 01:31:12 +0700 Subject: [PATCH 05/80] refactor(frontend): websocket error message (v1 conversations) (#12045) --- .../conversation-websocket-context.tsx | 8 +++-- frontend/src/i18n/declaration.ts | 2 ++ frontend/src/i18n/translation.json | 32 +++++++++++++++++++ 3 files changed, 40 insertions(+), 2 deletions(-) diff --git a/frontend/src/contexts/conversation-websocket-context.tsx b/frontend/src/contexts/conversation-websocket-context.tsx index 8a8a205cd6..0cf43b49ce 100644 --- a/frontend/src/contexts/conversation-websocket-context.tsx +++ b/frontend/src/contexts/conversation-websocket-context.tsx @@ -7,6 +7,7 @@ import React, { useMemo, useRef, } from "react"; +import { useTranslation } from "react-i18next"; import { useQueryClient } from "@tanstack/react-query"; import { useWebSocket, WebSocketHookOptions } from "#/hooks/use-websocket"; import { useEventStore } from "#/stores/use-event-store"; @@ -44,6 +45,7 @@ import { isBudgetOrCreditError } from "#/utils/error-handler"; import { useTracking } from "#/hooks/use-tracking"; import { useReadConversationFile } from "#/hooks/mutation/use-read-conversation-file"; import useMetricsStore from "#/stores/metrics-store"; +import { I18nKey } from "#/i18n/declaration"; // eslint-disable-next-line @typescript-eslint/naming-convention export type V1_WebSocketConnectionState = @@ -123,6 +125,8 @@ export function ConversationWebSocketProvider({ conversationId: string; } | null>(null); + const { t } = useTranslation(); + // Helper function to update metrics from stats event const updateMetricsFromStats = useCallback( (event: ConversationStateUpdateEventStats) => { @@ -603,7 +607,7 @@ export function ConversationWebSocketProvider({ // This prevents showing errors during initial connection attempts (e.g., when auto-starting a conversation) if (event.code !== 1000 && hasConnectedRefMain.current) { setErrorMessage( - `Connection lost: ${event.reason || "Unexpected disconnect"}`, + `${t(I18nKey.STATUS$CONNECTION_LOST)}: ${event.reason || t(I18nKey.STATUS$DISCONNECTED_REFRESH_PAGE)}`, ); } }, @@ -675,7 +679,7 @@ export function ConversationWebSocketProvider({ // This prevents showing errors during initial connection attempts (e.g., when auto-starting a conversation) if (event.code !== 1000 && hasConnectedRefPlanning.current) { setErrorMessage( - `Connection lost: ${event.reason || "Unexpected disconnect"}`, + `${t(I18nKey.STATUS$CONNECTION_LOST)}: ${event.reason || t(I18nKey.STATUS$DISCONNECTED_REFRESH_PAGE)}`, ); } }, diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts index 420709ef9b..f5a6cacfec 100644 --- a/frontend/src/i18n/declaration.ts +++ b/frontend/src/i18n/declaration.ts @@ -532,6 +532,8 @@ export enum I18nKey { SUGGESTIONS$ADD_DOCS = "SUGGESTIONS$ADD_DOCS", SUGGESTIONS$ADD_DOCKERFILE = "SUGGESTIONS$ADD_DOCKERFILE", STATUS$CONNECTED = "STATUS$CONNECTED", + STATUS$CONNECTION_LOST = "STATUS$CONNECTION_LOST", + STATUS$DISCONNECTED_REFRESH_PAGE = "STATUS$DISCONNECTED_REFRESH_PAGE", BROWSER$NO_PAGE_LOADED = "BROWSER$NO_PAGE_LOADED", USER$AVATAR_PLACEHOLDER = "USER$AVATAR_PLACEHOLDER", ACCOUNT_SETTINGS$LOGOUT = "ACCOUNT_SETTINGS$LOGOUT", diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index 2278092e8e..2966c1aa5f 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -8511,6 +8511,38 @@ "tr": "Bağlandı", "uk": "Підключено" }, + "STATUS$CONNECTION_LOST": { + "en": "Connection lost", + "ja": "接続が切断されました", + "zh-CN": "连接已断开", + "zh-TW": "連接已斷開", + "ko-KR": "연결이 끊어졌습니다", + "de": "Verbindung verloren", + "no": "Tilkobling mistet", + "it": "Connessione persa", + "pt": "Conexão perdida", + "es": "Conexión perdida", + "ar": "فُقد الاتصال", + "fr": "Connexion perdue", + "tr": "Bağlantı kesildi", + "uk": "Втрачено з'єднання" + }, + "STATUS$DISCONNECTED_REFRESH_PAGE": { + "en": "Disconnected. Please refresh the page", + "ja": "切断されました。ページを更新してください", + "zh-CN": "已断开连接。请刷新页面", + "zh-TW": "已斷開連接。請重新整理頁面", + "ko-KR": "연결이 끊어졌습니다. 페이지를 새로고침하세요", + "de": "Getrennt. Bitte aktualisieren Sie die Seite", + "no": "Koblet fra. Vennligst oppdater siden", + "it": "Disconnesso. Si prega di aggiornare la pagina", + "pt": "Desconectado. Por favor, atualize a página", + "es": "Desconectado. Por favor, actualice la página", + "ar": "تم قطع الاتصال. يرجى تحديث الصفحة", + "fr": "Déconnecté. Veuillez actualiser la page", + "tr": "Bağlantı kesildi. Lütfen sayfayı yenileyin", + "uk": "Відключено. Будь ласка, оновіть сторінку" + }, "BROWSER$NO_PAGE_LOADED": { "en": "No page loaded", "ja": "ブラウザは空です", From 47914c3576f986ebc214b0d5e100ecc5a38e58fd Mon Sep 17 00:00:00 2001 From: Neha Prasad Date: Mon, 15 Dec 2025 10:35:42 +0530 Subject: [PATCH 06/80] chore: remove pnpm settings from npmrc (#12028) --- frontend/.npmrc | 2 -- 1 file changed, 2 deletions(-) delete mode 100644 frontend/.npmrc diff --git a/frontend/.npmrc b/frontend/.npmrc deleted file mode 100644 index daecc6941a..0000000000 --- a/frontend/.npmrc +++ /dev/null @@ -1,2 +0,0 @@ -public-hoist-pattern[]=*@nextui-org/* -enable-pre-post-scripts=true From f52d9899e2b0419f1d0c1af157f36e4b9c789e6f Mon Sep 17 00:00:00 2001 From: Abhay Mishra Date: Mon, 15 Dec 2025 21:28:09 +0530 Subject: [PATCH 07/80] Consolidate scattered test files into a unified frontend/__tests__/ directory (#12002) --- .../settings/mcp-settings}/mcp-server-form.validation.test.tsx | 2 +- .../features/settings/mcp-settings}/mcp-server-list.test.tsx | 2 +- .../chat/event-content-helpers}/get-observation-content.test.ts | 2 +- .../services/__tests__ => __tests__/services}/actions.test.ts | 2 +- .../__tests__ => __tests__/utils}/custom-toast-handlers.test.ts | 2 +- .../utils/__tests__ => __tests__/utils}/settings-utils.test.ts | 2 +- .../utils/__tests__ => __tests__/utils}/toast-duration.test.ts | 2 +- .../__tests__ => __tests__/utils}/vscode-url-helper.test.ts | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) rename frontend/{src/components/features/settings/mcp-settings/__tests__ => __tests__/components/features/settings/mcp-settings}/mcp-server-form.validation.test.tsx (96%) rename frontend/{src/components/features/settings/mcp-settings/__tests__ => __tests__/components/features/settings/mcp-settings}/mcp-server-list.test.tsx (98%) rename frontend/{src/components/v1/chat/event-content-helpers/__tests__ => __tests__/components/v1/chat/event-content-helpers}/get-observation-content.test.ts (96%) rename frontend/{src/services/__tests__ => __tests__/services}/actions.test.ts (98%) rename frontend/{src/utils/__tests__ => __tests__/utils}/custom-toast-handlers.test.ts (98%) rename frontend/{src/utils/__tests__ => __tests__/utils}/settings-utils.test.ts (97%) rename frontend/{src/utils/__tests__ => __tests__/utils}/toast-duration.test.ts (97%) rename frontend/{src/utils/__tests__ => __tests__/utils}/vscode-url-helper.test.ts (96%) diff --git a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx similarity index 96% rename from frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx rename to frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx index a2546ac15c..6b290c94b6 100644 --- a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-form.validation.test.tsx +++ b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-form.validation.test.tsx @@ -1,6 +1,6 @@ import { render, screen, fireEvent } from "@testing-library/react"; import { describe, it, expect, vi } from "vitest"; -import { MCPServerForm } from "../mcp-server-form"; +import { MCPServerForm } from "#/components/features/settings/mcp-settings/mcp-server-form"; // i18n mock vi.mock("react-i18next", () => ({ diff --git a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx similarity index 98% rename from frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx rename to frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx index 4e1c4fa986..9e75f24483 100644 --- a/frontend/src/components/features/settings/mcp-settings/__tests__/mcp-server-list.test.tsx +++ b/frontend/__tests__/components/features/settings/mcp-settings/mcp-server-list.test.tsx @@ -1,6 +1,6 @@ import { render, screen } from "@testing-library/react"; import { describe, it, expect, vi } from "vitest"; -import { MCPServerList } from "../mcp-server-list"; +import { MCPServerList } from "#/components/features/settings/mcp-settings/mcp-server-list"; // Mock react-i18next vi.mock("react-i18next", () => ({ diff --git a/frontend/src/components/v1/chat/event-content-helpers/__tests__/get-observation-content.test.ts b/frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts similarity index 96% rename from frontend/src/components/v1/chat/event-content-helpers/__tests__/get-observation-content.test.ts rename to frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts index d35dc97925..9e2da14a26 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/__tests__/get-observation-content.test.ts +++ b/frontend/__tests__/components/v1/chat/event-content-helpers/get-observation-content.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { getObservationContent } from "../get-observation-content"; +import { getObservationContent } from "#/components/v1/chat/event-content-helpers/get-observation-content"; import { ObservationEvent } from "#/types/v1/core"; import { BrowserObservation } from "#/types/v1/core/base/observation"; diff --git a/frontend/src/services/__tests__/actions.test.ts b/frontend/__tests__/services/actions.test.ts similarity index 98% rename from frontend/src/services/__tests__/actions.test.ts rename to frontend/__tests__/services/actions.test.ts index a0df1915a8..44700aef2c 100644 --- a/frontend/src/services/__tests__/actions.test.ts +++ b/frontend/__tests__/services/actions.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; -import { handleStatusMessage } from "../actions"; +import { handleStatusMessage } from "#/services/actions"; import { StatusMessage } from "#/types/message"; import { queryClient } from "#/query-client-config"; import { useStatusStore } from "#/state/status-store"; diff --git a/frontend/src/utils/__tests__/custom-toast-handlers.test.ts b/frontend/__tests__/utils/custom-toast-handlers.test.ts similarity index 98% rename from frontend/src/utils/__tests__/custom-toast-handlers.test.ts rename to frontend/__tests__/utils/custom-toast-handlers.test.ts index 09023b517a..404bc1d4dd 100644 --- a/frontend/src/utils/__tests__/custom-toast-handlers.test.ts +++ b/frontend/__tests__/utils/custom-toast-handlers.test.ts @@ -3,7 +3,7 @@ import toast from "react-hot-toast"; import { displaySuccessToast, displayErrorToast, -} from "../custom-toast-handlers"; +} from "#/utils/custom-toast-handlers"; // Mock react-hot-toast vi.mock("react-hot-toast", () => ({ diff --git a/frontend/src/utils/__tests__/settings-utils.test.ts b/frontend/__tests__/utils/settings-utils.test.ts similarity index 97% rename from frontend/src/utils/__tests__/settings-utils.test.ts rename to frontend/__tests__/utils/settings-utils.test.ts index bf2ae794f2..9eb9a038a5 100644 --- a/frontend/src/utils/__tests__/settings-utils.test.ts +++ b/frontend/__tests__/utils/settings-utils.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { parseMaxBudgetPerTask, extractSettings } from "../settings-utils"; +import { parseMaxBudgetPerTask, extractSettings } from "#/utils/settings-utils"; describe("parseMaxBudgetPerTask", () => { it("should return null for empty string", () => { diff --git a/frontend/src/utils/__tests__/toast-duration.test.ts b/frontend/__tests__/utils/toast-duration.test.ts similarity index 97% rename from frontend/src/utils/__tests__/toast-duration.test.ts rename to frontend/__tests__/utils/toast-duration.test.ts index 3b5ffa8b69..3ef6c803d9 100644 --- a/frontend/src/utils/__tests__/toast-duration.test.ts +++ b/frontend/__tests__/utils/toast-duration.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect } from "vitest"; -import { calculateToastDuration } from "../toast-duration"; +import { calculateToastDuration } from "#/utils/toast-duration"; describe("calculateToastDuration", () => { it("should return minimum duration for short messages", () => { diff --git a/frontend/src/utils/__tests__/vscode-url-helper.test.ts b/frontend/__tests__/utils/vscode-url-helper.test.ts similarity index 96% rename from frontend/src/utils/__tests__/vscode-url-helper.test.ts rename to frontend/__tests__/utils/vscode-url-helper.test.ts index c85804089b..a55b03bbbf 100644 --- a/frontend/src/utils/__tests__/vscode-url-helper.test.ts +++ b/frontend/__tests__/utils/vscode-url-helper.test.ts @@ -1,5 +1,5 @@ import { describe, it, expect, beforeEach, afterEach } from "vitest"; -import { transformVSCodeUrl } from "../vscode-url-helper"; +import { transformVSCodeUrl } from "#/utils/vscode-url-helper"; describe("transformVSCodeUrl", () => { const originalWindowLocation = window.location; From 089d9c1ee526d0d8f7422f5a94a3344dfa00489f Mon Sep 17 00:00:00 2001 From: Graham Neubig Date: Mon, 15 Dec 2025 11:21:13 -0500 Subject: [PATCH 08/80] Add deprecation warning to evaluation README (#11997) Co-authored-by: openhands --- evaluation/README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/evaluation/README.md b/evaluation/README.md index 694623f63d..b4a125b3fc 100644 --- a/evaluation/README.md +++ b/evaluation/README.md @@ -1,5 +1,10 @@ # Evaluation +> [!WARNING] +> **This directory is deprecated.** Our new benchmarks are located at [OpenHands/benchmarks](https://github.com/OpenHands/benchmarks). +> +> If you have already implemented a benchmark in this directory and would like to contribute it, we are happy to have the contribution. However, if you are starting anew, please use the new location. + This folder contains code and resources to run experiments and evaluations. ## For Benchmark Users From 5c377f303f654670961efec2bd0554aae2433408 Mon Sep 17 00:00:00 2001 From: Xingyao Wang Date: Mon, 15 Dec 2025 10:25:31 -0600 Subject: [PATCH 09/80] Update SWEBench score in README (#12051) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9fabb37a6e..3928ed32d9 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@
MIT License - Benchmark Score + Benchmark Score
Check out the documentation Tech Report From a12170e4c9ece88ff7703b36e0794598be74f515 Mon Sep 17 00:00:00 2001 From: Abhay Mishra Date: Mon, 15 Dec 2025 22:07:52 +0530 Subject: [PATCH 10/80] refactor(frontend): Extracted useQuery and useMutation from the main branch (#12031) Co-authored-by: sp.wack <83104063+amanape@users.noreply.github.com> --- .../features/payment/setup-payment-modal.tsx | 14 +---- .../features/settings/api-keys-manager.tsx | 6 +-- frontend/src/hooks/mutation/use-accept-tos.ts | 54 +++++++++++++++++++ .../mutation/use-create-billing-session.ts | 19 +++++++ .../hooks/mutation/use-refresh-llm-api-key.ts | 23 ++++++++ frontend/src/hooks/query/use-llm-api-key.ts | 19 +------ frontend/src/routes/accept-tos.tsx | 47 ++-------------- 7 files changed, 105 insertions(+), 77 deletions(-) create mode 100644 frontend/src/hooks/mutation/use-accept-tos.ts create mode 100644 frontend/src/hooks/mutation/use-create-billing-session.ts create mode 100644 frontend/src/hooks/mutation/use-refresh-llm-api-key.ts diff --git a/frontend/src/components/features/payment/setup-payment-modal.tsx b/frontend/src/components/features/payment/setup-payment-modal.tsx index 30cb0a4e54..7d8883a719 100644 --- a/frontend/src/components/features/payment/setup-payment-modal.tsx +++ b/frontend/src/components/features/payment/setup-payment-modal.tsx @@ -1,24 +1,14 @@ -import { useMutation } from "@tanstack/react-query"; import { Trans, useTranslation } from "react-i18next"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; import { ModalBody } from "#/components/shared/modals/modal-body"; -import BillingService from "#/api/billing-service/billing-service.api"; import { BrandButton } from "../settings/brand-button"; -import { displayErrorToast } from "#/utils/custom-toast-handlers"; +import { useCreateBillingSession } from "#/hooks/mutation/use-create-billing-session"; export function SetupPaymentModal() { const { t } = useTranslation(); - const { mutate, isPending } = useMutation({ - mutationFn: BillingService.createBillingSessionResponse, - onSuccess: (data) => { - window.location.href = data; - }, - onError: () => { - displayErrorToast(t(I18nKey.BILLING$ERROR_WHILE_CREATING_SESSION)); - }, - }); + const { mutate, isPending } = useCreateBillingSession(); return ( diff --git a/frontend/src/components/features/settings/api-keys-manager.tsx b/frontend/src/components/features/settings/api-keys-manager.tsx index 82d86fb4a9..20a8807aa0 100644 --- a/frontend/src/components/features/settings/api-keys-manager.tsx +++ b/frontend/src/components/features/settings/api-keys-manager.tsx @@ -13,10 +13,8 @@ import { CreateApiKeyModal } from "./create-api-key-modal"; import { DeleteApiKeyModal } from "./delete-api-key-modal"; import { NewApiKeyModal } from "./new-api-key-modal"; import { useApiKeys } from "#/hooks/query/use-api-keys"; -import { - useLlmApiKey, - useRefreshLlmApiKey, -} from "#/hooks/query/use-llm-api-key"; +import { useLlmApiKey } from "#/hooks/query/use-llm-api-key"; +import { useRefreshLlmApiKey } from "#/hooks/mutation/use-refresh-llm-api-key"; interface LlmApiKeyManagerProps { llmApiKey: { key: string | null } | undefined; diff --git a/frontend/src/hooks/mutation/use-accept-tos.ts b/frontend/src/hooks/mutation/use-accept-tos.ts new file mode 100644 index 0000000000..a159b1458c --- /dev/null +++ b/frontend/src/hooks/mutation/use-accept-tos.ts @@ -0,0 +1,54 @@ +import { useMutation } from "@tanstack/react-query"; +import { usePostHog } from "posthog-js/react"; +import { useNavigate } from "react-router"; +import { openHands } from "#/api/open-hands-axios"; +import { handleCaptureConsent } from "#/utils/handle-capture-consent"; +import { useTracking } from "#/hooks/use-tracking"; + +interface AcceptTosVariables { + redirectUrl: string; +} + +interface AcceptTosResponse { + redirect_url?: string; +} + +export const useAcceptTos = () => { + const posthog = usePostHog(); + const navigate = useNavigate(); + const { trackUserSignupCompleted } = useTracking(); + + return useMutation({ + mutationFn: async ({ redirectUrl }: AcceptTosVariables) => { + // Set consent for analytics + handleCaptureConsent(posthog, true); + + // Call the API to record TOS acceptance in the database + return openHands.post("/api/accept_tos", { + redirect_url: redirectUrl, + }); + }, + onSuccess: (response, { redirectUrl }) => { + // Track user signup completion + trackUserSignupCompleted(); + + // Get the redirect URL from the response + const finalRedirectUrl = response.data.redirect_url || redirectUrl; + + // Check if the redirect URL is an external URL (starts with http or https) + if ( + finalRedirectUrl.startsWith("http://") || + finalRedirectUrl.startsWith("https://") + ) { + // For external URLs, redirect using window.location + window.location.href = finalRedirectUrl; + } else { + // For internal routes, use navigate + navigate(finalRedirectUrl); + } + }, + onError: () => { + window.location.href = "/"; + }, + }); +}; diff --git a/frontend/src/hooks/mutation/use-create-billing-session.ts b/frontend/src/hooks/mutation/use-create-billing-session.ts new file mode 100644 index 0000000000..f8f0716cb2 --- /dev/null +++ b/frontend/src/hooks/mutation/use-create-billing-session.ts @@ -0,0 +1,19 @@ +import { useMutation } from "@tanstack/react-query"; +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; +import BillingService from "#/api/billing-service/billing-service.api"; +import { displayErrorToast } from "#/utils/custom-toast-handlers"; + +export const useCreateBillingSession = () => { + const { t } = useTranslation(); + + return useMutation({ + mutationFn: BillingService.createBillingSessionResponse, + onSuccess: (data) => { + window.location.href = data; + }, + onError: () => { + displayErrorToast(t(I18nKey.BILLING$ERROR_WHILE_CREATING_SESSION)); + }, + }); +}; diff --git a/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts b/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts new file mode 100644 index 0000000000..11a112e182 --- /dev/null +++ b/frontend/src/hooks/mutation/use-refresh-llm-api-key.ts @@ -0,0 +1,23 @@ +import { useMutation, useQueryClient } from "@tanstack/react-query"; +import { openHands } from "#/api/open-hands-axios"; +import { + LLM_API_KEY_QUERY_KEY, + LlmApiKeyResponse, +} from "#/hooks/query/use-llm-api-key"; + +export function useRefreshLlmApiKey() { + const queryClient = useQueryClient(); + + return useMutation({ + mutationFn: async () => { + const { data } = await openHands.post( + "/api/keys/llm/byor/refresh", + ); + return data; + }, + onSuccess: () => { + // Invalidate the LLM API key query to trigger a refetch + queryClient.invalidateQueries({ queryKey: [LLM_API_KEY_QUERY_KEY] }); + }, + }); +} diff --git a/frontend/src/hooks/query/use-llm-api-key.ts b/frontend/src/hooks/query/use-llm-api-key.ts index 5dcea9f714..58dee11411 100644 --- a/frontend/src/hooks/query/use-llm-api-key.ts +++ b/frontend/src/hooks/query/use-llm-api-key.ts @@ -1,4 +1,4 @@ -import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query"; +import { useQuery } from "@tanstack/react-query"; import { openHands } from "#/api/open-hands-axios"; import { useConfig } from "./use-config"; @@ -23,20 +23,3 @@ export function useLlmApiKey() { gcTime: 1000 * 60 * 15, // 15 minutes }); } - -export function useRefreshLlmApiKey() { - const queryClient = useQueryClient(); - - return useMutation({ - mutationFn: async () => { - const { data } = await openHands.post( - "/api/keys/llm/byor/refresh", - ); - return data; - }, - onSuccess: () => { - // Invalidate the LLM API key query to trigger a refetch - queryClient.invalidateQueries({ queryKey: [LLM_API_KEY_QUERY_KEY] }); - }, - }); -} diff --git a/frontend/src/routes/accept-tos.tsx b/frontend/src/routes/accept-tos.tsx index f723f2a5f6..a3732273e3 100644 --- a/frontend/src/routes/accept-tos.tsx +++ b/frontend/src/routes/accept-tos.tsx @@ -1,66 +1,27 @@ import React from "react"; import { useTranslation } from "react-i18next"; -import { useNavigate, useSearchParams } from "react-router"; -import { useMutation } from "@tanstack/react-query"; -import { usePostHog } from "posthog-js/react"; +import { useSearchParams } from "react-router"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { TOSCheckbox } from "#/components/features/waitlist/tos-checkbox"; import { BrandButton } from "#/components/features/settings/brand-button"; -import { handleCaptureConsent } from "#/utils/handle-capture-consent"; -import { openHands } from "#/api/open-hands-axios"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; -import { useTracking } from "#/hooks/use-tracking"; +import { useAcceptTos } from "#/hooks/mutation/use-accept-tos"; export default function AcceptTOS() { - const posthog = usePostHog(); const { t } = useTranslation(); - const navigate = useNavigate(); const [searchParams] = useSearchParams(); const [isTosAccepted, setIsTosAccepted] = React.useState(false); - const { trackUserSignupCompleted } = useTracking(); // Get the redirect URL from the query parameters const redirectUrl = searchParams.get("redirect_url") || "/"; // Use mutation for accepting TOS - const { mutate: acceptTOS, isPending: isSubmitting } = useMutation({ - mutationFn: async () => { - // Set consent for analytics - handleCaptureConsent(posthog, true); - - // Call the API to record TOS acceptance in the database - return openHands.post("/api/accept_tos", { - redirect_url: redirectUrl, - }); - }, - onSuccess: (response) => { - // Track user signup completion - trackUserSignupCompleted(); - - // Get the redirect URL from the response - const finalRedirectUrl = response.data.redirect_url || redirectUrl; - - // Check if the redirect URL is an external URL (starts with http or https) - if ( - finalRedirectUrl.startsWith("http://") || - finalRedirectUrl.startsWith("https://") - ) { - // For external URLs, redirect using window.location - window.location.href = finalRedirectUrl; - } else { - // For internal routes, use navigate - navigate(finalRedirectUrl); - } - }, - onError: () => { - window.location.href = "/"; - }, - }); + const { mutate: acceptTOS, isPending: isSubmitting } = useAcceptTos(); const handleAcceptTOS = () => { if (isTosAccepted && !isSubmitting) { - acceptTOS(); + acceptTOS({ redirectUrl }); } }; From 9753ad3a487b864074c6e1d0e7ab079381353f60 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 15 Dec 2025 10:47:21 -0700 Subject: [PATCH 11/80] Removed Legacy Conversation Manager (#12053) --- .../server/legacy_conversation_manager.py | 331 ------------ .../unit/test_legacy_conversation_manager.py | 485 ------------------ 2 files changed, 816 deletions(-) delete mode 100644 enterprise/server/legacy_conversation_manager.py delete mode 100644 enterprise/tests/unit/test_legacy_conversation_manager.py diff --git a/enterprise/server/legacy_conversation_manager.py b/enterprise/server/legacy_conversation_manager.py deleted file mode 100644 index 5c82b5b420..0000000000 --- a/enterprise/server/legacy_conversation_manager.py +++ /dev/null @@ -1,331 +0,0 @@ -from __future__ import annotations - -import time -from dataclasses import dataclass, field - -import socketio -from server.clustered_conversation_manager import ClusteredConversationManager -from server.saas_nested_conversation_manager import SaasNestedConversationManager - -from openhands.core.config import LLMConfig, OpenHandsConfig -from openhands.events.action import MessageAction -from openhands.server.config.server_config import ServerConfig -from openhands.server.conversation_manager.conversation_manager import ( - ConversationManager, -) -from openhands.server.data_models.agent_loop_info import AgentLoopInfo -from openhands.server.monitoring import MonitoringListener -from openhands.server.session.conversation import ServerConversation -from openhands.storage.data_models.settings import Settings -from openhands.storage.files import FileStore -from openhands.utils.async_utils import wait_all - -_LEGACY_ENTRY_TIMEOUT_SECONDS = 3600 - - -@dataclass -class LegacyCacheEntry: - """Cache entry for legacy mode status.""" - - is_legacy: bool - timestamp: float - - -@dataclass -class LegacyConversationManager(ConversationManager): - """ - Conversation manager for use while migrating - since existing conversations are not nested! - Separate class from SaasNestedConversationManager so it can be easliy removed in a few weeks. - (As of 2025-07-23) - """ - - sio: socketio.AsyncServer - config: OpenHandsConfig - server_config: ServerConfig - file_store: FileStore - conversation_manager: SaasNestedConversationManager - legacy_conversation_manager: ClusteredConversationManager - _legacy_cache: dict[str, LegacyCacheEntry] = field(default_factory=dict) - - async def __aenter__(self): - await wait_all( - [ - self.conversation_manager.__aenter__(), - self.legacy_conversation_manager.__aenter__(), - ] - ) - return self - - async def __aexit__(self, exc_type, exc_value, traceback): - await wait_all( - [ - self.conversation_manager.__aexit__(exc_type, exc_value, traceback), - self.legacy_conversation_manager.__aexit__( - exc_type, exc_value, traceback - ), - ] - ) - - async def request_llm_completion( - self, - sid: str, - service_id: str, - llm_config: LLMConfig, - messages: list[dict[str, str]], - ) -> str: - session = self.get_agent_session(sid) - llm_registry = session.llm_registry - return llm_registry.request_extraneous_completion( - service_id, llm_config, messages - ) - - async def attach_to_conversation( - self, sid: str, user_id: str | None = None - ) -> ServerConversation | None: - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.attach_to_conversation( - sid, user_id - ) - return await self.conversation_manager.attach_to_conversation(sid, user_id) - - async def detach_from_conversation(self, conversation: ServerConversation): - if await self.should_start_in_legacy_mode(conversation.sid): - return await self.legacy_conversation_manager.detach_from_conversation( - conversation - ) - return await self.conversation_manager.detach_from_conversation(conversation) - - async def join_conversation( - self, - sid: str, - connection_id: str, - settings: Settings, - user_id: str | None, - ) -> AgentLoopInfo: - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.join_conversation( - sid, connection_id, settings, user_id - ) - return await self.conversation_manager.join_conversation( - sid, connection_id, settings, user_id - ) - - def get_agent_session(self, sid: str): - session = self.legacy_conversation_manager.get_agent_session(sid) - if session is None: - session = self.conversation_manager.get_agent_session(sid) - return session - - async def get_running_agent_loops( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> set[str]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ) - - # Get all running agent loops from both managers - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ), - self.legacy_conversation_manager.get_running_agent_loops( - user_id, filter_to_sids - ), - ] - ) - - # Combine the results - result = set() - for sid in legacy_agent_loops: - if await self.should_start_in_legacy_mode(sid): - result.add(sid) - - for sid in agent_loops: - if not await self.should_start_in_legacy_mode(sid): - result.add(sid) - - return result - - async def is_agent_loop_running(self, sid: str) -> bool: - return bool(await self.get_running_agent_loops(filter_to_sids={sid})) - - async def get_connections( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> dict[str, str]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_connections( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_connections( - user_id, filter_to_sids - ) - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_connections(user_id, filter_to_sids), - self.legacy_conversation_manager.get_connections( - user_id, filter_to_sids - ), - ] - ) - legacy_agent_loops.update(agent_loops) - return legacy_agent_loops - - async def maybe_start_agent_loop( - self, - sid: str, - settings: Settings, - user_id: str, # type: ignore[override] - initial_user_msg: MessageAction | None = None, - replay_json: str | None = None, - ) -> AgentLoopInfo: - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.maybe_start_agent_loop( - sid, settings, user_id, initial_user_msg, replay_json - ) - return await self.conversation_manager.maybe_start_agent_loop( - sid, settings, user_id, initial_user_msg, replay_json - ) - - async def send_to_event_stream(self, connection_id: str, data: dict): - return await self.legacy_conversation_manager.send_to_event_stream( - connection_id, data - ) - - async def send_event_to_conversation(self, sid: str, data: dict): - if await self.should_start_in_legacy_mode(sid): - await self.legacy_conversation_manager.send_event_to_conversation(sid, data) - await self.conversation_manager.send_event_to_conversation(sid, data) - - async def disconnect_from_session(self, connection_id: str): - return await self.legacy_conversation_manager.disconnect_from_session( - connection_id - ) - - async def close_session(self, sid: str): - if await self.should_start_in_legacy_mode(sid): - await self.legacy_conversation_manager.close_session(sid) - await self.conversation_manager.close_session(sid) - - async def get_agent_loop_info( - self, user_id: str | None = None, filter_to_sids: set[str] | None = None - ) -> list[AgentLoopInfo]: - if filter_to_sids and len(filter_to_sids) == 1: - sid = next(iter(filter_to_sids)) - if await self.should_start_in_legacy_mode(sid): - return await self.legacy_conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ) - return await self.conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ) - agent_loops, legacy_agent_loops = await wait_all( - [ - self.conversation_manager.get_agent_loop_info(user_id, filter_to_sids), - self.legacy_conversation_manager.get_agent_loop_info( - user_id, filter_to_sids - ), - ] - ) - - # Combine results - result = [] - legacy_sids = set() - - # Add legacy agent loops - for agent_loop in legacy_agent_loops: - if await self.should_start_in_legacy_mode(agent_loop.conversation_id): - result.append(agent_loop) - legacy_sids.add(agent_loop.conversation_id) - - # Add non-legacy agent loops - for agent_loop in agent_loops: - if ( - agent_loop.conversation_id not in legacy_sids - and not await self.should_start_in_legacy_mode( - agent_loop.conversation_id - ) - ): - result.append(agent_loop) - - return result - - def _cleanup_expired_cache_entries(self): - """Remove expired entries from the local cache.""" - current_time = time.time() - expired_keys = [ - key - for key, entry in self._legacy_cache.items() - if current_time - entry.timestamp > _LEGACY_ENTRY_TIMEOUT_SECONDS - ] - for key in expired_keys: - del self._legacy_cache[key] - - async def should_start_in_legacy_mode(self, conversation_id: str) -> bool: - """ - Check if a conversation should run in legacy mode by directly checking the runtime. - The /list method does not include stopped conversations even though the PVC for these - may not yet have been deleted, so we need to check /sessions/{session_id} directly. - """ - # Clean up expired entries periodically - self._cleanup_expired_cache_entries() - - # First check the local cache - if conversation_id in self._legacy_cache: - cached_entry = self._legacy_cache[conversation_id] - # Check if the cached value is still valid - if time.time() - cached_entry.timestamp <= _LEGACY_ENTRY_TIMEOUT_SECONDS: - return cached_entry.is_legacy - - # If not in cache or expired, check the runtime directly - runtime = await self.conversation_manager._get_runtime(conversation_id) - is_legacy = self.is_legacy_runtime(runtime) - - # Cache the result with current timestamp - self._legacy_cache[conversation_id] = LegacyCacheEntry(is_legacy, time.time()) - - return is_legacy - - def is_legacy_runtime(self, runtime: dict | None) -> bool: - """ - Determine if a runtime is a legacy runtime based on its command. - - Args: - runtime: The runtime dictionary or None if not found - - Returns: - bool: True if this is a legacy runtime, False otherwise - """ - if runtime is None: - return False - return 'openhands.server' not in runtime['command'] - - @classmethod - def get_instance( - cls, - sio: socketio.AsyncServer, - config: OpenHandsConfig, - file_store: FileStore, - server_config: ServerConfig, - monitoring_listener: MonitoringListener, - ) -> ConversationManager: - return LegacyConversationManager( - sio=sio, - config=config, - server_config=server_config, - file_store=file_store, - conversation_manager=SaasNestedConversationManager.get_instance( - sio, config, file_store, server_config, monitoring_listener - ), - legacy_conversation_manager=ClusteredConversationManager.get_instance( - sio, config, file_store, server_config, monitoring_listener - ), - ) diff --git a/enterprise/tests/unit/test_legacy_conversation_manager.py b/enterprise/tests/unit/test_legacy_conversation_manager.py deleted file mode 100644 index 55b424dabc..0000000000 --- a/enterprise/tests/unit/test_legacy_conversation_manager.py +++ /dev/null @@ -1,485 +0,0 @@ -import time -from unittest.mock import AsyncMock, MagicMock, patch - -import pytest -from server.legacy_conversation_manager import ( - _LEGACY_ENTRY_TIMEOUT_SECONDS, - LegacyCacheEntry, - LegacyConversationManager, -) - -from openhands.core.config.openhands_config import OpenHandsConfig -from openhands.server.config.server_config import ServerConfig -from openhands.server.monitoring import MonitoringListener -from openhands.storage.memory import InMemoryFileStore - - -@pytest.fixture -def mock_sio(): - """Create a mock SocketIO server.""" - return MagicMock() - - -@pytest.fixture -def mock_config(): - """Create a mock OpenHands config.""" - return MagicMock(spec=OpenHandsConfig) - - -@pytest.fixture -def mock_server_config(): - """Create a mock server config.""" - return MagicMock(spec=ServerConfig) - - -@pytest.fixture -def mock_file_store(): - """Create a mock file store.""" - return MagicMock(spec=InMemoryFileStore) - - -@pytest.fixture -def mock_monitoring_listener(): - """Create a mock monitoring listener.""" - return MagicMock(spec=MonitoringListener) - - -@pytest.fixture -def mock_conversation_manager(): - """Create a mock SaasNestedConversationManager.""" - mock_cm = MagicMock() - mock_cm._get_runtime = AsyncMock() - return mock_cm - - -@pytest.fixture -def mock_legacy_conversation_manager(): - """Create a mock ClusteredConversationManager.""" - return MagicMock() - - -@pytest.fixture -def legacy_manager( - mock_sio, - mock_config, - mock_server_config, - mock_file_store, - mock_conversation_manager, - mock_legacy_conversation_manager, -): - """Create a LegacyConversationManager instance for testing.""" - return LegacyConversationManager( - sio=mock_sio, - config=mock_config, - server_config=mock_server_config, - file_store=mock_file_store, - conversation_manager=mock_conversation_manager, - legacy_conversation_manager=mock_legacy_conversation_manager, - ) - - -class TestLegacyCacheEntry: - """Test the LegacyCacheEntry dataclass.""" - - def test_cache_entry_creation(self): - """Test creating a cache entry.""" - timestamp = time.time() - entry = LegacyCacheEntry(is_legacy=True, timestamp=timestamp) - - assert entry.is_legacy is True - assert entry.timestamp == timestamp - - def test_cache_entry_false(self): - """Test creating a cache entry with False value.""" - timestamp = time.time() - entry = LegacyCacheEntry(is_legacy=False, timestamp=timestamp) - - assert entry.is_legacy is False - assert entry.timestamp == timestamp - - -class TestLegacyConversationManagerCacheCleanup: - """Test cache cleanup functionality.""" - - def test_cleanup_expired_cache_entries_removes_expired(self, legacy_manager): - """Test that expired entries are removed from cache.""" - current_time = time.time() - expired_time = current_time - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - valid_time = current_time - 100 # Well within timeout - - # Add both expired and valid entries - legacy_manager._legacy_cache = { - 'expired_conversation': LegacyCacheEntry(True, expired_time), - 'valid_conversation': LegacyCacheEntry(False, valid_time), - 'another_expired': LegacyCacheEntry(True, expired_time - 100), - } - - legacy_manager._cleanup_expired_cache_entries() - - # Only valid entry should remain - assert len(legacy_manager._legacy_cache) == 1 - assert 'valid_conversation' in legacy_manager._legacy_cache - assert 'expired_conversation' not in legacy_manager._legacy_cache - assert 'another_expired' not in legacy_manager._legacy_cache - - def test_cleanup_expired_cache_entries_keeps_valid(self, legacy_manager): - """Test that valid entries are kept during cleanup.""" - current_time = time.time() - valid_time = current_time - 100 # Well within timeout - - legacy_manager._legacy_cache = { - 'valid_conversation_1': LegacyCacheEntry(True, valid_time), - 'valid_conversation_2': LegacyCacheEntry(False, valid_time - 50), - } - - legacy_manager._cleanup_expired_cache_entries() - - # Both entries should remain - assert len(legacy_manager._legacy_cache) == 2 - assert 'valid_conversation_1' in legacy_manager._legacy_cache - assert 'valid_conversation_2' in legacy_manager._legacy_cache - - def test_cleanup_expired_cache_entries_empty_cache(self, legacy_manager): - """Test cleanup with empty cache.""" - legacy_manager._legacy_cache = {} - - legacy_manager._cleanup_expired_cache_entries() - - assert len(legacy_manager._legacy_cache) == 0 - - -class TestIsLegacyRuntime: - """Test the is_legacy_runtime method.""" - - def test_is_legacy_runtime_none(self, legacy_manager): - """Test with None runtime.""" - result = legacy_manager.is_legacy_runtime(None) - assert result is False - - def test_is_legacy_runtime_legacy_command(self, legacy_manager): - """Test with legacy runtime command.""" - runtime = {'command': 'some_old_legacy_command'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_new_command(self, legacy_manager): - """Test with new runtime command containing openhands.server.""" - runtime = {'command': 'python -m openhands.server.listen'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is False - - def test_is_legacy_runtime_partial_match(self, legacy_manager): - """Test with command that partially matches but is still legacy.""" - runtime = {'command': 'openhands.client.start'} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_empty_command(self, legacy_manager): - """Test with empty command.""" - runtime = {'command': ''} - result = legacy_manager.is_legacy_runtime(runtime) - assert result is True - - def test_is_legacy_runtime_missing_command_key(self, legacy_manager): - """Test with runtime missing command key.""" - runtime = {'other_key': 'value'} - # This should raise a KeyError - with pytest.raises(KeyError): - legacy_manager.is_legacy_runtime(runtime) - - -class TestShouldStartInLegacyMode: - """Test the should_start_in_legacy_mode method.""" - - @pytest.mark.asyncio - async def test_cache_hit_valid_entry_legacy(self, legacy_manager): - """Test cache hit with valid legacy entry.""" - conversation_id = 'test_conversation' - current_time = time.time() - - # Add valid cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, current_time - 100 - ) - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is True - # Should not call _get_runtime since we hit cache - legacy_manager.conversation_manager._get_runtime.assert_not_called() - - @pytest.mark.asyncio - async def test_cache_hit_valid_entry_non_legacy(self, legacy_manager): - """Test cache hit with valid non-legacy entry.""" - conversation_id = 'test_conversation' - current_time = time.time() - - # Add valid cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - False, current_time - 100 - ) - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should not call _get_runtime since we hit cache - legacy_manager.conversation_manager._get_runtime.assert_not_called() - - @pytest.mark.asyncio - async def test_cache_miss_legacy_runtime(self, legacy_manager): - """Test cache miss with legacy runtime.""" - conversation_id = 'test_conversation' - runtime = {'command': 'old_command'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is True - # Should call _get_runtime - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is True - - @pytest.mark.asyncio - async def test_cache_miss_non_legacy_runtime(self, legacy_manager): - """Test cache miss with non-legacy runtime.""" - conversation_id = 'test_conversation' - runtime = {'command': 'python -m openhands.server.listen'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should call _get_runtime - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - - @pytest.mark.asyncio - async def test_cache_expired_entry(self, legacy_manager): - """Test with expired cache entry.""" - conversation_id = 'test_conversation' - expired_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - runtime = {'command': 'python -m openhands.server.listen'} - - # Add expired cache entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, - expired_time, # This should be considered expired - ) - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False # Runtime indicates non-legacy - # Should call _get_runtime since cache is expired - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - # Should update cache with new result - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - - @pytest.mark.asyncio - async def test_cache_exactly_at_timeout(self, legacy_manager): - """Test with cache entry exactly at timeout boundary.""" - conversation_id = 'test_conversation' - timeout_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - runtime = {'command': 'python -m openhands.server.listen'} - - # Add cache entry exactly at timeout - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry( - True, timeout_time - ) - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - # Should treat as expired and fetch from runtime - assert result is False - legacy_manager.conversation_manager._get_runtime.assert_called_once_with( - conversation_id - ) - - @pytest.mark.asyncio - async def test_runtime_returns_none(self, legacy_manager): - """Test when runtime returns None.""" - conversation_id = 'test_conversation' - - legacy_manager.conversation_manager._get_runtime.return_value = None - - result = await legacy_manager.should_start_in_legacy_mode(conversation_id) - - assert result is False - # Should cache the result - assert conversation_id in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conversation_id].is_legacy is False - - @pytest.mark.asyncio - async def test_cleanup_called_on_each_invocation(self, legacy_manager): - """Test that cleanup is called on each invocation.""" - conversation_id = 'test_conversation' - runtime = {'command': 'test'} - - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - # Mock the cleanup method to verify it's called - with patch.object( - legacy_manager, '_cleanup_expired_cache_entries' - ) as mock_cleanup: - await legacy_manager.should_start_in_legacy_mode(conversation_id) - mock_cleanup.assert_called_once() - - @pytest.mark.asyncio - async def test_multiple_conversations_cached_independently(self, legacy_manager): - """Test that multiple conversations are cached independently.""" - conv1 = 'conversation_1' - conv2 = 'conversation_2' - - runtime1 = {'command': 'old_command'} # Legacy - runtime2 = {'command': 'python -m openhands.server.listen'} # Non-legacy - - # Mock to return different runtimes based on conversation_id - def mock_get_runtime(conversation_id): - if conversation_id == conv1: - return runtime1 - return runtime2 - - legacy_manager.conversation_manager._get_runtime.side_effect = mock_get_runtime - - result1 = await legacy_manager.should_start_in_legacy_mode(conv1) - result2 = await legacy_manager.should_start_in_legacy_mode(conv2) - - assert result1 is True - assert result2 is False - - # Both should be cached - assert conv1 in legacy_manager._legacy_cache - assert conv2 in legacy_manager._legacy_cache - assert legacy_manager._legacy_cache[conv1].is_legacy is True - assert legacy_manager._legacy_cache[conv2].is_legacy is False - - @pytest.mark.asyncio - async def test_cache_timestamp_updated_on_refresh(self, legacy_manager): - """Test that cache timestamp is updated when entry is refreshed.""" - conversation_id = 'test_conversation' - old_time = time.time() - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - runtime = {'command': 'test'} - - # Add expired entry - legacy_manager._legacy_cache[conversation_id] = LegacyCacheEntry(True, old_time) - legacy_manager.conversation_manager._get_runtime.return_value = runtime - - # Record time before call - before_call = time.time() - await legacy_manager.should_start_in_legacy_mode(conversation_id) - after_call = time.time() - - # Timestamp should be updated - cached_entry = legacy_manager._legacy_cache[conversation_id] - assert cached_entry.timestamp >= before_call - assert cached_entry.timestamp <= after_call - - -class TestLegacyConversationManagerIntegration: - """Integration tests for LegacyConversationManager.""" - - @pytest.mark.asyncio - async def test_get_instance_creates_proper_manager( - self, - mock_sio, - mock_config, - mock_file_store, - mock_server_config, - mock_monitoring_listener, - ): - """Test that get_instance creates a properly configured manager.""" - with patch( - 'server.legacy_conversation_manager.SaasNestedConversationManager' - ) as mock_saas, patch( - 'server.legacy_conversation_manager.ClusteredConversationManager' - ) as mock_clustered: - mock_saas.get_instance.return_value = MagicMock() - mock_clustered.get_instance.return_value = MagicMock() - - manager = LegacyConversationManager.get_instance( - mock_sio, - mock_config, - mock_file_store, - mock_server_config, - mock_monitoring_listener, - ) - - assert isinstance(manager, LegacyConversationManager) - assert manager.sio == mock_sio - assert manager.config == mock_config - assert manager.file_store == mock_file_store - assert manager.server_config == mock_server_config - - # Verify that both nested managers are created - mock_saas.get_instance.assert_called_once() - mock_clustered.get_instance.assert_called_once() - - def test_legacy_cache_initialized_empty(self, legacy_manager): - """Test that legacy cache is initialized as empty dict.""" - assert isinstance(legacy_manager._legacy_cache, dict) - assert len(legacy_manager._legacy_cache) == 0 - - -class TestEdgeCases: - """Test edge cases and error scenarios.""" - - @pytest.mark.asyncio - async def test_get_runtime_raises_exception(self, legacy_manager): - """Test behavior when _get_runtime raises an exception.""" - conversation_id = 'test_conversation' - - legacy_manager.conversation_manager._get_runtime.side_effect = Exception( - 'Runtime error' - ) - - # Should propagate the exception - with pytest.raises(Exception, match='Runtime error'): - await legacy_manager.should_start_in_legacy_mode(conversation_id) - - @pytest.mark.asyncio - async def test_very_large_cache(self, legacy_manager): - """Test behavior with a large number of cache entries.""" - current_time = time.time() - - # Add many cache entries - for i in range(1000): - legacy_manager._legacy_cache[f'conversation_{i}'] = LegacyCacheEntry( - i % 2 == 0, current_time - i - ) - - # This should work without issues - await legacy_manager.should_start_in_legacy_mode('new_conversation') - - # Should have added one more entry - assert len(legacy_manager._legacy_cache) == 1001 - - def test_cleanup_with_concurrent_modifications(self, legacy_manager): - """Test cleanup behavior when cache is modified during cleanup.""" - current_time = time.time() - expired_time = current_time - _LEGACY_ENTRY_TIMEOUT_SECONDS - 1 - - # Add expired entries - legacy_manager._legacy_cache = { - f'conversation_{i}': LegacyCacheEntry(True, expired_time) for i in range(10) - } - - # This should work without raising exceptions - legacy_manager._cleanup_expired_cache_entries() - - # All entries should be removed - assert len(legacy_manager._legacy_cache) == 0 From ee9754208080ed97989aedfaae05019b443838f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 15 Dec 2025 23:08:17 +0400 Subject: [PATCH 12/80] chore(deps): bump the version-all group in /frontend with 7 updates (#12050) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/package-lock.json | 1168 ++++++++++++++---------------------- frontend/package.json | 14 +- 2 files changed, 469 insertions(+), 713 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index b4425a3c58..9334b494cf 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,7 +8,7 @@ "name": "openhands-frontend", "version": "0.62.0", "dependencies": { - "@heroui/react": "2.8.5", + "@heroui/react": "2.8.6", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", "@react-router/node": "^7.10.1", @@ -24,13 +24,13 @@ "downshift": "^9.0.13", "eslint-config-airbnb-typescript": "^18.0.0", "framer-motion": "^12.23.25", - "i18next": "^25.7.2", + "i18next": "^25.7.3", "i18next-browser-languagedetector": "^8.2.0", "i18next-http-backend": "^3.0.2", "isbot": "^5.1.32", "lucide-react": "^0.561.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.306.0", + "posthog-js": "^1.306.1", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -45,7 +45,7 @@ "socket.io-client": "^4.8.1", "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", - "vite": "^7.2.7", + "vite": "^7.3.0", "zustand": "^5.0.9" }, "devDependencies": { @@ -56,9 +56,9 @@ "@tanstack/eslint-plugin-query": "^5.91.0", "@testing-library/dom": "^10.4.1", "@testing-library/jest-dom": "^6.9.1", - "@testing-library/react": "^16.3.0", + "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^25.0.1", + "@types/node": "^25.0.2", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", @@ -85,7 +85,7 @@ "tailwindcss": "^4.1.8", "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^5.1.4", + "vite-tsconfig-paths": "^6.0.1", "vitest": "^4.0.14" }, "engines": { @@ -789,13 +789,12 @@ "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.25.12.tgz", - "integrity": "sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.27.1.tgz", + "integrity": "sha512-HHB50pdsBX6k47S4u5g/CaLjqS3qwaOVE5ILsq64jyzgMhLuCuZ8rGzM9yhsAjfjkbgUPMzZEPa7DAp7yz6vuA==", "cpu": [ "ppc64" ], - "license": "MIT", "optional": true, "os": [ "aix" @@ -805,13 +804,12 @@ } }, "node_modules/@esbuild/android-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.25.12.tgz", - "integrity": "sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.27.1.tgz", + "integrity": "sha512-kFqa6/UcaTbGm/NncN9kzVOODjhZW8e+FRdSeypWe6j33gzclHtwlANs26JrupOntlcWmB0u8+8HZo8s7thHvg==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -821,13 +819,12 @@ } }, "node_modules/@esbuild/android-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.25.12.tgz", - "integrity": "sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.27.1.tgz", + "integrity": "sha512-45fuKmAJpxnQWixOGCrS+ro4Uvb4Re9+UTieUY2f8AEc+t7d4AaZ6eUJ3Hva7dtrxAAWHtlEFsXFMAgNnGU9uQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -837,13 +834,12 @@ } }, "node_modules/@esbuild/android-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.25.12.tgz", - "integrity": "sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.27.1.tgz", + "integrity": "sha512-LBEpOz0BsgMEeHgenf5aqmn/lLNTFXVfoWMUox8CtWWYK9X4jmQzWjoGoNb8lmAYml/tQ/Ysvm8q7szu7BoxRQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "android" @@ -853,13 +849,12 @@ } }, "node_modules/@esbuild/darwin-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.25.12.tgz", - "integrity": "sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.27.1.tgz", + "integrity": "sha512-veg7fL8eMSCVKL7IW4pxb54QERtedFDfY/ASrumK/SbFsXnRazxY4YykN/THYqFnFwJ0aVjiUrVG2PwcdAEqQQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -869,13 +864,12 @@ } }, "node_modules/@esbuild/darwin-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.25.12.tgz", - "integrity": "sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.27.1.tgz", + "integrity": "sha512-+3ELd+nTzhfWb07Vol7EZ+5PTbJ/u74nC6iv4/lwIU99Ip5uuY6QoIf0Hn4m2HoV0qcnRivN3KSqc+FyCHjoVQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "darwin" @@ -885,13 +879,12 @@ } }, "node_modules/@esbuild/freebsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.25.12.tgz", - "integrity": "sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.27.1.tgz", + "integrity": "sha512-/8Rfgns4XD9XOSXlzUDepG8PX+AVWHliYlUkFI3K3GB6tqbdjYqdhcb4BKRd7C0BhZSoaCxhv8kTcBrcZWP+xg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "freebsd" @@ -901,13 +894,12 @@ } }, "node_modules/@esbuild/freebsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.25.12.tgz", - "integrity": "sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.27.1.tgz", + "integrity": "sha512-GITpD8dK9C+r+5yRT/UKVT36h/DQLOHdwGVwwoHidlnA168oD3uxA878XloXebK4Ul3gDBBIvEdL7go9gCUFzQ==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "freebsd" @@ -917,13 +909,12 @@ } }, "node_modules/@esbuild/linux-arm": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.25.12.tgz", - "integrity": "sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.27.1.tgz", + "integrity": "sha512-ieMID0JRZY/ZeCrsFQ3Y3NlHNCqIhTprJfDgSB3/lv5jJZ8FX3hqPyXWhe+gvS5ARMBJ242PM+VNz/ctNj//eA==", "cpu": [ "arm" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -933,13 +924,12 @@ } }, "node_modules/@esbuild/linux-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.25.12.tgz", - "integrity": "sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.27.1.tgz", + "integrity": "sha512-W9//kCrh/6in9rWIBdKaMtuTTzNj6jSeG/haWBADqLLa9P8O5YSRDzgD5y9QBok4AYlzS6ARHifAb75V6G670Q==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -949,13 +939,12 @@ } }, "node_modules/@esbuild/linux-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.25.12.tgz", - "integrity": "sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.27.1.tgz", + "integrity": "sha512-VIUV4z8GD8rtSVMfAj1aXFahsi/+tcoXXNYmXgzISL+KB381vbSTNdeZHHHIYqFyXcoEhu9n5cT+05tRv13rlw==", "cpu": [ "ia32" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -965,13 +954,12 @@ } }, "node_modules/@esbuild/linux-loong64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.25.12.tgz", - "integrity": "sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.27.1.tgz", + "integrity": "sha512-l4rfiiJRN7sTNI//ff65zJ9z8U+k6zcCg0LALU5iEWzY+a1mVZ8iWC1k5EsNKThZ7XCQ6YWtsZ8EWYm7r1UEsg==", "cpu": [ "loong64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -981,13 +969,12 @@ } }, "node_modules/@esbuild/linux-mips64el": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.25.12.tgz", - "integrity": "sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.27.1.tgz", + "integrity": "sha512-U0bEuAOLvO/DWFdygTHWY8C067FXz+UbzKgxYhXC0fDieFa0kDIra1FAhsAARRJbvEyso8aAqvPdNxzWuStBnA==", "cpu": [ "mips64el" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -997,13 +984,12 @@ } }, "node_modules/@esbuild/linux-ppc64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.25.12.tgz", - "integrity": "sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.27.1.tgz", + "integrity": "sha512-NzdQ/Xwu6vPSf/GkdmRNsOfIeSGnh7muundsWItmBsVpMoNPVpM61qNzAVY3pZ1glzzAxLR40UyYM23eaDDbYQ==", "cpu": [ "ppc64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1013,13 +999,12 @@ } }, "node_modules/@esbuild/linux-riscv64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.25.12.tgz", - "integrity": "sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.27.1.tgz", + "integrity": "sha512-7zlw8p3IApcsN7mFw0O1Z1PyEk6PlKMu18roImfl3iQHTnr/yAfYv6s4hXPidbDoI2Q0pW+5xeoM4eTCC0UdrQ==", "cpu": [ "riscv64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1029,13 +1014,12 @@ } }, "node_modules/@esbuild/linux-s390x": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.25.12.tgz", - "integrity": "sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.27.1.tgz", + "integrity": "sha512-cGj5wli+G+nkVQdZo3+7FDKC25Uh4ZVwOAK6A06Hsvgr8WqBBuOy/1s+PUEd/6Je+vjfm6stX0kmib5b/O2Ykw==", "cpu": [ "s390x" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1045,13 +1029,12 @@ } }, "node_modules/@esbuild/linux-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.25.12.tgz", - "integrity": "sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.27.1.tgz", + "integrity": "sha512-z3H/HYI9MM0HTv3hQZ81f+AKb+yEoCRlUby1F80vbQ5XdzEMyY/9iNlAmhqiBKw4MJXwfgsh7ERGEOhrM1niMA==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "linux" @@ -1061,13 +1044,12 @@ } }, "node_modules/@esbuild/netbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.25.12.tgz", - "integrity": "sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-arm64/-/netbsd-arm64-0.27.1.tgz", + "integrity": "sha512-wzC24DxAvk8Em01YmVXyjl96Mr+ecTPyOuADAvjGg+fyBpGmxmcr2E5ttf7Im8D0sXZihpxzO1isus8MdjMCXQ==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "netbsd" @@ -1077,13 +1059,12 @@ } }, "node_modules/@esbuild/netbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.25.12.tgz", - "integrity": "sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.27.1.tgz", + "integrity": "sha512-1YQ8ybGi2yIXswu6eNzJsrYIGFpnlzEWRl6iR5gMgmsrR0FcNoV1m9k9sc3PuP5rUBLshOZylc9nqSgymI+TYg==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "netbsd" @@ -1093,13 +1074,12 @@ } }, "node_modules/@esbuild/openbsd-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.25.12.tgz", - "integrity": "sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-arm64/-/openbsd-arm64-0.27.1.tgz", + "integrity": "sha512-5Z+DzLCrq5wmU7RDaMDe2DVXMRm2tTDvX2KU14JJVBN2CT/qov7XVix85QoJqHltpvAOZUAc3ndU56HSMWrv8g==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "openbsd" @@ -1109,13 +1089,12 @@ } }, "node_modules/@esbuild/openbsd-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.25.12.tgz", - "integrity": "sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.27.1.tgz", + "integrity": "sha512-Q73ENzIdPF5jap4wqLtsfh8YbYSZ8Q0wnxplOlZUOyZy7B4ZKW8DXGWgTCZmF8VWD7Tciwv5F4NsRf6vYlZtqg==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "openbsd" @@ -1125,13 +1104,12 @@ } }, "node_modules/@esbuild/openharmony-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.25.12.tgz", - "integrity": "sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/openharmony-arm64/-/openharmony-arm64-0.27.1.tgz", + "integrity": "sha512-ajbHrGM/XiK+sXM0JzEbJAen+0E+JMQZ2l4RR4VFwvV9JEERx+oxtgkpoKv1SevhjavK2z2ReHk32pjzktWbGg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "openharmony" @@ -1141,13 +1119,12 @@ } }, "node_modules/@esbuild/sunos-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.25.12.tgz", - "integrity": "sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.27.1.tgz", + "integrity": "sha512-IPUW+y4VIjuDVn+OMzHc5FV4GubIwPnsz6ubkvN8cuhEqH81NovB53IUlrlBkPMEPxvNnf79MGBoz8rZ2iW8HA==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "sunos" @@ -1157,13 +1134,12 @@ } }, "node_modules/@esbuild/win32-arm64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.25.12.tgz", - "integrity": "sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.27.1.tgz", + "integrity": "sha512-RIVRWiljWA6CdVu8zkWcRmGP7iRRIIwvhDKem8UMBjPql2TXM5PkDVvvrzMtj1V+WFPB4K7zkIGM7VzRtFkjdg==", "cpu": [ "arm64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -1173,13 +1149,12 @@ } }, "node_modules/@esbuild/win32-ia32": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.25.12.tgz", - "integrity": "sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.27.1.tgz", + "integrity": "sha512-2BR5M8CPbptC1AK5JbJT1fWrHLvejwZidKx3UMSF0ecHMa+smhi16drIrCEggkgviBwLYd5nwrFLSl5Kho96RQ==", "cpu": [ "ia32" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -1189,13 +1164,12 @@ } }, "node_modules/@esbuild/win32-x64": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.25.12.tgz", - "integrity": "sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.27.1.tgz", + "integrity": "sha512-d5X6RMYv6taIymSk8JBP+nxv8DQAMY6A51GPgusqLdK9wBz5wWIXy1KjTck6HnjE9hqJzJRdk+1p/t5soSbCtw==", "cpu": [ "x64" ], - "license": "MIT", "optional": true, "os": [ "win32" @@ -1295,7 +1269,6 @@ "version": "2.3.6", "resolved": "https://registry.npmjs.org/@formatjs/ecma402-abstract/-/ecma402-abstract-2.3.6.tgz", "integrity": "sha512-HJnTFeRM2kVFVr5gr5kH1XP6K0JcJtE7Lzvtr3FS/so5f1kpsqqqxy5JF+FRaO6H2qmcMfAUIox7AJteieRtVw==", - "license": "MIT", "dependencies": { "@formatjs/fast-memoize": "2.2.7", "@formatjs/intl-localematcher": "0.6.2", @@ -1307,7 +1280,6 @@ "version": "2.2.7", "resolved": "https://registry.npmjs.org/@formatjs/fast-memoize/-/fast-memoize-2.2.7.tgz", "integrity": "sha512-Yabmi9nSvyOMrlSeGGWDiH7rf3a7sIwplbvo/dlz9WCIjzIQAfy1RMf4S0X3yG724n5Ghu2GmEl5NJIV6O9sZQ==", - "license": "MIT", "dependencies": { "tslib": "^2.8.0" } @@ -1316,7 +1288,6 @@ "version": "2.11.4", "resolved": "https://registry.npmjs.org/@formatjs/icu-messageformat-parser/-/icu-messageformat-parser-2.11.4.tgz", "integrity": "sha512-7kR78cRrPNB4fjGFZg3Rmj5aah8rQj9KPzuLsmcSn4ipLXQvC04keycTI1F7kJYDwIXtT2+7IDEto842CfZBtw==", - "license": "MIT", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "@formatjs/icu-skeleton-parser": "1.8.16", @@ -1327,7 +1298,6 @@ "version": "1.8.16", "resolved": "https://registry.npmjs.org/@formatjs/icu-skeleton-parser/-/icu-skeleton-parser-1.8.16.tgz", "integrity": "sha512-H13E9Xl+PxBd8D5/6TVUluSpxGNvFSlN/b3coUp0e0JpuWXXnQDiavIpY3NnvSp4xhEMoXyyBvVfdFX8jglOHQ==", - "license": "MIT", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "tslib": "^2.8.0" @@ -1337,21 +1307,19 @@ "version": "0.6.2", "resolved": "https://registry.npmjs.org/@formatjs/intl-localematcher/-/intl-localematcher-0.6.2.tgz", "integrity": "sha512-XOMO2Hupl0wdd172Y06h6kLpBz6Dv+J4okPLl4LPtzbr8f66WbIoy4ev98EBuZ6ZK4h5ydTN6XneT4QVpD7cdA==", - "license": "MIT", "dependencies": { "tslib": "^2.8.0" } }, "node_modules/@heroui/accordion": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.24.tgz", - "integrity": "sha512-iVJVKKsGN4t3hn4Exwic6n5SOQOmmmsodSsCt0VUcs5VTHu9876sAC44xlEMpc9CP8pC1wQS3DzWl3mN6Z120g==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.25.tgz", + "integrity": "sha512-cukvjTXfSLxjCZJ2PwLYUdkJuzKgKfbYkA+l2yvtYfrAQ8G0uz8a+tAGKGcciVLtYke1KsZ/pKjbpInWgGUV7A==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1364,19 +1332,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/alert": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.27.tgz", - "integrity": "sha512-Y6oX9SV//tdhxhpgkSZvnjwdx7d8S7RAhgVlxCs2Hla//nCFC3yiMHIv8UotTryAGdOwZIsffmcna9vqbNL5vw==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.28.tgz", + "integrity": "sha512-1FgaRWCSj2/s8L1DyQR0ao8cfdC60grC1EInNoqAyvcSJt6j9gK/zWKZTQn+NXDjV2N14dG+b7EjMUc8cJnUjA==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1384,18 +1351,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/aria-utils": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.24.tgz", - "integrity": "sha512-Y7FfQl2jvJr8JjpH+iuJElDwbn3eSWohuxHg6e5+xk5GcPYrEecgr0F/9qD6VU8IvVrRzJ00JzmT87lgA5iE3Q==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.25.tgz", + "integrity": "sha512-7ofC3q6qVksIIJMJu3X07oQKrVijw+eaE4LV8AHY/wRl1FFxuTwhxQmjW5JGsGQ0iwlzxf4D5rogYa4YCUcFag==", "dependencies": { - "@heroui/system": "2.4.23", + "@heroui/system": "2.4.24", "@react-aria/utils": "3.31.0", "@react-stately/collections": "3.12.8", "@react-types/overlays": "3.9.2", @@ -1407,19 +1373,18 @@ } }, "node_modules/@heroui/autocomplete": { - "version": "2.3.29", - "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.29.tgz", - "integrity": "sha512-BQkiWrrhPbNMFF1Hd60QDyG4iwD+sdsjWh0h7sw2XhcT6Bjw/6Hqpf4eHsTvPElW/554vPZVtChjugRY1N2zsw==", - "license": "MIT", + "version": "2.3.30", + "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.30.tgz", + "integrity": "sha512-TT5p/EybRdxRs9g3DZGHYVpp4Sgs1X0kLZvc7qO4hzNyKEqmBOx8VESVZs43ZVmLxVWf7fOd3kbGVt9Sbm2U8A==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/button": "2.2.27", - "@heroui/form": "2.1.27", - "@heroui/input": "2.4.28", - "@heroui/listbox": "2.3.26", - "@heroui/popover": "2.3.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", + "@heroui/form": "2.1.28", + "@heroui/input": "2.4.29", + "@heroui/listbox": "2.3.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", - "@heroui/scroll-shadow": "2.3.18", + "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", @@ -1431,17 +1396,16 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/avatar": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.22.tgz", - "integrity": "sha512-znmKdsrVj91Fg8+wm/HA/b8zi3iAg5g3MezliBfS2PmwgZcpBR6VtwgeeP6uN49+TR+faGIrck0Zxceuw4U0FQ==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.23.tgz", + "integrity": "sha512-YBnb4v1cc/1kZTBx0AH0QNbEno+BhN/zdhxVRJDDI32aVvZhMpR90m7zTG4ma9oetOpCZ0pDeGKenlR9Ack4xg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -1451,32 +1415,30 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/badge": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/badge/-/badge-2.2.17.tgz", - "integrity": "sha512-UNILRsAIJn+B6aWml+Rv2QCyYB7sadNqRPDPzNeVKJd8j3MNgZyyEHDwvqM2FWrgGccQIuWFaUgGdnPxRJpwwg==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/badge/-/badge-2.2.18.tgz", + "integrity": "sha512-OfGove8YJ9oDrdugzq05FC15ZKD5nzqe+thPZ+1SY1LZorJQjZvqSD9QnoEH1nG7fu2IdH6pYJy3sZ/b6Vj5Kg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/breadcrumbs": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.22.tgz", - "integrity": "sha512-2fWfpbwhRPeC99Kuzu+DnzOYL4TOkDm9sznvSj0kIAbw/Rvl+D2/6fmBOaTRIUXfswWpHVRUCcNYczIAp0PkoA==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.23.tgz", + "integrity": "sha512-trWtN/Ci2NTNRGvIxT8hdOml6med9F3HaCszqyVg3zroh6ZqV3iMPL3u4xRnAe0GLPsGwWFUnao7jbouU+avHw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -1487,21 +1449,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/button": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.27.tgz", - "integrity": "sha512-Fxb8rtjPQm9T4GAtB1oW2QMUiQCtn7EtvO5AN41ANxAgmsNMM5wnLTkxQ05vNueCrp47kTDtSuyMhKU2llATHQ==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.28.tgz", + "integrity": "sha512-B4SSMeKXrbENs4VQ3U/MF+RTncPCU3DPYLYhhrDVVo/LXUIcN/KU/mJwF89eYQjvFXVyaZphC+i/5yLiN3uDcw==", "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/ripple": "2.2.20", + "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-aria-button": "2.2.20", "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -1509,21 +1470,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/calendar": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.27.tgz", - "integrity": "sha512-VtyXQSoT9u9tC4HjBkJIaSSmhau1LwPUwvof0LjYDpBfTsJKqn+308wI3nAp75BTbAkK+vFM8LI0VfbALCwR4Q==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.28.tgz", + "integrity": "sha512-iJ1jOljJQCgowGLesl27LPh44JjwYLyxuqwIIJqBspiARdtbCWyVRTXb5RaphnbNcZFDuYhyadkVtzZOYVUn8g==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1543,20 +1503,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/card": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.25.tgz", - "integrity": "sha512-dtd/G24zePIHPutRIxWC69IO3IGJs8X+zh9rBYM9cY5Q972D8Eet5WdWTfDBhw//fFIoagDAs5YcI9emGczGaQ==", - "license": "MIT", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.26.tgz", + "integrity": "sha512-L+q1VLhEqA/s8o3DchojwtA66IE4MZzAhhPqivBD+mYCVtrCaueDMlU1q0o73SO2iloemRz33T5s4Uyf+1b8Bg==", "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/ripple": "2.2.20", + "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-button": "2.2.20", "@react-aria/focus": "3.21.2", @@ -1565,19 +1524,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/checkbox": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.27.tgz", - "integrity": "sha512-YC0deiB7EOzcpJtk9SdySugD1Z2TNtfyYee2voDBHrng7ZBRB+cmAvizXINHnaQGFi0yuVPrZ5ixR/wsvTNW+Q==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.28.tgz", + "integrity": "sha512-lbnPihxNJXVxvpJeta6o17k7vu6fSvR6w+JsT/s5iurKk5qrkCrNBXmIZYdKJ43MmG3C/A0FWh3uNhZOM5Q04Q==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-callback-ref": "2.1.8", @@ -1592,16 +1550,15 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/chip": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.22.tgz", - "integrity": "sha512-6O4Sv1chP+xxftp7E5gHUJIzo04ML9BW9N9jjxWCqT0Qtl+a/ZxnDalCyup6oraMiVLLHp+zEVX93C+3LONgkg==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.23.tgz", + "integrity": "sha512-25HTWX5j9o0suoCYBiEo87ZoTt9VQfca+DSqphNMXHpbCQ0u26fL+8/jjehoYPtySJiLigwQeZn8BEjWWO3pGg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -1611,34 +1568,32 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/code": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/@heroui/code/-/code-2.2.21.tgz", - "integrity": "sha512-ExHcfTGr9tCbAaBOfMzTla8iHHfwIV5/xRk4WApeVmL4MiIlLMykc9bSi1c88ltaJInQGFAmE6MOFHXuGHxBXw==", - "license": "MIT", + "version": "2.2.22", + "resolved": "https://registry.npmjs.org/@heroui/code/-/code-2.2.22.tgz", + "integrity": "sha512-i3pDe5Mzzh04jVx0gFwi2NMtCmsYfIRhLvkebXQcmfUDYl0+IGRJLcBsrWoOzes0pE/s7yyv+yJ/VhoU8F5jcg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/date-input": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.27.tgz", - "integrity": "sha512-IxvZYezbR9jRxTWdsuHH47nsnB6RV1HPY7VwiJd9ZCy6P6oUV0Rx3cdwIRtUnyXbvz1G7+I22NL4C2Ku194l8A==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.28.tgz", + "integrity": "sha512-fzdfo9QMY9R+XffcuLOXXliM87eEu5Hz2wsUnsEAakXEbzAkFfzdSd72DRAbIiTD7yzSvaoyJHVAJ71+3/tCQg==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@internationalized/date": "3.10.0", @@ -1650,23 +1605,22 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/date-picker": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.28.tgz", - "integrity": "sha512-duKvXijabpafxU04sItrozf982tXkUDymcT3SoEvW4LDg6bECgPI8bYNN49hlzkI8+zuwJdKzJ4hDmANGVaL8Q==", - "license": "MIT", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.29.tgz", + "integrity": "sha512-kSvFjNuST2UhlDjDMvOHlbixyTsb4Dm7QNTXxeQGyKd6D5bUaBRzVSNaLnJ6Od/nEh30xqy3lZEq6nT5VqupMA==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/button": "2.2.27", - "@heroui/calendar": "2.2.27", - "@heroui/date-input": "2.3.27", - "@heroui/form": "2.1.27", - "@heroui/popover": "2.3.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", + "@heroui/calendar": "2.2.28", + "@heroui/date-input": "2.3.28", + "@heroui/form": "2.1.28", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1680,24 +1634,23 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/divider": { - "version": "2.2.20", - "resolved": "https://registry.npmjs.org/@heroui/divider/-/divider-2.2.20.tgz", - "integrity": "sha512-t+NNJ2e5okZraLKQoj+rS2l49IMy5AeXTixjsR+QRZ/WPrETNpMj4lw5cBSxG0i7WhRhlBa+KgqweUUezvCdAg==", - "license": "MIT", + "version": "2.2.21", + "resolved": "https://registry.npmjs.org/@heroui/divider/-/divider-2.2.21.tgz", + "integrity": "sha512-aVvl8/3fWUc+/fHbg+hD/0wrkoMKmXG0yRgyNrJSeu0pkRwhb0eD4ZjnBK1pCYqnstoltNE33J8ko/sU+WlmPw==", "dependencies": { "@heroui/react-rsc-utils": "2.1.9", - "@heroui/system-rsc": "2.3.20", + "@heroui/system-rsc": "2.3.21", "@react-types/shared": "3.32.1" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -1706,38 +1659,35 @@ "version": "2.1.10", "resolved": "https://registry.npmjs.org/@heroui/dom-animation/-/dom-animation-2.1.10.tgz", "integrity": "sha512-dt+0xdVPbORwNvFT5pnqV2ULLlSgOJeqlg/DMo97s9RWeD6rD4VedNY90c8C9meqWqGegQYBQ9ztsfX32mGEPA==", - "license": "MIT", "peerDependencies": { "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1" } }, "node_modules/@heroui/drawer": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.24.tgz", - "integrity": "sha512-gb51Lj9A8jlL1UvUrQ+MLS9tz+Qw+cdXwIJd39RXDkJwDmxqhzkz+WoOPZZwcOAHtATmwlTuxxlv6Cro59iswg==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.25.tgz", + "integrity": "sha512-+TFagy61+8dm+EWXLY5NJUGJ4COPL4anRiynw92iSD+arKUGN5b6lJUnjf9NkqwM5jqWKk1vxWdGDZEKZva8Bg==", "dependencies": { - "@heroui/framer-utils": "2.1.23", - "@heroui/modal": "2.2.24", + "@heroui/framer-utils": "2.1.24", + "@heroui/modal": "2.2.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/dropdown": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.27.tgz", - "integrity": "sha512-6aedMmxC+St5Ixz9o3s0ERkLOR6ZQE2uRccmRchPCEt7ZJU6TAeJo7fSpxIvdEUjFDe+pNhR2ojIocZEXtBZZg==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.28.tgz", + "integrity": "sha512-q+bSLxdsHtauqpQ4529cSkjj8L20UdvbrRGmhRL3YLZyLEzGcCCp6kDRCchkCpTaxK7u869eF9TGSNoFeum92g==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/menu": "2.2.26", - "@heroui/popover": "2.3.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/menu": "2.2.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2", @@ -1747,39 +1697,37 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/form": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.27.tgz", - "integrity": "sha512-vtaBqWhxppkJeWgbAZA/A1bRj6XIudBqJWSkoqYlejtLuvaxNwxQ2Z9u7ewxN96R6QqPrQwChlknIn0NgCWlXQ==", - "license": "MIT", + "version": "2.1.28", + "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.28.tgz", + "integrity": "sha512-skg9GooN1+rgQwM0/7wNqUenq6JBEf3T2tDBItJU/oeNC9oaX00JDpy8rpMz9zS0oUqfbJ0auT11+0FRo2W6CQ==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.23", - "@heroui/theme": "2.4.23", + "@heroui/system": "2.4.24", + "@heroui/theme": "2.4.24", "@react-stately/form": "3.2.2", "@react-types/form": "3.7.16", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18", "react-dom": ">=18" } }, "node_modules/@heroui/framer-utils": { - "version": "2.1.23", - "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.23.tgz", - "integrity": "sha512-crLLMjRmxs8/fysFv5gwghSGcDmYYkhNfAWh1rFzDy+FRPZN4f/bPH2rt85hdApmuHbWt0QCocqsrjHxLEzrAw==", - "license": "MIT", + "version": "2.1.24", + "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.24.tgz", + "integrity": "sha512-PiHEV8YS3Q0ve3ZnTASVvTeBK0fTFdLtLiPtCuLucC2WGeDFjUerE7++Y+HhWB85Jj/USknEpl0aGsatl3cbgg==", "dependencies": { - "@heroui/system": "2.4.23", + "@heroui/system": "2.4.24", "@heroui/use-measure": "2.1.8" }, "peerDependencies": { @@ -1789,10 +1737,9 @@ } }, "node_modules/@heroui/image": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/image/-/image-2.2.17.tgz", - "integrity": "sha512-B/MrWafTsiCBFnRc0hPTLDBh7APjb/lRuQf18umuh20/1n6KiQXJ7XGSjnrHaA6HQcrtMGh6mDFZDaXq9rHuoA==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/image/-/image-2.2.18.tgz", + "integrity": "sha512-hrvj/hDM0+Khb9EqstZOPeO0vIGZvhrJWPMxk7a6i2PqhWWQI+ws+nrwsG5XqAkwE4mqqf9Uw8EMfIG1XE5YYg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -1800,18 +1747,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/input": { - "version": "2.4.28", - "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.28.tgz", - "integrity": "sha512-uaBubg814YOlVvX13yCAMqsR9HC4jg+asQdukbOvOnFtHY/d53her1BDdXhR9tMcrRTdYWQ3FoHqWbpvd5X4OQ==", - "license": "MIT", + "version": "2.4.29", + "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.29.tgz", + "integrity": "sha512-PIjFmN6BTLvnlI0I9f7PjxvnviauOczRJGaTnlHKDniknoh7mi8j0voXwL/f6BAkVKrgpT5JiFvdjq6og+cfSA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1826,18 +1772,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/input-otp": { - "version": "2.1.27", - "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.27.tgz", - "integrity": "sha512-VUzQ1u6/0okE0eqDx/2I/8zpGItSsn7Zml01IVwGM4wY2iJeQA+uRjfP+B1ff9jO/y8n582YU4uv/ZSOmmEQ7A==", - "license": "MIT", + "version": "2.1.28", + "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.28.tgz", + "integrity": "sha512-IHr35WqOHb8SBoMXYt6wxzKQg8iFMdc7iqFa8jqdshfVIS3bvxvJj6PGND3LoZxrRFplCv12lfmp2fWymQLleA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-form-reset": "2.0.1", @@ -1850,32 +1795,30 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18", "react-dom": ">=18" } }, "node_modules/@heroui/kbd": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/kbd/-/kbd-2.2.22.tgz", - "integrity": "sha512-PKhgwGB7i53kBuqB1YdFZsg7H9fJ8YESMRRPwRRyPSz5feMdwGidyXs+/ix7lrlYp4mlC3wtPp7L79SEyPCpBA==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/kbd/-/kbd-2.2.23.tgz", + "integrity": "sha512-nKL1Kl044l1Xsk4U8Nib3wFD2NlZCZo6kdqiqUv+DchOo4s3BJcxWSWqHn6fDVmHNyj3DFMYDvA2f/geMasaHQ==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/link": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.23.tgz", - "integrity": "sha512-lObtPRLy8ModlTvJiKhczuAV/CIt31hde6xPGFYRpPsaQN1b7RgQMmai5/Iv/M8WrzFmFZRpgW75RKYIB6hHVQ==", - "license": "MIT", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.24.tgz", + "integrity": "sha512-rxtSC/8++wCtZs2GqBCukQHtDAbqB5bXT24v03q86oz7VOlbn8pox38LwFKrb/H+A3o+BjSKuTJsYidJcQ5clg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -1886,19 +1829,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/listbox": { - "version": "2.3.26", - "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.26.tgz", - "integrity": "sha512-/k3k+xyl2d+aFfT02h+/0njhsDX8vJDEkPK+dl9ETYI9Oz3L+xbHN9yIzuWjBXYkNGlQCjQ46N+0jWjhP5B4pA==", - "license": "MIT", + "version": "2.3.27", + "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.27.tgz", + "integrity": "sha512-NUBDwP9Xzx3A/0iX/09hhs4/y8Loo+bCTm/vqFqYyufR8AOGLw1Xn0poTybPfE4L5U+6Y1P7GM0VjgZVw9dFQQ==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", @@ -1911,19 +1853,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/menu": { - "version": "2.2.26", - "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.26.tgz", - "integrity": "sha512-raR5pXgEqizKD9GsWS1yKqTm4RPWMrSQlqXLE2zNMQk0TkDqmPVw1z5griMqu2Zt9Vf2Ectf55vh4c0DNOUGlg==", - "license": "MIT", + "version": "2.2.27", + "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.27.tgz", + "integrity": "sha512-Ifsb9QBVpAFFcIEEcp3nU28DBtIU0iI7B5HHpblHDJoDtjIbkyNOnyxoEj8eX63QTWQcKrmNnFYdtsrtS9K1RA==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/divider": "2.2.20", + "@heroui/aria-utils": "2.2.25", + "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", @@ -1936,19 +1877,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/modal": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.24.tgz", - "integrity": "sha512-ISbgorNqgps9iUvQdgANxprdN+6H3Sx9TrGKpuW798qjc2f0T4rTbjrEfFPT8tFx6XYF4P5j7T7m3zoKcortHQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.25.tgz", + "integrity": "sha512-qoUk0fe/GMbKHUWcW8XThp+TifEG6GgmpBKZ4x8hhM5o/t1cKAD4+F2pKahtih0ba5qjM+tFtwnUV7z7Mt8+xg==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -1964,20 +1904,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/navbar": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.25.tgz", - "integrity": "sha512-5fNIMDpX2htDTMb/Xgv81qw/FuNWb+0Wpfc6rkFtNYd968I7G6Kjm782QB8WQjZ8DsMugcLEYUN4lpbJHRSdwg==", - "license": "MIT", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.26.tgz", + "integrity": "sha512-uQhISgbQgea1ki0et3hDJ8+IXc35zMNowRQTKgWeEF8T3yS5X2fKuLzJc7/cf0vUGnxH0FPB3Z5Cb7o1nwjr9A==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-resize": "2.1.8", @@ -1991,20 +1930,19 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/number-input": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.18.tgz", - "integrity": "sha512-28v0/0FABs+yy3CcJimcr5uNlhaJSyKt1ENMSXfzPxdN2WgIs14+6NLMT+KV7ibcJl7kmqG0uc8vuIDLVrM5bQ==", - "license": "MIT", + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.19.tgz", + "integrity": "sha512-5UHdznU9XIqjRH17dG277YQrTnUeifWmHdU76Jzf78+SVsJgQdLqcRINHPVj382q0kd6vLMzc4Hyb2fQ0g2WXg==", "dependencies": { - "@heroui/button": "2.2.27", - "@heroui/form": "2.1.27", + "@heroui/button": "2.2.28", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", @@ -2020,16 +1958,15 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/pagination": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.24.tgz", - "integrity": "sha512-5ObSJ1PzB9D1CjHV0MfDNzLR69vSYpx/rNQLBo/D4g5puaAR7kkGgw5ncf5eirhdKuy9y8VGAhjwhBxO4NUdpQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.25.tgz", + "integrity": "sha512-PQZMNQ7wiv++cLEpEXDAdID3IQE2FlG1UkcuYhVYLPJgGSxoKKcM81wmE/HYMgmIMXySiZ+9E/UM8HATrpvTzA==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", @@ -2044,21 +1981,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/popover": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.27.tgz", - "integrity": "sha512-PmSCKQcAvKIegK59Flr9cglbsEu7OAegQMtwNIjqWHsPT18NNphimmUSJrtuD78rcfKekrZ+Uo9qJEUf0zGZDw==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.28.tgz", + "integrity": "sha512-0KHClVQVhLTCqUOtsKEZQ3dqPpNjd7qTISD2Ud3vACdLXprSLWmOzo2ItT6PAh881oIZnPS8l/0/jZ1ON/izdA==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/button": "2.2.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/button": "2.2.28", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-button": "2.2.20", @@ -2072,17 +2008,16 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/progress": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.22.tgz", - "integrity": "sha512-ch+iWEDo8d+Owz81vu4+Kj6CLfxi0nUlivQBhXeOzgU3VZbRmxJyW8S6l7wk6GyKJZxsCbYbjV1wPSjZhKJXCg==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.23.tgz", + "integrity": "sha512-5mfFPv5oW69yD5m/Y1cz0R+s4W8cwvLCZXzVtevoqyzkInNks8w2FKeGptkXcDeXVxqfhwDmNU4DXUmc4nRx3w==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2092,18 +2027,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/radio": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.27.tgz", - "integrity": "sha512-kfDxzPR0u4++lZX2Gf6wbEe/hGbFnoXI4XLbe4e+ZDjGdBSakNuJlcDvWHVoDFZH1xXyOO9w/dHfZuE6O2VGLA==", - "license": "MIT", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.28.tgz", + "integrity": "sha512-qrzZpEXRl4EH3zKeCujyKeK2yvcvaOaosxdZnMrT2O7wxX9LeOp6ZPMwIdMFmJYj7iyPym2nUwFfQBne7JNuvA==", "dependencies": { - "@heroui/form": "2.1.27", + "@heroui/form": "2.1.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2", @@ -2116,66 +2050,65 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/react": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.5.tgz", - "integrity": "sha512-cGiG0/DCPsYopa+zACFDmtx9LQDfY5KU58Tt82ELANhmKRyYAesAq9tSa01dG+MjOXUTUR6cxp5i5RmRn8rPYg==", - "license": "MIT", + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.6.tgz", + "integrity": "sha512-iDmmkqZZtBqVqsSSZiV6PIWN3AEOZLQFXwt9Lob2Oy7gQQuFDP+iljg/ARc3fZ9JBNbJTfgGFdNjrnaFpPtRyw==", "dependencies": { - "@heroui/accordion": "2.2.24", - "@heroui/alert": "2.2.27", - "@heroui/autocomplete": "2.3.29", - "@heroui/avatar": "2.2.22", - "@heroui/badge": "2.2.17", - "@heroui/breadcrumbs": "2.2.22", - "@heroui/button": "2.2.27", - "@heroui/calendar": "2.2.27", - "@heroui/card": "2.2.25", - "@heroui/checkbox": "2.3.27", - "@heroui/chip": "2.2.22", - "@heroui/code": "2.2.21", - "@heroui/date-input": "2.3.27", - "@heroui/date-picker": "2.3.28", - "@heroui/divider": "2.2.20", - "@heroui/drawer": "2.2.24", - "@heroui/dropdown": "2.3.27", - "@heroui/form": "2.1.27", - "@heroui/framer-utils": "2.1.23", - "@heroui/image": "2.2.17", - "@heroui/input": "2.4.28", - "@heroui/input-otp": "2.1.27", - "@heroui/kbd": "2.2.22", - "@heroui/link": "2.2.23", - "@heroui/listbox": "2.3.26", - "@heroui/menu": "2.2.26", - "@heroui/modal": "2.2.24", - "@heroui/navbar": "2.2.25", - "@heroui/number-input": "2.0.18", - "@heroui/pagination": "2.2.24", - "@heroui/popover": "2.3.27", - "@heroui/progress": "2.2.22", - "@heroui/radio": "2.3.27", - "@heroui/ripple": "2.2.20", - "@heroui/scroll-shadow": "2.3.18", - "@heroui/select": "2.4.28", - "@heroui/skeleton": "2.2.17", - "@heroui/slider": "2.4.24", - "@heroui/snippet": "2.2.28", - "@heroui/spacer": "2.2.21", - "@heroui/spinner": "2.2.24", - "@heroui/switch": "2.2.24", - "@heroui/system": "2.4.23", - "@heroui/table": "2.2.27", - "@heroui/tabs": "2.2.24", - "@heroui/theme": "2.4.23", - "@heroui/toast": "2.0.17", - "@heroui/tooltip": "2.2.24", - "@heroui/user": "2.2.22", + "@heroui/accordion": "2.2.25", + "@heroui/alert": "2.2.28", + "@heroui/autocomplete": "2.3.30", + "@heroui/avatar": "2.2.23", + "@heroui/badge": "2.2.18", + "@heroui/breadcrumbs": "2.2.23", + "@heroui/button": "2.2.28", + "@heroui/calendar": "2.2.28", + "@heroui/card": "2.2.26", + "@heroui/checkbox": "2.3.28", + "@heroui/chip": "2.2.23", + "@heroui/code": "2.2.22", + "@heroui/date-input": "2.3.28", + "@heroui/date-picker": "2.3.29", + "@heroui/divider": "2.2.21", + "@heroui/drawer": "2.2.25", + "@heroui/dropdown": "2.3.28", + "@heroui/form": "2.1.28", + "@heroui/framer-utils": "2.1.24", + "@heroui/image": "2.2.18", + "@heroui/input": "2.4.29", + "@heroui/input-otp": "2.1.28", + "@heroui/kbd": "2.2.23", + "@heroui/link": "2.2.24", + "@heroui/listbox": "2.3.27", + "@heroui/menu": "2.2.27", + "@heroui/modal": "2.2.25", + "@heroui/navbar": "2.2.26", + "@heroui/number-input": "2.0.19", + "@heroui/pagination": "2.2.25", + "@heroui/popover": "2.3.28", + "@heroui/progress": "2.2.23", + "@heroui/radio": "2.3.28", + "@heroui/ripple": "2.2.21", + "@heroui/scroll-shadow": "2.3.19", + "@heroui/select": "2.4.29", + "@heroui/skeleton": "2.2.18", + "@heroui/slider": "2.4.25", + "@heroui/snippet": "2.2.29", + "@heroui/spacer": "2.2.22", + "@heroui/spinner": "2.2.25", + "@heroui/switch": "2.2.25", + "@heroui/system": "2.4.24", + "@heroui/table": "2.2.28", + "@heroui/tabs": "2.2.25", + "@heroui/theme": "2.4.24", + "@heroui/toast": "2.0.18", + "@heroui/tooltip": "2.2.25", + "@heroui/user": "2.2.23", "@react-aria/visually-hidden": "3.8.28" }, "peerDependencies": { @@ -2188,7 +2121,6 @@ "version": "2.1.9", "resolved": "https://registry.npmjs.org/@heroui/react-rsc-utils/-/react-rsc-utils-2.1.9.tgz", "integrity": "sha512-e77OEjNCmQxE9/pnLDDb93qWkX58/CcgIqdNAczT/zUP+a48NxGq2A2WRimvc1uviwaNL2StriE2DmyZPyYW7Q==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2197,7 +2129,6 @@ "version": "2.1.14", "resolved": "https://registry.npmjs.org/@heroui/react-utils/-/react-utils-2.1.14.tgz", "integrity": "sha512-hhKklYKy9sRH52C9A8P0jWQ79W4MkIvOnKBIuxEMHhigjfracy0o0lMnAUdEsJni4oZKVJYqNGdQl+UVgcmeDA==", - "license": "MIT", "dependencies": { "@heroui/react-rsc-utils": "2.1.9", "@heroui/shared-utils": "2.1.12" @@ -2207,27 +2138,25 @@ } }, "node_modules/@heroui/ripple": { - "version": "2.2.20", - "resolved": "https://registry.npmjs.org/@heroui/ripple/-/ripple-2.2.20.tgz", - "integrity": "sha512-3+fBx5jO7l8SE84ZG0vB5BOxKKr23Ay180AeIWcf8m8lhXXd4iShVz2S+keW9PewqVHv52YBaxLoSVQ93Ddcxw==", - "license": "MIT", + "version": "2.2.21", + "resolved": "https://registry.npmjs.org/@heroui/ripple/-/ripple-2.2.21.tgz", + "integrity": "sha512-wairSq9LnhbIqTCJmUlJAQURQ1wcRK/L8pjg2s3R/XnvZlPXHy4ZzfphiwIlTI21z/f6tH3arxv/g1uXd1RY0g==", "dependencies": { "@heroui/dom-animation": "2.1.10", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/scroll-shadow": { - "version": "2.3.18", - "resolved": "https://registry.npmjs.org/@heroui/scroll-shadow/-/scroll-shadow-2.3.18.tgz", - "integrity": "sha512-P/nLQbFPOlbTLRjO2tKoZCljJtU7iq81wsp7C8wZ1rZI1RmkTx3UgLLeoFWgmAp3ZlUIYgaewTnejt6eRx+28w==", - "license": "MIT", + "version": "2.3.19", + "resolved": "https://registry.npmjs.org/@heroui/scroll-shadow/-/scroll-shadow-2.3.19.tgz", + "integrity": "sha512-y5mdBlhiITVrFnQTDqEphYj7p5pHqoFSFtVuRRvl9wUec2lMxEpD85uMGsfL8OgQTKIAqGh2s6M360+VJm7ajQ==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2235,26 +2164,25 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/select": { - "version": "2.4.28", - "resolved": "https://registry.npmjs.org/@heroui/select/-/select-2.4.28.tgz", - "integrity": "sha512-Dg3jv248Tu+g2WJMWseDjWA0FAG356elZIcE0OufVAIzQoWjLhgbkTqY9ths0HkcHy0nDwQWvyrrwkbif1kNqA==", - "license": "MIT", + "version": "2.4.29", + "resolved": "https://registry.npmjs.org/@heroui/select/-/select-2.4.29.tgz", + "integrity": "sha512-rFsI+UNUtK6WTm6oDM8A45tu8rDqt1zHoSoBQ8RJDkRITDcKRBTaTnvJI/Ez+kMRNH4fQ45LgoSPxw/JOOMg4w==", "dependencies": { - "@heroui/aria-utils": "2.2.24", - "@heroui/form": "2.1.27", - "@heroui/listbox": "2.3.26", - "@heroui/popover": "2.3.27", + "@heroui/aria-utils": "2.2.25", + "@heroui/form": "2.1.28", + "@heroui/listbox": "2.3.27", + "@heroui/popover": "2.3.28", "@heroui/react-utils": "2.1.14", - "@heroui/scroll-shadow": "2.3.18", + "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-aria-button": "2.2.20", "@heroui/use-aria-multiselect": "2.4.19", "@heroui/use-form-reset": "2.0.1", @@ -2268,7 +2196,7 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" @@ -2278,7 +2206,6 @@ "version": "2.1.10", "resolved": "https://registry.npmjs.org/@heroui/shared-icons/-/shared-icons-2.1.10.tgz", "integrity": "sha512-ePo60GjEpM0SEyZBGOeySsLueNDCqLsVL79Fq+5BphzlrBAcaKY7kUp74964ImtkXvknTxAWzuuTr3kCRqj6jg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2287,33 +2214,30 @@ "version": "2.1.12", "resolved": "https://registry.npmjs.org/@heroui/shared-utils/-/shared-utils-2.1.12.tgz", "integrity": "sha512-0iCnxVAkIPtrHQo26Qa5g0UTqMTpugTbClNOrEPsrQuyRAq7Syux998cPwGlneTfB5E5xcU3LiEdA9GUyeK2cQ==", - "hasInstallScript": true, - "license": "MIT" + "hasInstallScript": true }, "node_modules/@heroui/skeleton": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/skeleton/-/skeleton-2.2.17.tgz", - "integrity": "sha512-WDzwODs+jW+GgMr3oOdLtXXfv8ScXuuWgxN2iPWWyDBcQYXX2XCKGVjCpM5lSKf1UG4Yp3iXuqKzH1m+E+m7kg==", - "license": "MIT", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/skeleton/-/skeleton-2.2.18.tgz", + "integrity": "sha512-7AjU5kjk9rqrKP9mWQiAVj0dow4/vbK5/ejh4jqdb3DZm7bM2+DGzfnQPiS0c2eWR606CgOuuoImpwDS82HJtA==", "dependencies": { "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/slider": { - "version": "2.4.24", - "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.24.tgz", - "integrity": "sha512-GKdqFTCe9O8tT3HEZ/W4TEWkz7ADtUBzuOBXw779Oqqf02HNg9vSnISlNvI6G0ymYjY42EanwA+dChHbPBIVJw==", - "license": "MIT", + "version": "2.4.25", + "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.25.tgz", + "integrity": "sha512-1ULgaqsu1Vzyyx6S7TGs+13PX5BGArZhLiApQfKwiA3TFvT0MNzTVoWVgyFZ8XLqh4esSUnqddhivqQhbRzrHw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.24", + "@heroui/tooltip": "2.2.25", "@react-aria/focus": "3.21.2", "@react-aria/i18n": "3.12.13", "@react-aria/interactions": "3.25.6", @@ -2323,70 +2247,66 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.19", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/snippet": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.28.tgz", - "integrity": "sha512-UfC/ZcYpmOutAcazxkizJWlhvqzr077szDyQ85thyUC5yhuRRLrsOHDIhyLWQrEKIcWw5+CaEGS2VLwAFlgfzw==", - "license": "MIT", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.29.tgz", + "integrity": "sha512-RuyK/DldxvVYb6ToPk5cNNYeDkL+phKZPYHrUxBJK/PzuAkqi3AzQV7zHd+3IfTNxQbevRjzCXENE5F3GKP/MQ==", "dependencies": { - "@heroui/button": "2.2.27", + "@heroui/button": "2.2.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.24", + "@heroui/tooltip": "2.2.25", "@heroui/use-clipboard": "2.1.9", "@react-aria/focus": "3.21.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/spacer": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/@heroui/spacer/-/spacer-2.2.21.tgz", - "integrity": "sha512-WKD+BlgHfqJ8lrkkg/6cvzSWNsbRjzr24HpZnv6cDeWX95wVLTOco9HVR8ohwStMqwu5zYeUd1bw6yCDVTo53w==", - "license": "MIT", + "version": "2.2.22", + "resolved": "https://registry.npmjs.org/@heroui/spacer/-/spacer-2.2.22.tgz", + "integrity": "sha512-BJ7RauvSY3gx10ntqZkCcyTy9K2FS4AeeryQUE9RgkMKQxP4t5TbeYLPEyomjWK+cCL/ERQCCruW16D3vKyWmw==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/system-rsc": "2.3.20" + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/spinner": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.24.tgz", - "integrity": "sha512-HfKkFffrIN9UdJY2UaenlB8xEwIzolCCFCwU0j3wVnLMX+Dw+ixwaELdAxX14Z6gPQYec6AROKetkWWit14rlw==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.25.tgz", + "integrity": "sha512-zDuLJicUL51vGLEBbHWy/t6DlOvs9YILM4YLmzS/o84ExTgfrCycXNs6JkoteFiNu570qqZMeAA2aYneGfl/PQ==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.23", - "@heroui/system-rsc": "2.3.20" + "@heroui/system": "2.4.24", + "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/switch": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.24.tgz", - "integrity": "sha512-RbV+MECncBKsthX3D8r+CGoQRu8Q3AAYUEdm/7ody6+bMZFmBilm695yLiqziMI33Ct/WQ0WkpvrTClIcmxU/A==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.25.tgz", + "integrity": "sha512-F0Yj+kgVfD2bdy6REFvNySeGuYg1OT2phwMPwSZGUl7ZFeGSvvWSnbYS4/wS3JIM5PyEibSaB8QIPc8r00xq1A==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", @@ -2399,19 +2319,18 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/system": { - "version": "2.4.23", - "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.23.tgz", - "integrity": "sha512-kgYvfkIOQKM6CCBIlNSE2tXMtNrS1mvEUbvwnaU3pEYbMlceBtwA5v7SlpaJy/5dqKcTbfmVMUCmXnY/Kw4vaQ==", - "license": "MIT", + "version": "2.4.24", + "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.24.tgz", + "integrity": "sha512-9GKQgUc91otQfwmq6TLE72QKxtB341aK5NpBHS3gRoWYEuNN714Zl3OXwIZNvdXPJpsTaUo1ID1ibJU9tfgwdg==", "dependencies": { "@heroui/react-utils": "2.1.14", - "@heroui/system-rsc": "2.3.20", + "@heroui/system-rsc": "2.3.21", "@react-aria/i18n": "3.12.13", "@react-aria/overlays": "3.30.0", "@react-aria/utils": "3.31.0" @@ -2423,39 +2342,26 @@ } }, "node_modules/@heroui/system-rsc": { - "version": "2.3.20", - "resolved": "https://registry.npmjs.org/@heroui/system-rsc/-/system-rsc-2.3.20.tgz", - "integrity": "sha512-uZwQErEud/lAX7KRXEdsDcGLyygBffHcgnbCDrLvmTf3cyBE84YziG7AjM7Ts8ZcrF+wBXX4+a1IqnKGlsGEdQ==", - "license": "MIT", + "version": "2.3.21", + "resolved": "https://registry.npmjs.org/@heroui/system-rsc/-/system-rsc-2.3.21.tgz", + "integrity": "sha512-icB7njbNgkI3dcfZhY5LP7VFspaVgWL1lcg9Q7uJMAaj6gGFqqSSnHkSMwpR9AGLxVRKTHey0TUx8CeZDe8XDw==", "dependencies": { - "@react-types/shared": "3.32.1", - "clsx": "^1.2.1" + "@react-types/shared": "3.32.1" }, "peerDependencies": { - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0" } }, - "node_modules/@heroui/system-rsc/node_modules/clsx": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", - "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/@heroui/table": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.27.tgz", - "integrity": "sha512-XFmbEgBzf89WH1VzmnwENxVzK4JrHV5jdlzyM3snNhk8uDSjfecnUY33qR62cpdZsKiCFFcYf7kQPkCnJGnD0Q==", - "license": "MIT", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.28.tgz", + "integrity": "sha512-0z3xs0kxDXvvd9gy/uHgvK0/bmpJF0m9t3omNMnB0I0EUx+gJ/CnaaPiF9M5veg/128rc45J7X2FgY3fPAKcmA==", "dependencies": { - "@heroui/checkbox": "2.3.27", + "@heroui/checkbox": "2.3.28", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spacer": "2.2.21", "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", "@react-aria/table": "3.17.8", @@ -2468,18 +2374,17 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/tabs": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.24.tgz", - "integrity": "sha512-2SfxzAXe1t2Zz0v16kqkb7DR2wW86XoDwRUpLex6zhEN4/uT5ILeynxIVSUyAvVN3z95cnaQt0XPQBfUjAIQhQ==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.25.tgz", + "integrity": "sha512-bIpz/8TTNMabmzObN2zs+3WhQXbKyr9tZUPkk3rMQxIshpg9oyyEWOS8XiMBxrEzSByLfPNypl5sX1au6Dw2Ew==", "dependencies": { - "@heroui/aria-utils": "2.2.24", + "@heroui/aria-utils": "2.2.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mounted": "2.1.8", @@ -2492,60 +2397,38 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.22", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/theme": { - "version": "2.4.23", - "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.23.tgz", - "integrity": "sha512-5hoaRWG+/d/t06p7Pfhz70DUP0Uggjids7/z2Ytgup4A8KAOvDIXxvHUDlk6rRHKiN1wDMNA5H+EWsSXB/m03Q==", - "license": "MIT", + "version": "2.4.24", + "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.24.tgz", + "integrity": "sha512-lL+anmY4GGWwKyTbJ2PEBZE4talIZ3hu4yGpku9TktCVG2nC2YTwiWQFJ+Jcbf8Cf9vuLzI1sla5bz2jUqiBRA==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "clsx": "^1.2.1", "color": "^4.2.3", "color2k": "^2.0.3", "deepmerge": "4.3.1", "flat": "^5.0.2", - "tailwind-merge": "3.3.1", - "tailwind-variants": "3.1.1" + "tailwind-merge": "3.4.0", + "tailwind-variants": "3.2.2" }, "peerDependencies": { "tailwindcss": ">=4.0.0" } }, - "node_modules/@heroui/theme/node_modules/clsx": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/clsx/-/clsx-1.2.1.tgz", - "integrity": "sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/@heroui/theme/node_modules/tailwind-merge": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.3.1.tgz", - "integrity": "sha512-gBXpgUm/3rp1lMZZrM/w7D8GKqshif0zAymAhbCyIt8KMe+0v9DQ7cdYLR4FHH/cKpdTXb+A/tKKU3eolfsI+g==", - "license": "MIT", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/dcastil" - } - }, "node_modules/@heroui/toast": { - "version": "2.0.17", - "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.17.tgz", - "integrity": "sha512-w3TaA1DYLcwdDjpwf9xw5YSr+odo9GGHsObsrMmLEQDS0JQhmKyK5sQqXUzb9d27EC6KVwGjeVg0hUHYQBK2JA==", - "license": "MIT", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.18.tgz", + "integrity": "sha512-5IoqEq10W/AaUgKWKIR7bbTB6U+rHMkikzGwW+IndsvFLR3meyb5l4K5cmVCmDsMHubUaRa9UFDeAokyNXvpWA==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.24", + "@heroui/spinner": "2.2.25", "@heroui/use-is-mobile": "2.2.12", "@react-aria/interactions": "3.25.6", "@react-aria/toast": "3.0.8", @@ -2553,21 +2436,20 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/tooltip": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.24.tgz", - "integrity": "sha512-H+0STFea2/Z4obDdk+ZPoDzJxJQHIWGSjnW/jieThJbJ5zow/qBfcg5DqzIdiC+FCJ4dDD5jEDZ4W4H/fQUKQA==", - "license": "MIT", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.25.tgz", + "integrity": "sha512-f+WxkQy0YBzzE6VhzVgA/CeD7nvo0hhOapx0UScU8zsQ1J+n5Kr5YY/7CgMHmFLyC/Amrqlf7WSgljRl4iWivQ==", "dependencies": { - "@heroui/aria-utils": "2.2.24", + "@heroui/aria-utils": "2.2.25", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.23", + "@heroui/framer-utils": "2.1.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-aria-overlay": "2.0.4", @@ -2580,7 +2462,7 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" @@ -2590,7 +2472,6 @@ "version": "2.2.18", "resolved": "https://registry.npmjs.org/@heroui/use-aria-accordion/-/use-aria-accordion-2.2.18.tgz", "integrity": "sha512-qjRkae2p4MFDrNqO6v6YCor0BtVi3idMd1dsI82XM16bxLQ2stqG4Ajrg60xV0AN+WKZUq10oetqkJuY6MYg0w==", - "license": "MIT", "dependencies": { "@react-aria/button": "3.14.2", "@react-aria/focus": "3.21.2", @@ -2607,7 +2488,6 @@ "version": "2.2.20", "resolved": "https://registry.npmjs.org/@heroui/use-aria-button/-/use-aria-button-2.2.20.tgz", "integrity": "sha512-Y0Bmze/pxEACKsHMbA1sYA3ghMJ+9fSnWvZBwlUxqiVXDEy2YrrK2JmXEgsuHGQdKD9RqU2Od3V4VqIIiaHiMA==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -2623,7 +2503,6 @@ "version": "2.2.21", "resolved": "https://registry.npmjs.org/@heroui/use-aria-link/-/use-aria-link-2.2.21.tgz", "integrity": "sha512-sG2rUutT/E/FYguzZmg715cXcM6+ue9wRfs2Gi6epWJwIVpS51uEagJKY0wIutJDfuCPfQ9AuxXfJek4CnxjKw==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -2639,7 +2518,6 @@ "version": "2.2.19", "resolved": "https://registry.npmjs.org/@heroui/use-aria-modal-overlay/-/use-aria-modal-overlay-2.2.19.tgz", "integrity": "sha512-MPvszNrt+1DauiSyOAwb0pKbYahpEVi9hrmidnO8cd1SA7B2ES0fNRBeNMAwcaeR/Nzsv+Cw1hRXt3egwqi0lg==", - "license": "MIT", "dependencies": { "@heroui/use-aria-overlay": "2.0.4", "@react-aria/overlays": "3.30.0", @@ -2655,7 +2533,6 @@ "version": "2.4.19", "resolved": "https://registry.npmjs.org/@heroui/use-aria-multiselect/-/use-aria-multiselect-2.4.19.tgz", "integrity": "sha512-RLDSpOLJqNESn6OK/zKuyTriK6sqMby76si/4kTMCs+4lmMPOyFKP3fREywu+zyJjRUCuZPa6xYuN2OHKQRDow==", - "license": "MIT", "dependencies": { "@react-aria/i18n": "3.12.13", "@react-aria/interactions": "3.25.6", @@ -2680,7 +2557,6 @@ "version": "2.0.4", "resolved": "https://registry.npmjs.org/@heroui/use-aria-overlay/-/use-aria-overlay-2.0.4.tgz", "integrity": "sha512-iv+y0+OvQd1eWiZftPI07JE3c5AdK85W5k3rDlhk5MFEI3dllkIpu8z8zLh3ge/BQGFiGkySVC5iXl8w84gMUQ==", - "license": "MIT", "dependencies": { "@react-aria/focus": "3.21.2", "@react-aria/interactions": "3.25.6", @@ -2696,7 +2572,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-callback-ref/-/use-callback-ref-2.1.8.tgz", "integrity": "sha512-D1JDo9YyFAprYpLID97xxQvf86NvyWLay30BeVVZT9kWmar6O9MbCRc7ACi7Ngko60beonj6+amTWkTm7QuY/Q==", - "license": "MIT", "dependencies": { "@heroui/use-safe-layout-effect": "2.1.8" }, @@ -2708,7 +2583,6 @@ "version": "2.1.9", "resolved": "https://registry.npmjs.org/@heroui/use-clipboard/-/use-clipboard-2.1.9.tgz", "integrity": "sha512-lkBq5RpXHiPvk1BXKJG8gMM0f7jRMIGnxAXDjAUzZyXKBuWLoM+XlaUWmZHtmkkjVFMX1L4vzA+vxi9rZbenEQ==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2717,7 +2591,6 @@ "version": "2.2.13", "resolved": "https://registry.npmjs.org/@heroui/use-data-scroll-overflow/-/use-data-scroll-overflow-2.2.13.tgz", "integrity": "sha512-zboLXO1pgYdzMUahDcVt5jf+l1jAQ/D9dFqr7AxWLfn6tn7/EgY0f6xIrgWDgJnM0U3hKxVeY13pAeB4AFTqTw==", - "license": "MIT", "dependencies": { "@heroui/shared-utils": "2.1.12" }, @@ -2729,7 +2602,6 @@ "version": "2.2.17", "resolved": "https://registry.npmjs.org/@heroui/use-disclosure/-/use-disclosure-2.2.17.tgz", "integrity": "sha512-S3pN0WmpcTTZuQHcXw4RcTVsxLaCZ95H5qi/JPN83ahhWTCC+pN8lwE37vSahbMTM1YriiHyTM6AWpv/E3Jq7w==", - "license": "MIT", "dependencies": { "@heroui/use-callback-ref": "2.1.8", "@react-aria/utils": "3.31.0", @@ -2743,7 +2615,6 @@ "version": "2.1.18", "resolved": "https://registry.npmjs.org/@heroui/use-draggable/-/use-draggable-2.1.18.tgz", "integrity": "sha512-ihQdmLGYJ6aTEaJ0/yCXYn6VRdrRV2eO03XD2A3KANZPb1Bj/n4r298xNMql5VnGq5ZNDJB9nTv8NNCu9pmPdg==", - "license": "MIT", "dependencies": { "@react-aria/interactions": "3.25.6" }, @@ -2755,7 +2626,6 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@heroui/use-form-reset/-/use-form-reset-2.0.1.tgz", "integrity": "sha512-6slKWiLtVfgZnVeHVkM9eXgjwI07u0CUaLt2kQpfKPqTSTGfbHgCYJFduijtThhTdKBhdH6HCmzTcnbVlAxBXw==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2764,7 +2634,6 @@ "version": "2.1.13", "resolved": "https://registry.npmjs.org/@heroui/use-image/-/use-image-2.1.13.tgz", "integrity": "sha512-NLApz+xin2bKHEXr+eSrtB0lN8geKP5VOea5QGbOCiHq4DBXu4QctpRkSfCHGIQzWdBVaLPoV+5wd0lR2S2Egg==", - "license": "MIT", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/use-safe-layout-effect": "2.1.8" @@ -2777,7 +2646,6 @@ "version": "2.2.14", "resolved": "https://registry.npmjs.org/@heroui/use-intersection-observer/-/use-intersection-observer-2.2.14.tgz", "integrity": "sha512-qYJeMk4cTsF+xIckRctazCgWQ4BVOpJu+bhhkB1NrN+MItx19Lcb7ksOqMdN5AiSf85HzDcAEPIQ9w9RBlt5sg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2786,7 +2654,6 @@ "version": "2.2.12", "resolved": "https://registry.npmjs.org/@heroui/use-is-mobile/-/use-is-mobile-2.2.12.tgz", "integrity": "sha512-2UKa4v1xbvFwerWKoMTrg4q9ZfP9MVIVfCl1a7JuKQlXq3jcyV6z1as5bZ41pCsTOT+wUVOFnlr6rzzQwT9ZOA==", - "license": "MIT", "dependencies": { "@react-aria/ssr": "3.9.10" }, @@ -2798,7 +2665,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-is-mounted/-/use-is-mounted-2.1.8.tgz", "integrity": "sha512-DO/Th1vD4Uy8KGhd17oGlNA4wtdg91dzga+VMpmt94gSZe1WjsangFwoUBxF2uhlzwensCX9voye3kerP/lskg==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2807,7 +2673,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-measure/-/use-measure-2.1.8.tgz", "integrity": "sha512-GjT9tIgluqYMZWfAX6+FFdRQBqyHeuqUMGzAXMTH9kBXHU0U5C5XU2c8WFORkNDoZIg1h13h1QdV+Vy4LE1dEA==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2816,7 +2681,6 @@ "version": "2.2.18", "resolved": "https://registry.npmjs.org/@heroui/use-pagination/-/use-pagination-2.2.18.tgz", "integrity": "sha512-qm1mUe5UgV0kPZItcs/jiX/BxzdDagmcxaJkYR6DkhfMRoCuOdoJhcoh8ncbCAgHpzPESPn1VxsOcG4/Y+Jkdw==", - "license": "MIT", "dependencies": { "@heroui/shared-utils": "2.1.12", "@react-aria/i18n": "3.12.13" @@ -2829,7 +2693,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-resize/-/use-resize-2.1.8.tgz", "integrity": "sha512-htF3DND5GmrSiMGnzRbISeKcH+BqhQ/NcsP9sBTIl7ewvFaWiDhEDiUHdJxflmJGd/c5qZq2nYQM/uluaqIkKA==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2838,7 +2701,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-safe-layout-effect/-/use-safe-layout-effect-2.1.8.tgz", "integrity": "sha512-wbnZxVWCYqk10XRMu0veSOiVsEnLcmGUmJiapqgaz0fF8XcpSScmqjTSoWjHIEWaHjQZ6xr+oscD761D6QJN+Q==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2847,7 +2709,6 @@ "version": "2.1.8", "resolved": "https://registry.npmjs.org/@heroui/use-scroll-position/-/use-scroll-position-2.1.8.tgz", "integrity": "sha512-NxanHKObxVfWaPpNRyBR8v7RfokxrzcHyTyQfbgQgAGYGHTMaOGkJGqF8kBzInc3zJi+F0zbX7Nb0QjUgsLNUQ==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } @@ -2856,25 +2717,23 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/@heroui/use-viewport-size/-/use-viewport-size-2.0.1.tgz", "integrity": "sha512-blv8BEB/QdLePLWODPRzRS2eELJ2eyHbdOIADbL0KcfLzOUEg9EiuVk90hcSUDAFqYiJ3YZ5Z0up8sdPcR8Y7g==", - "license": "MIT", "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/user": { - "version": "2.2.22", - "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.22.tgz", - "integrity": "sha512-kOLxh9Bjgl/ya/f+W7/eKVO/n1GPsU5TPzwocC9+FU/+MbCOrmkevhAGGUrb259KCnp9WCv7WGRIcf8rrsreDw==", - "license": "MIT", + "version": "2.2.23", + "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.23.tgz", + "integrity": "sha512-o/ngJ4yTD4svjYKSP3hJNwhyWLhHk5g/wjqGvH81INfpeV7wPlzpM/C6LIezGB3rZjGM9d4ozSofv6spbCKCiA==", "dependencies": { - "@heroui/avatar": "2.2.22", + "@heroui/avatar": "2.2.23", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@react-aria/focus": "3.21.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.17", + "@heroui/theme": ">=2.4.23", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -3080,7 +2939,6 @@ "version": "3.10.0", "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.10.0.tgz", "integrity": "sha512-oxDR/NTEJ1k+UFVQElaNIk65E/Z83HK1z1WI3lQyhTtnNg4R5oVXaPzK3jcpKG8UHKDVuDQHzn+wsxSz8RP3aw==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3089,7 +2947,6 @@ "version": "3.1.8", "resolved": "https://registry.npmjs.org/@internationalized/message/-/message-3.1.8.tgz", "integrity": "sha512-Rwk3j/TlYZhn3HQ6PyXUV0XP9Uv42jqZGNegt0BXlxjE6G3+LwHjbQZAGHhCnCPdaA6Tvd3ma/7QzLlLkJxAWA==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0", "intl-messageformat": "^10.1.0" @@ -3099,7 +2956,6 @@ "version": "3.6.5", "resolved": "https://registry.npmjs.org/@internationalized/number/-/number-3.6.5.tgz", "integrity": "sha512-6hY4Kl4HPBvtfS62asS/R22JzNNy8vi/Ssev7x6EobfCp+9QIB2hKvI2EtbdJ0VSQacxVNtqhE/NmF/NZ0gm6g==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3108,7 +2964,6 @@ "version": "3.2.7", "resolved": "https://registry.npmjs.org/@internationalized/string/-/string-3.2.7.tgz", "integrity": "sha512-D4OHBjrinH+PFZPvfCXvG28n2LSykWcJ7GIioQL+ok0LON15SdfoUssoHzzOUmVZLbRoREsQXVzA6r8JKsbP6A==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3349,7 +3204,6 @@ "version": "3.5.29", "resolved": "https://registry.npmjs.org/@react-aria/breadcrumbs/-/breadcrumbs-3.5.29.tgz", "integrity": "sha512-rKS0dryllaZJqrr3f/EAf2liz8CBEfmL5XACj+Z1TAig6GIYe1QuA3BtkX0cV9OkMugXdX8e3cbA7nD10ORRqg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/link": "^3.8.6", @@ -3367,7 +3221,6 @@ "version": "3.14.2", "resolved": "https://registry.npmjs.org/@react-aria/button/-/button-3.14.2.tgz", "integrity": "sha512-VbLIA+Kd6f/MDjd+TJBUg2+vNDw66pnvsj2E4RLomjI9dfBuN7d+Yo2UnsqKVyhePjCUZ6xxa2yDuD63IOSIYA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/toolbar": "3.0.0-beta.21", @@ -3386,7 +3239,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-aria/calendar/-/calendar-3.9.2.tgz", "integrity": "sha512-uSLxLgOPRnEU4Jg59lAhUVA+uDx/55NBg4lpfsP2ynazyiJ5LCXmYceJi+VuOqMml7d9W0dB87OldOeLdIxYVA==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-aria/i18n": "^3.12.13", @@ -3408,7 +3260,6 @@ "version": "3.16.2", "resolved": "https://registry.npmjs.org/@react-aria/checkbox/-/checkbox-3.16.2.tgz", "integrity": "sha512-29Mj9ZqXioJ0bcMnNGooHztnTau5pikZqX3qCRj5bYR3by/ZFFavYoMroh9F7s/MbFm/tsKX+Sf02lYFEdXRjA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/form": "^3.1.2", "@react-aria/interactions": "^3.25.6", @@ -3431,7 +3282,6 @@ "version": "3.14.0", "resolved": "https://registry.npmjs.org/@react-aria/combobox/-/combobox-3.14.0.tgz", "integrity": "sha512-z4ro0Hma//p4nL2IJx5iUa7NwxeXbzSoZ0se5uTYjG1rUUMszg+wqQh/AQoL+eiULn7rs18JY9wwNbVIkRNKWA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3459,7 +3309,6 @@ "version": "3.15.2", "resolved": "https://registry.npmjs.org/@react-aria/datepicker/-/datepicker-3.15.2.tgz", "integrity": "sha512-th078hyNqPf4P2K10su/y32zPDjs3lOYVdHvsL9/+5K1dnTvLHCK5vgUyLuyn8FchhF7cmHV49D+LZVv65PEpQ==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/number": "^3.6.5", @@ -3489,7 +3338,6 @@ "version": "3.5.31", "resolved": "https://registry.npmjs.org/@react-aria/dialog/-/dialog-3.5.31.tgz", "integrity": "sha512-inxQMyrzX0UBW9Mhraq0nZ4HjHdygQvllzloT1E/RlDd61lr3RbmJR6pLsrbKOTtSvDIBJpCso1xEdHCFNmA0Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/overlays": "^3.30.0", @@ -3507,7 +3355,6 @@ "version": "3.21.2", "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.2.tgz", "integrity": "sha512-JWaCR7wJVggj+ldmM/cb/DXFg47CXR55lznJhZBh4XVqJjMKwaOOqpT5vNN7kpC1wUpXicGNuDnJDN1S/+6dhQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -3524,7 +3371,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-aria/form/-/form-3.1.2.tgz", "integrity": "sha512-R3i7L7Ci61PqZQvOrnL9xJeWEbh28UkTVgkj72EvBBn39y4h7ReH++0stv7rRs8p5ozETSKezBbGfu4UsBewWw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -3541,7 +3387,6 @@ "version": "3.14.5", "resolved": "https://registry.npmjs.org/@react-aria/grid/-/grid-3.14.5.tgz", "integrity": "sha512-XHw6rgjlTqc85e3zjsWo3U0EVwjN5MOYtrolCKc/lc2ItNdcY3OlMhpsU9+6jHwg/U3VCSWkGvwAz9hg7krd8Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3566,7 +3411,6 @@ "version": "3.12.13", "resolved": "https://registry.npmjs.org/@react-aria/i18n/-/i18n-3.12.13.tgz", "integrity": "sha512-YTM2BPg0v1RvmP8keHenJBmlx8FXUKsdYIEX7x6QWRd1hKlcDwphfjzvt0InX9wiLiPHsT5EoBTpuUk8SXc0Mg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/message": "^3.1.8", @@ -3586,7 +3430,6 @@ "version": "3.25.6", "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.25.6.tgz", "integrity": "sha512-5UgwZmohpixwNMVkMvn9K1ceJe6TzlRlAfuYoQDUuOkk62/JVJNDLAPKIf5YMRc7d2B0rmfgaZLMtbREb0Zvkw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/ssr": "^3.9.10", "@react-aria/utils": "^3.31.0", @@ -3603,7 +3446,6 @@ "version": "3.7.22", "resolved": "https://registry.npmjs.org/@react-aria/label/-/label-3.7.22.tgz", "integrity": "sha512-jLquJeA5ZNqDT64UpTc9XJ7kQYltUlNcgxZ37/v4mHe0UZ7QohCKdKQhXHONb0h2jjNUpp2HOZI8J9++jOpzxA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/utils": "^3.31.0", "@react-types/shared": "^3.32.1", @@ -3618,7 +3460,6 @@ "version": "3.0.7", "resolved": "https://registry.npmjs.org/@react-aria/landmark/-/landmark-3.0.7.tgz", "integrity": "sha512-t8c610b8hPLS6Vwv+rbuSyljZosI1s5+Tosfa0Fk4q7d+Ex6Yj7hLfUFy59GxZAufhUYfGX396fT0gPqAbU1tg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/utils": "^3.31.0", "@react-types/shared": "^3.32.1", @@ -3634,7 +3475,6 @@ "version": "3.8.6", "resolved": "https://registry.npmjs.org/@react-aria/link/-/link-3.8.6.tgz", "integrity": "sha512-7F7UDJnwbU9IjfoAdl6f3Hho5/WB7rwcydUOjUux0p7YVWh/fTjIFjfAGyIir7MJhPapun1D0t97QQ3+8jXVcg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -3651,7 +3491,6 @@ "version": "3.15.0", "resolved": "https://registry.npmjs.org/@react-aria/listbox/-/listbox-3.15.0.tgz", "integrity": "sha512-Ub1Wu79R9sgxM7h4HeEdjOgOKDHwduvYcnDqsSddGXgpkL8ADjsy2YUQ0hHY5VnzA4BxK36bLp4mzSna8Qvj1w==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/label": "^3.7.22", @@ -3672,7 +3511,6 @@ "version": "3.4.4", "resolved": "https://registry.npmjs.org/@react-aria/live-announcer/-/live-announcer-3.4.4.tgz", "integrity": "sha512-PTTBIjNRnrdJOIRTDGNifY2d//kA7GUAwRFJNOEwSNG4FW+Bq9awqLiflw0JkpyB0VNIwou6lqKPHZVLsGWOXA==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3681,7 +3519,6 @@ "version": "3.19.3", "resolved": "https://registry.npmjs.org/@react-aria/menu/-/menu-3.19.3.tgz", "integrity": "sha512-52fh8y8b2776R2VrfZPpUBJYC9oTP7XDy+zZuZTxPEd7Ywk0JNUl5F92y6ru22yPkS13sdhrNM/Op+V/KulmAg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3707,7 +3544,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/numberfield/-/numberfield-3.12.2.tgz", "integrity": "sha512-M2b+z0HIXiXpGAWOQkO2kpIjaLNUXJ5Q3/GMa3Fkr+B1piFX0VuOynYrtddKVrmXCe+r5t+XcGb0KS29uqv7nQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -3730,7 +3566,6 @@ "version": "3.30.0", "resolved": "https://registry.npmjs.org/@react-aria/overlays/-/overlays-3.30.0.tgz", "integrity": "sha512-UpjqSjYZx5FAhceWCRVsW6fX1sEwya1fQ/TKkL53FAlLFR8QKuoKqFlmiL43YUFTcGK3UdEOy3cWTleLQwdSmQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3753,7 +3588,6 @@ "version": "3.4.27", "resolved": "https://registry.npmjs.org/@react-aria/progress/-/progress-3.4.27.tgz", "integrity": "sha512-0OA1shs1575g1zmO8+rWozdbTnxThFFhOfuoL1m7UV5Dley6FHpueoKB1ECv7B+Qm4dQt6DoEqLg7wsbbQDhmg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/label": "^3.7.22", @@ -3771,7 +3605,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/radio/-/radio-3.12.2.tgz", "integrity": "sha512-I11f6I90neCh56rT/6ieAs3XyDKvEfbj/QmbU5cX3p+SJpRRPN0vxQi5D1hkh0uxDpeClxygSr31NmZsd4sqfg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/form": "^3.1.2", @@ -3793,7 +3626,6 @@ "version": "3.26.0", "resolved": "https://registry.npmjs.org/@react-aria/selection/-/selection-3.26.0.tgz", "integrity": "sha512-ZBH3EfWZ+RfhTj01dH8L17uT7iNbXWS8u77/fUpHgtrm0pwNVhx0TYVnLU1YpazQ/3WVpvWhmBB8sWwD1FlD/g==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3812,7 +3644,6 @@ "version": "3.8.2", "resolved": "https://registry.npmjs.org/@react-aria/slider/-/slider-3.8.2.tgz", "integrity": "sha512-6KyUGaVzRE4xAz1LKHbNh1q5wzxe58pdTHFSnxNe6nk1SCoHw7NfI4h2s2m6LgJ0megFxsT0Ir8aHaFyyxmbgg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -3832,7 +3663,6 @@ "version": "3.6.19", "resolved": "https://registry.npmjs.org/@react-aria/spinbutton/-/spinbutton-3.6.19.tgz", "integrity": "sha512-xOIXegDpts9t3RSHdIN0iYQpdts0FZ3LbpYJIYVvdEHo9OpDS+ElnDzCGtwZLguvZlwc5s1LAKuKopDUsAEMkw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/live-announcer": "^3.4.4", @@ -3850,7 +3680,6 @@ "version": "3.9.10", "resolved": "https://registry.npmjs.org/@react-aria/ssr/-/ssr-3.9.10.tgz", "integrity": "sha512-hvTm77Pf+pMBhuBm760Li0BVIO38jv1IBws1xFm1NoL26PU+fe+FMW5+VZWyANR6nYL65joaJKZqOdTQMkO9IQ==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" }, @@ -3865,7 +3694,6 @@ "version": "3.7.8", "resolved": "https://registry.npmjs.org/@react-aria/switch/-/switch-3.7.8.tgz", "integrity": "sha512-AfsUq1/YiuoprhcBUD9vDPyWaigAwctQNW1fMb8dROL+i/12B+Zekj8Ml+jbU69/kIVtfL0Jl7/0Bo9KK3X0xQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/toggle": "^3.12.2", "@react-stately/toggle": "^3.9.2", @@ -3882,7 +3710,6 @@ "version": "3.17.8", "resolved": "https://registry.npmjs.org/@react-aria/table/-/table-3.17.8.tgz", "integrity": "sha512-bXiZoxTMbsqUJsYDhHPzKc3jw0HFJ/xMsJ49a0f7mp5r9zACxNLeIU0wJ4Uvx37dnYOHKzGliG+rj5l4sph7MA==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/grid": "^3.14.5", @@ -3909,7 +3736,6 @@ "version": "3.10.8", "resolved": "https://registry.npmjs.org/@react-aria/tabs/-/tabs-3.10.8.tgz", "integrity": "sha512-sPPJyTyoAqsBh76JinBAxStOcbjZvyWFYKpJ9Uqw+XT0ObshAPPFSGeh8DiQemPs02RwJdrfARPMhyqiX8t59A==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -3929,7 +3755,6 @@ "version": "3.18.2", "resolved": "https://registry.npmjs.org/@react-aria/textfield/-/textfield-3.18.2.tgz", "integrity": "sha512-G+lM8VYSor6g9Yptc6hLZ6BF+0cq0pYol1z6wdQUQgJN8tg4HPtzq75lsZtlCSIznL3amgRAxJtd0dUrsAnvaQ==", - "license": "Apache-2.0", "dependencies": { "@react-aria/form": "^3.1.2", "@react-aria/interactions": "^3.25.6", @@ -3950,7 +3775,6 @@ "version": "3.0.8", "resolved": "https://registry.npmjs.org/@react-aria/toast/-/toast-3.0.8.tgz", "integrity": "sha512-rfJIms6AkMyQ7ZgKrMZgGfPwGcB/t1JoEwbc1PAmXcAvFI/hzF6YF7ZFDXiq38ucFsP9PnHmbXIzM9w4ccl18A==", - "license": "Apache-2.0", "dependencies": { "@react-aria/i18n": "^3.12.13", "@react-aria/interactions": "^3.25.6", @@ -3970,7 +3794,6 @@ "version": "3.12.2", "resolved": "https://registry.npmjs.org/@react-aria/toggle/-/toggle-3.12.2.tgz", "integrity": "sha512-g25XLYqJuJpt0/YoYz2Rab8ax+hBfbssllcEFh0v0jiwfk2gwTWfRU9KAZUvxIqbV8Nm8EBmrYychDpDcvW1kw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -3988,7 +3811,6 @@ "version": "3.0.0-beta.21", "resolved": "https://registry.npmjs.org/@react-aria/toolbar/-/toolbar-3.0.0-beta.21.tgz", "integrity": "sha512-yRCk/GD8g+BhdDgxd3I0a0c8Ni4Wyo6ERzfSoBkPkwQ4X2E2nkopmraM9D0fXw4UcIr4bnmvADzkHXtBN0XrBg==", - "license": "Apache-2.0", "dependencies": { "@react-aria/focus": "^3.21.2", "@react-aria/i18n": "^3.12.13", @@ -4005,7 +3827,6 @@ "version": "3.8.8", "resolved": "https://registry.npmjs.org/@react-aria/tooltip/-/tooltip-3.8.8.tgz", "integrity": "sha512-CmHUqtXtFWmG4AHMEr9hIVex+oscK6xcM2V47gq9ijNInxe3M6UBu/dBdkgGP/jYv9N7tzCAjTR8nNIHQXwvWw==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4023,7 +3844,6 @@ "version": "3.31.0", "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.31.0.tgz", "integrity": "sha512-ABOzCsZrWzf78ysswmguJbx3McQUja7yeGj6/vZo4JVsZNlxAN+E9rs381ExBRI0KzVo6iBTeX5De8eMZPJXig==", - "license": "Apache-2.0", "dependencies": { "@react-aria/ssr": "^3.9.10", "@react-stately/flags": "^3.1.2", @@ -4041,7 +3861,6 @@ "version": "3.8.28", "resolved": "https://registry.npmjs.org/@react-aria/visually-hidden/-/visually-hidden-3.8.28.tgz", "integrity": "sha512-KRRjbVVob2CeBidF24dzufMxBveEUtUu7IM+hpdZKB+gxVROoh4XRLPv9SFmaH89Z7D9To3QoykVZoWD0lan6Q==", - "license": "Apache-2.0", "dependencies": { "@react-aria/interactions": "^3.25.6", "@react-aria/utils": "^3.31.0", @@ -4190,7 +4009,6 @@ "version": "3.9.0", "resolved": "https://registry.npmjs.org/@react-stately/calendar/-/calendar-3.9.0.tgz", "integrity": "sha512-U5Nf2kx9gDhJRxdDUm5gjfyUlt/uUfOvM1vDW2UA62cA6+2k2cavMLc2wNlXOb/twFtl6p0joYKHG7T4xnEFkg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-stately/utils": "^3.10.8", @@ -4206,7 +4024,6 @@ "version": "3.7.2", "resolved": "https://registry.npmjs.org/@react-stately/checkbox/-/checkbox-3.7.2.tgz", "integrity": "sha512-j1ycUVz5JmqhaL6mDZgDNZqBilOB8PBW096sDPFaTtuYreDx2HOd1igxiIvwlvPESZwsJP7FVM3mYnaoXtpKPA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/form": "^3.2.2", "@react-stately/utils": "^3.10.8", @@ -4222,7 +4039,6 @@ "version": "3.12.8", "resolved": "https://registry.npmjs.org/@react-stately/collections/-/collections-3.12.8.tgz", "integrity": "sha512-AceJYLLXt1Y2XIcOPi6LEJSs4G/ubeYW3LqOCQbhfIgMaNqKfQMIfagDnPeJX9FVmPFSlgoCBxb1pTJW2vjCAQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4235,7 +4051,6 @@ "version": "3.12.0", "resolved": "https://registry.npmjs.org/@react-stately/combobox/-/combobox-3.12.0.tgz", "integrity": "sha512-A6q9R/7cEa/qoQsBkdslXWvD7ztNLLQ9AhBhVN9QvzrmrH5B4ymUwcTU8lWl22ykH7RRwfonLeLXJL4C+/L2oQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/form": "^3.2.2", @@ -4254,7 +4069,6 @@ "version": "3.15.2", "resolved": "https://registry.npmjs.org/@react-stately/datepicker/-/datepicker-3.15.2.tgz", "integrity": "sha512-S5GL+W37chvV8knv9v0JRv0L6hKo732qqabCCHXzOpYxkLIkV4f/y3cHdEzFWzpZ0O0Gkg7WgeYo160xOdBKYg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@internationalized/string": "^3.2.7", @@ -4273,7 +4087,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-stately/flags/-/flags-3.1.2.tgz", "integrity": "sha512-2HjFcZx1MyQXoPqcBGALwWWmgFVUk2TuKVIQxCbRq7fPyWXIl6VHcakCLurdtYC2Iks7zizvz0Idv48MQ38DWg==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -4282,7 +4095,6 @@ "version": "3.2.2", "resolved": "https://registry.npmjs.org/@react-stately/form/-/form-3.2.2.tgz", "integrity": "sha512-soAheOd7oaTO6eNs6LXnfn0tTqvOoe3zN9FvtIhhrErKz9XPc5sUmh3QWwR45+zKbitOi1HOjfA/gifKhZcfWw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4295,7 +4107,6 @@ "version": "3.11.6", "resolved": "https://registry.npmjs.org/@react-stately/grid/-/grid-3.11.6.tgz", "integrity": "sha512-vWPAkzpeTIsrurHfMubzMuqEw7vKzFhIJeEK5sEcLunyr1rlADwTzeWrHNbPMl66NAIAi70Dr1yNq+kahQyvMA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -4311,7 +4122,6 @@ "version": "3.13.1", "resolved": "https://registry.npmjs.org/@react-stately/list/-/list-3.13.1.tgz", "integrity": "sha512-eHaoauh21twbcl0kkwULhVJ+CzYcy1jUjMikNVMHOQdhr4WIBdExf7PmSgKHKqsSPhpGg6IpTCY2dUX3RycjDg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -4327,7 +4137,6 @@ "version": "3.9.8", "resolved": "https://registry.npmjs.org/@react-stately/menu/-/menu-3.9.8.tgz", "integrity": "sha512-bo0NOhofnTHLESiYfsSSw6gyXiPVJJ0UlN2igUXtJk5PmyhWjFzUzTzcnd7B028OB0si9w3LIWM3stqz5271Eg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/overlays": "^3.6.20", "@react-types/menu": "^3.10.5", @@ -4342,7 +4151,6 @@ "version": "3.10.2", "resolved": "https://registry.npmjs.org/@react-stately/numberfield/-/numberfield-3.10.2.tgz", "integrity": "sha512-jlKVFYaH3RX5KvQ7a+SAMQuPccZCzxLkeYkBE64u1Zvi7YhJ8hkTMHG/fmZMbk1rHlseE2wfBdk0Rlya3MvoNQ==", - "license": "Apache-2.0", "dependencies": { "@internationalized/number": "^3.6.5", "@react-stately/form": "^3.2.2", @@ -4358,7 +4166,6 @@ "version": "3.6.20", "resolved": "https://registry.npmjs.org/@react-stately/overlays/-/overlays-3.6.20.tgz", "integrity": "sha512-YAIe+uI8GUXX8F/0Pzr53YeC5c/bjqbzDFlV8NKfdlCPa6+Jp4B/IlYVjIooBj9+94QvbQdjylegvYWK/iPwlg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/overlays": "^3.9.2", @@ -4372,7 +4179,6 @@ "version": "3.11.2", "resolved": "https://registry.npmjs.org/@react-stately/radio/-/radio-3.11.2.tgz", "integrity": "sha512-UM7L6AW+k8edhSBUEPZAqiWNRNadfOKK7BrCXyBiG79zTz0zPcXRR+N+gzkDn7EMSawDeyK1SHYUuoSltTactg==", - "license": "Apache-2.0", "dependencies": { "@react-stately/form": "^3.2.2", "@react-stately/utils": "^3.10.8", @@ -4388,7 +4194,6 @@ "version": "3.20.6", "resolved": "https://registry.npmjs.org/@react-stately/selection/-/selection-3.20.6.tgz", "integrity": "sha512-a0bjuP2pJYPKEiedz2Us1W1aSz0iHRuyeQEdBOyL6Z6VUa6hIMq9H60kvseir2T85cOa4QggizuRV7mcO6bU5w==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/utils": "^3.10.8", @@ -4403,7 +4208,6 @@ "version": "3.7.2", "resolved": "https://registry.npmjs.org/@react-stately/slider/-/slider-3.7.2.tgz", "integrity": "sha512-EVBHUdUYwj++XqAEiQg2fGi8Reccznba0uyQ3gPejF0pAc390Q/J5aqiTEDfiCM7uJ6WHxTM6lcCqHQBISk2dQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/shared": "^3.32.1", @@ -4418,7 +4222,6 @@ "version": "3.15.1", "resolved": "https://registry.npmjs.org/@react-stately/table/-/table-3.15.1.tgz", "integrity": "sha512-MhMAgE/LgAzHcAn1P3p/nQErzJ6DiixSJ1AOt2JlnAKEb5YJg4ATKWCb2IjBLwywt9ZCzfm3KMUzkctZqAoxwA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/flags": "^3.1.2", @@ -4438,7 +4241,6 @@ "version": "3.8.6", "resolved": "https://registry.npmjs.org/@react-stately/tabs/-/tabs-3.8.6.tgz", "integrity": "sha512-9RYxmgjVIxUpIsGKPIF7uRoHWOEz8muwaYiStCVeyiYBPmarvZoIYtTXcwSMN/vEs7heVN5uGCL6/bfdY4+WiA==", - "license": "Apache-2.0", "dependencies": { "@react-stately/list": "^3.13.1", "@react-types/shared": "^3.32.1", @@ -4453,7 +4255,6 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/@react-stately/toast/-/toast-3.1.2.tgz", "integrity": "sha512-HiInm7bck32khFBHZThTQaAF6e6/qm57F4mYRWdTq8IVeGDzpkbUYibnLxRhk0UZ5ybc6me+nqqPkG/lVmM42Q==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0", "use-sync-external-store": "^1.4.0" @@ -4466,7 +4267,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-stately/toggle/-/toggle-3.9.2.tgz", "integrity": "sha512-dOxs9wrVXHUmA7lc8l+N9NbTJMAaXcYsnNGsMwfXIXQ3rdq+IjWGNYJ52UmNQyRYFcg0jrzRrU16TyGbNjOdNQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/utils": "^3.10.8", "@react-types/checkbox": "^3.10.2", @@ -4481,7 +4281,6 @@ "version": "3.5.8", "resolved": "https://registry.npmjs.org/@react-stately/tooltip/-/tooltip-3.5.8.tgz", "integrity": "sha512-gkcUx2ROhCiGNAYd2BaTejakXUUNLPnnoJ5+V/mN480pN+OrO8/2V9pqb/IQmpqxLsso93zkM3A4wFHHLBBmPQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/overlays": "^3.6.20", "@react-types/tooltip": "^3.4.21", @@ -4495,7 +4294,6 @@ "version": "3.9.3", "resolved": "https://registry.npmjs.org/@react-stately/tree/-/tree-3.9.3.tgz", "integrity": "sha512-ZngG79nLFxE/GYmpwX6E/Rma2MMkzdoJPRI3iWk3dgqnGMMzpPnUp/cvjDsU3UHF7xDVusC5BT6pjWN0uxCIFQ==", - "license": "Apache-2.0", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/selection": "^3.20.6", @@ -4511,7 +4309,6 @@ "version": "3.10.8", "resolved": "https://registry.npmjs.org/@react-stately/utils/-/utils-3.10.8.tgz", "integrity": "sha512-SN3/h7SzRsusVQjQ4v10LaVsDc81jyyR0DD5HnsQitm/I5WDpaSr2nRHtyloPFU48jlql1XX/S04T2DLQM7Y3g==", - "license": "Apache-2.0", "dependencies": { "@swc/helpers": "^0.5.0" }, @@ -4523,7 +4320,6 @@ "version": "4.4.4", "resolved": "https://registry.npmjs.org/@react-stately/virtualizer/-/virtualizer-4.4.4.tgz", "integrity": "sha512-ri8giqXSZOrznZDCCOE4U36wSkOhy+hrFK7yo/YVcpxTqqp3d3eisfKMqbDsgqBW+XTHycTU/xeAf0u9NqrfpQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4537,7 +4333,6 @@ "version": "3.0.0-alpha.26", "resolved": "https://registry.npmjs.org/@react-types/accordion/-/accordion-3.0.0-alpha.26.tgz", "integrity": "sha512-OXf/kXcD2vFlEnkcZy/GG+a/1xO9BN7Uh3/5/Ceuj9z2E/WwD55YwU3GFM5zzkZ4+DMkdowHnZX37XnmbyD3Mg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.27.0" }, @@ -4549,7 +4344,6 @@ "version": "3.7.17", "resolved": "https://registry.npmjs.org/@react-types/breadcrumbs/-/breadcrumbs-3.7.17.tgz", "integrity": "sha512-IhvVTcfli5o/UDlGACXxjlor2afGlMQA8pNR3faH0bBUay1Fmm3IWktVw9Xwmk+KraV2RTAg9e+E6p8DOQZfiw==", - "license": "Apache-2.0", "dependencies": { "@react-types/link": "^3.6.5", "@react-types/shared": "^3.32.1" @@ -4562,7 +4356,6 @@ "version": "3.14.1", "resolved": "https://registry.npmjs.org/@react-types/button/-/button-3.14.1.tgz", "integrity": "sha512-D8C4IEwKB7zEtiWYVJ3WE/5HDcWlze9mLWQ5hfsBfpePyWCgO3bT/+wjb/7pJvcAocrkXo90QrMm85LcpBtrpg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4574,7 +4367,6 @@ "version": "3.8.0", "resolved": "https://registry.npmjs.org/@react-types/calendar/-/calendar-3.8.0.tgz", "integrity": "sha512-ZDZgfZgbz1ydWOFs1mH7QFfX3ioJrmb3Y/lkoubQE0HWXLZzyYNvhhKyFJRS1QJ40IofLSBHriwbQb/tsUnGlw==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-types/shared": "^3.32.1" @@ -4587,7 +4379,6 @@ "version": "3.10.2", "resolved": "https://registry.npmjs.org/@react-types/checkbox/-/checkbox-3.10.2.tgz", "integrity": "sha512-ktPkl6ZfIdGS1tIaGSU/2S5Agf2NvXI9qAgtdMDNva0oLyAZ4RLQb6WecPvofw1J7YKXu0VA5Mu7nlX+FM2weQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4599,7 +4390,6 @@ "version": "3.13.9", "resolved": "https://registry.npmjs.org/@react-types/combobox/-/combobox-3.13.9.tgz", "integrity": "sha512-G6GmLbzVkLW6VScxPAr/RtliEyPhBClfYaIllK1IZv+Z42SVnOpKzhnoe79BpmiFqy1AaC3+LjZX783mrsHCwA==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4611,7 +4401,6 @@ "version": "3.13.2", "resolved": "https://registry.npmjs.org/@react-types/datepicker/-/datepicker-3.13.2.tgz", "integrity": "sha512-+M6UZxJnejYY8kz0spbY/hP08QJ5rsZ3aNarRQQHc48xV2oelFLX5MhAqizfLEsvyfb0JYrhWoh4z1xZtAmYCg==", - "license": "Apache-2.0", "dependencies": { "@internationalized/date": "^3.10.0", "@react-types/calendar": "^3.8.0", @@ -4626,7 +4415,6 @@ "version": "3.5.22", "resolved": "https://registry.npmjs.org/@react-types/dialog/-/dialog-3.5.22.tgz", "integrity": "sha512-smSvzOcqKE196rWk0oqJDnz+ox5JM5+OT0PmmJXiUD4q7P5g32O6W5Bg7hMIFUI9clBtngo8kLaX2iMg+GqAzg==", - "license": "Apache-2.0", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -4639,7 +4427,6 @@ "version": "3.7.16", "resolved": "https://registry.npmjs.org/@react-types/form/-/form-3.7.16.tgz", "integrity": "sha512-Sb7KJoWEaQ/e4XIY+xRbjKvbP1luome98ZXevpD+zVSyGjEcfIroebizP6K1yMHCWP/043xH6GUkgEqWPoVGjg==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4651,7 +4438,6 @@ "version": "3.3.6", "resolved": "https://registry.npmjs.org/@react-types/grid/-/grid-3.3.6.tgz", "integrity": "sha512-vIZJlYTii2n1We9nAugXwM2wpcpsC6JigJFBd6vGhStRdRWRoU4yv1Gc98Usbx0FQ/J7GLVIgeG8+1VMTKBdxw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4663,7 +4449,6 @@ "version": "3.6.5", "resolved": "https://registry.npmjs.org/@react-types/link/-/link-3.6.5.tgz", "integrity": "sha512-+I2s3XWBEvLrzts0GnNeA84mUkwo+a7kLUWoaJkW0TOBDG7my95HFYxF9WnqKye7NgpOkCqz4s3oW96xPdIniQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4675,7 +4460,6 @@ "version": "3.7.4", "resolved": "https://registry.npmjs.org/@react-types/listbox/-/listbox-3.7.4.tgz", "integrity": "sha512-p4YEpTl/VQGrqVE8GIfqTS5LkT5jtjDTbVeZgrkPnX/fiPhsfbTPiZ6g0FNap4+aOGJFGEEZUv2q4vx+rCORww==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4687,7 +4471,6 @@ "version": "3.10.5", "resolved": "https://registry.npmjs.org/@react-types/menu/-/menu-3.10.5.tgz", "integrity": "sha512-HBTrKll2hm0VKJNM4ubIv1L9MNo8JuOnm2G3M+wXvb6EYIyDNxxJkhjsqsGpUXJdAOSkacHBDcNh2HsZABNX4A==", - "license": "Apache-2.0", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -4700,7 +4483,6 @@ "version": "3.8.15", "resolved": "https://registry.npmjs.org/@react-types/numberfield/-/numberfield-3.8.15.tgz", "integrity": "sha512-97r92D23GKCOjGIGMeW9nt+/KlfM3GeWH39Czcmd2/D5y3k6z4j0avbsfx2OttCtJszrnENjw3GraYGYI2KosQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4712,7 +4494,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-types/overlays/-/overlays-3.9.2.tgz", "integrity": "sha512-Q0cRPcBGzNGmC8dBuHyoPR7N3057KTS5g+vZfQ53k8WwmilXBtemFJPLsogJbspuewQ/QJ3o2HYsp2pne7/iNw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4724,7 +4505,6 @@ "version": "3.5.16", "resolved": "https://registry.npmjs.org/@react-types/progress/-/progress-3.5.16.tgz", "integrity": "sha512-I9tSdCFfvQ7gHJtm90VAKgwdTWXQgVNvLRStEc0z9h+bXBxdvZb+QuiRPERChwFQ9VkK4p4rDqaFo69nDqWkpw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4736,7 +4516,6 @@ "version": "3.9.2", "resolved": "https://registry.npmjs.org/@react-types/radio/-/radio-3.9.2.tgz", "integrity": "sha512-3UcJXu37JrTkRyP4GJPDBU7NmDTInrEdOe+bVzA1j4EegzdkJmLBkLg5cLDAbpiEHB+xIsvbJdx6dxeMuc+H3g==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4748,7 +4527,6 @@ "version": "3.32.1", "resolved": "https://registry.npmjs.org/@react-types/shared/-/shared-3.32.1.tgz", "integrity": "sha512-famxyD5emrGGpFuUlgOP6fVW2h/ZaF405G5KDi3zPHzyjAWys/8W6NAVJtNbkCkhedmvL0xOhvt8feGXyXaw5w==", - "license": "Apache-2.0", "peerDependencies": { "react": "^16.8.0 || ^17.0.0-rc.1 || ^18.0.0 || ^19.0.0-rc.1" } @@ -4757,7 +4535,6 @@ "version": "3.8.2", "resolved": "https://registry.npmjs.org/@react-types/slider/-/slider-3.8.2.tgz", "integrity": "sha512-MQYZP76OEOYe7/yA2To+Dl0LNb0cKKnvh5JtvNvDnAvEprn1RuLiay8Oi/rTtXmc2KmBa4VdTcsXsmkbbkeN2Q==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4769,7 +4546,6 @@ "version": "3.5.15", "resolved": "https://registry.npmjs.org/@react-types/switch/-/switch-3.5.15.tgz", "integrity": "sha512-r/ouGWQmIeHyYSP1e5luET+oiR7N7cLrAlWsrAfYRWHxqXOSNQloQnZJ3PLHrKFT02fsrQhx2rHaK2LfKeyN3A==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4781,7 +4557,6 @@ "version": "3.13.4", "resolved": "https://registry.npmjs.org/@react-types/table/-/table-3.13.4.tgz", "integrity": "sha512-I/DYiZQl6aNbMmjk90J9SOhkzVDZvyA3Vn3wMWCiajkMNjvubFhTfda5DDf2SgFP5l0Yh6TGGH5XumRv9LqL5Q==", - "license": "Apache-2.0", "dependencies": { "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1" @@ -4794,7 +4569,6 @@ "version": "3.3.19", "resolved": "https://registry.npmjs.org/@react-types/tabs/-/tabs-3.3.19.tgz", "integrity": "sha512-fE+qI43yR5pAMpeqPxGqQq9jDHXEPqXskuxNHERMW0PYMdPyem2Cw6goc5F4qeZO3Hf6uPZgHkvJz2OAq7TbBw==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4806,7 +4580,6 @@ "version": "3.12.6", "resolved": "https://registry.npmjs.org/@react-types/textfield/-/textfield-3.12.6.tgz", "integrity": "sha512-hpEVKE+M3uUkTjw2WrX1NrH/B3rqDJFUa+ViNK2eVranLY4ZwFqbqaYXSzHupOF3ecSjJJv2C103JrwFvx6TPQ==", - "license": "Apache-2.0", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4818,7 +4591,6 @@ "version": "3.4.21", "resolved": "https://registry.npmjs.org/@react-types/tooltip/-/tooltip-3.4.21.tgz", "integrity": "sha512-ugGHOZU6WbOdeTdbjnaEc+Ms7/WhsUCg+T3PCOIeOT9FG02Ce189yJ/+hd7oqL/tVwIhEMYJIqSCgSELFox+QA==", - "license": "Apache-2.0", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -5425,7 +5197,6 @@ "version": "0.5.17", "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==", - "license": "Apache-2.0", "dependencies": { "tslib": "^2.8.0" } @@ -5786,7 +5557,6 @@ "version": "3.11.3", "resolved": "https://registry.npmjs.org/@tanstack/react-virtual/-/react-virtual-3.11.3.tgz", "integrity": "sha512-vCU+OTylXN3hdC8RKg68tPlBPjjxtzon7Ys46MgrSLE+JhSjSTPvoQifV6DQJeJmA8Q3KT6CphJbejupx85vFw==", - "license": "MIT", "dependencies": { "@tanstack/virtual-core": "3.11.3" }, @@ -5803,7 +5573,6 @@ "version": "3.11.3", "resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.3.tgz", "integrity": "sha512-v2mrNSnMwnPJtcVqNvV0c5roGCBqeogN8jDtgtuHCphdwBasOZ17x8UV8qpHUh+u0MLfX43c0uUHKje0s+Zb0w==", - "license": "MIT", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" @@ -5857,11 +5626,10 @@ "license": "MIT" }, "node_modules/@testing-library/react": { - "version": "16.3.0", - "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.0.tgz", - "integrity": "sha512-kFSyxiEDwv1WLl2fgsq6pPBbw5aWKrsY2/noi1Id0TK0UParSF62oFQFGHXIyaG4pp2tEub/Zlel+fjjZILDsw==", + "version": "16.3.1", + "resolved": "https://registry.npmjs.org/@testing-library/react/-/react-16.3.1.tgz", + "integrity": "sha512-gr4KtAWqIOQoucWYD/f6ki+j5chXfcPc74Col/6poTyqTmn7zRmodWahWRCp8tYd+GMqBonw6hstNzqjbs6gjw==", "dev": true, - "license": "MIT", "dependencies": { "@babel/runtime": "^7.12.5" }, @@ -5991,9 +5759,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "25.0.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.1.tgz", - "integrity": "sha512-czWPzKIAXucn9PtsttxmumiQ9N0ok9FrBwgRWrwmVLlp86BrMExzvXRLFYRJ+Ex3g6yqj+KuaxfX1JTgV2lpfg==", + "version": "25.0.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.2.tgz", + "integrity": "sha512-gWEkeiyYE4vqjON/+Obqcoeffmk0NF15WSBwSs7zwVA2bAbTaE0SJ7P0WNGoJn8uE7fiaV5a7dKYIJriEqOrmA==", "devOptional": true, "dependencies": { "undici-types": "~7.16.0" @@ -7568,7 +7336,6 @@ "version": "4.2.3", "resolved": "https://registry.npmjs.org/color/-/color-4.2.3.tgz", "integrity": "sha512-1rXeuUUiGGrykh+CeBdu5Ie7OJwinCgQY0bc7GCRxy5xVHy+moaqkpL/jqQq0MtQOeYcrqEz4abc5f0KtU7W4A==", - "license": "MIT", "dependencies": { "color-convert": "^2.0.1", "color-string": "^1.9.0" @@ -7608,8 +7375,7 @@ "node_modules/color2k": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/color2k/-/color2k-2.0.3.tgz", - "integrity": "sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==", - "license": "MIT" + "integrity": "sha512-zW190nQTIoXcGCaU08DvVNFTmQhUpnJfVuAKfWqUQkflXKpaDdpaYoM0iluLS9lgJNHyBF58KKA2FBEwkD7wog==" }, "node_modules/colorette": { "version": "2.0.20", @@ -8045,7 +7811,6 @@ "version": "4.3.1", "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", - "license": "MIT", "engines": { "node": ">=0.10.0" } @@ -8553,11 +8318,10 @@ } }, "node_modules/esbuild": { - "version": "0.25.12", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.12.tgz", - "integrity": "sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==", + "version": "0.27.1", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.27.1.tgz", + "integrity": "sha512-yY35KZckJJuVVPXpvjgxiCuVEJT67F6zDeVTv4rizyPrfGBUpZQsvmxnN+C371c2esD/hNMjj4tpBhuueLN7aA==", "hasInstallScript": true, - "license": "MIT", "bin": { "esbuild": "bin/esbuild" }, @@ -8565,32 +8329,32 @@ "node": ">=18" }, "optionalDependencies": { - "@esbuild/aix-ppc64": "0.25.12", - "@esbuild/android-arm": "0.25.12", - "@esbuild/android-arm64": "0.25.12", - "@esbuild/android-x64": "0.25.12", - "@esbuild/darwin-arm64": "0.25.12", - "@esbuild/darwin-x64": "0.25.12", - "@esbuild/freebsd-arm64": "0.25.12", - "@esbuild/freebsd-x64": "0.25.12", - "@esbuild/linux-arm": "0.25.12", - "@esbuild/linux-arm64": "0.25.12", - "@esbuild/linux-ia32": "0.25.12", - "@esbuild/linux-loong64": "0.25.12", - "@esbuild/linux-mips64el": "0.25.12", - "@esbuild/linux-ppc64": "0.25.12", - "@esbuild/linux-riscv64": "0.25.12", - "@esbuild/linux-s390x": "0.25.12", - "@esbuild/linux-x64": "0.25.12", - "@esbuild/netbsd-arm64": "0.25.12", - "@esbuild/netbsd-x64": "0.25.12", - "@esbuild/openbsd-arm64": "0.25.12", - "@esbuild/openbsd-x64": "0.25.12", - "@esbuild/openharmony-arm64": "0.25.12", - "@esbuild/sunos-x64": "0.25.12", - "@esbuild/win32-arm64": "0.25.12", - "@esbuild/win32-ia32": "0.25.12", - "@esbuild/win32-x64": "0.25.12" + "@esbuild/aix-ppc64": "0.27.1", + "@esbuild/android-arm": "0.27.1", + "@esbuild/android-arm64": "0.27.1", + "@esbuild/android-x64": "0.27.1", + "@esbuild/darwin-arm64": "0.27.1", + "@esbuild/darwin-x64": "0.27.1", + "@esbuild/freebsd-arm64": "0.27.1", + "@esbuild/freebsd-x64": "0.27.1", + "@esbuild/linux-arm": "0.27.1", + "@esbuild/linux-arm64": "0.27.1", + "@esbuild/linux-ia32": "0.27.1", + "@esbuild/linux-loong64": "0.27.1", + "@esbuild/linux-mips64el": "0.27.1", + "@esbuild/linux-ppc64": "0.27.1", + "@esbuild/linux-riscv64": "0.27.1", + "@esbuild/linux-s390x": "0.27.1", + "@esbuild/linux-x64": "0.27.1", + "@esbuild/netbsd-arm64": "0.27.1", + "@esbuild/netbsd-x64": "0.27.1", + "@esbuild/openbsd-arm64": "0.27.1", + "@esbuild/openbsd-x64": "0.27.1", + "@esbuild/openharmony-arm64": "0.27.1", + "@esbuild/sunos-x64": "0.27.1", + "@esbuild/win32-arm64": "0.27.1", + "@esbuild/win32-ia32": "0.27.1", + "@esbuild/win32-x64": "0.27.1" } }, "node_modules/escalade": { @@ -9557,7 +9321,6 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "license": "BSD-3-Clause", "bin": { "flat": "cli.js" } @@ -10310,9 +10073,9 @@ } }, "node_modules/i18next": { - "version": "25.7.2", - "resolved": "https://registry.npmjs.org/i18next/-/i18next-25.7.2.tgz", - "integrity": "sha512-58b4kmLpLv1buWUEwegMDUqZVR5J+rT+WTRFaBGL7lxDuJQQ0NrJFrq+eT2N94aYVR1k1Sr13QITNOL88tZCuw==", + "version": "25.7.3", + "resolved": "https://registry.npmjs.org/i18next/-/i18next-25.7.3.tgz", + "integrity": "sha512-2XaT+HpYGuc2uTExq9TVRhLsso+Dxym6PWaKpn36wfBmTI779OQ7iP/XaZHzrnGyzU4SHpFrTYLKfVyBfAhVNA==", "funding": [ { "type": "individual", @@ -10327,7 +10090,6 @@ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" } ], - "license": "MIT", "dependencies": { "@babel/runtime": "^7.28.4" }, @@ -10445,7 +10207,6 @@ "version": "1.4.1", "resolved": "https://registry.npmjs.org/input-otp/-/input-otp-1.4.1.tgz", "integrity": "sha512-+yvpmKYKHi9jIGngxagY9oWiiblPB7+nEO75F2l2o4vs+6vpPZZmUl4tBNYuTCvQjhvEIbdNeJu70bhfYP2nbw==", - "license": "MIT", "peerDependencies": { "react": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc", "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc" @@ -10470,7 +10231,6 @@ "version": "10.7.18", "resolved": "https://registry.npmjs.org/intl-messageformat/-/intl-messageformat-10.7.18.tgz", "integrity": "sha512-m3Ofv/X/tV8Y3tHXLohcuVuhWKo7BBq62cqY15etqmLxg2DZ34AGGgQDeR+SCta2+zICb1NX83af0GJmbQ1++g==", - "license": "BSD-3-Clause", "dependencies": { "@formatjs/ecma402-abstract": "2.3.6", "@formatjs/fast-memoize": "2.2.7", @@ -13659,9 +13419,9 @@ } }, "node_modules/posthog-js": { - "version": "1.306.0", - "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.306.0.tgz", - "integrity": "sha512-sjsy0El4HL6PgbyWiUF0CaKb2d1Q8okbSeT4eajan3QSvkWus6ygHQuW2l4lfvp6NLRQrIZKH/0sUanhASptUQ==", + "version": "1.306.1", + "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.306.1.tgz", + "integrity": "sha512-wO7bliv/5tlAlfoKCUzwkGXZVNexk0dHigMf9tNp0q1rzs62wThogREY7Tz7h/iWKYiuXy1RumtVlTmHuBXa1w==", "dependencies": { "@posthog/core": "1.7.1", "core-js": "^3.38.1", @@ -14594,7 +14354,6 @@ "version": "3.0.10", "resolved": "https://registry.npmjs.org/scroll-into-view-if-needed/-/scroll-into-view-if-needed-3.0.10.tgz", "integrity": "sha512-t44QCeDKAPf1mtQH3fYpWz8IM/DyvHLjs8wUvvwMYxk5moOqCzrMSxK6HQVD0QVmVjXFavoFIPRVrMuJPKAvtg==", - "license": "MIT", "dependencies": { "compute-scroll-into-view": "^3.0.2" } @@ -15538,10 +15297,9 @@ } }, "node_modules/tailwind-variants": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/tailwind-variants/-/tailwind-variants-3.1.1.tgz", - "integrity": "sha512-ftLXe3krnqkMHsuBTEmaVUXYovXtPyTK7ckEfDRXS8PBZx0bAUas+A0jYxuKA5b8qg++wvQ3d2MQ7l/xeZxbZQ==", - "license": "MIT", + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/tailwind-variants/-/tailwind-variants-3.2.2.tgz", + "integrity": "sha512-Mi4kHeMTLvKlM98XPnK+7HoBPmf4gygdFmqQPaDivc3DpYS6aIY6KiG/PgThrGvii5YZJqRsPz0aPyhoFzmZgg==", "engines": { "node": ">=16.x", "pnpm": ">=7.x" @@ -16251,12 +16009,11 @@ } }, "node_modules/vite": { - "version": "7.2.7", - "resolved": "https://registry.npmjs.org/vite/-/vite-7.2.7.tgz", - "integrity": "sha512-ITcnkFeR3+fI8P1wMgItjGrR10170d8auB4EpMLPqmx6uxElH3a/hHGQabSHKdqd4FXWO1nFIp9rRn7JQ34ACQ==", - "license": "MIT", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", + "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", "dependencies": { - "esbuild": "^0.25.0", + "esbuild": "^0.27.0", "fdir": "^6.5.0", "picomatch": "^4.0.3", "postcss": "^8.5.6", @@ -16370,11 +16127,10 @@ } }, "node_modules/vite-tsconfig-paths": { - "version": "5.1.4", - "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-5.1.4.tgz", - "integrity": "sha512-cYj0LRuLV2c2sMqhqhGpaO3LretdtMn/BVX4cPLanIZuwwrkVl+lK84E/miEXkCHWXuq65rhNN4rXsBcOB3S4w==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-6.0.1.tgz", + "integrity": "sha512-OQuYkfCQhc2T+n//0N7/oogTosgiSyZQ7dydrpUlH5SbnFTtplpekdY4GMi6jDwEpiwWlqeUJMyPfC2ePM1+2A==", "dev": true, - "license": "MIT", "dependencies": { "debug": "^4.1.1", "globrex": "^0.1.2", diff --git a/frontend/package.json b/frontend/package.json index 64892afc5c..bd3af09ef7 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -7,7 +7,7 @@ "node": ">=22.0.0" }, "dependencies": { - "@heroui/react": "2.8.5", + "@heroui/react": "2.8.6", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", "@react-router/node": "^7.10.1", @@ -23,13 +23,13 @@ "downshift": "^9.0.13", "eslint-config-airbnb-typescript": "^18.0.0", "framer-motion": "^12.23.25", - "i18next": "^25.7.2", + "i18next": "^25.7.3", "i18next-browser-languagedetector": "^8.2.0", "i18next-http-backend": "^3.0.2", "isbot": "^5.1.32", "lucide-react": "^0.561.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.306.0", + "posthog-js": "^1.306.1", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -44,7 +44,7 @@ "socket.io-client": "^4.8.1", "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", - "vite": "^7.2.7", + "vite": "^7.3.0", "zustand": "^5.0.9" }, "scripts": { @@ -87,9 +87,9 @@ "@tanstack/eslint-plugin-query": "^5.91.0", "@testing-library/dom": "^10.4.1", "@testing-library/jest-dom": "^6.9.1", - "@testing-library/react": "^16.3.0", + "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^25.0.1", + "@types/node": "^25.0.2", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", @@ -116,7 +116,7 @@ "tailwindcss": "^4.1.8", "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^5.1.4", + "vite-tsconfig-paths": "^6.0.1", "vitest": "^4.0.14" }, "packageManager": "npm@10.5.0", From 49740a463f1d262290ac34793b61515fc3987b4d Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Tue, 16 Dec 2025 20:14:19 +0700 Subject: [PATCH 13/80] fix(frontend): clicking think block in conversation pane breaks ui (v1 conversations) (#12057) --- .../v1/chat/event-content-helpers/get-event-content.tsx | 3 +++ .../chat/event-content-helpers/get-observation-content.ts | 8 +++++++- frontend/src/types/v1/core/base/observation.ts | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx b/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx index d9b9bf2d2d..dec57f385f 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx +++ b/frontend/src/components/v1/chat/event-content-helpers/get-event-content.tsx @@ -159,6 +159,9 @@ const getObservationEventTitle = (event: OpenHandsEvent): React.ReactNode => { } break; } + case "ThinkObservation": + observationKey = "OBSERVATION_MESSAGE$THINK"; + break; default: // For unknown observations, use the type name return observationType.replace("Observation", "").toUpperCase(); diff --git a/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts b/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts index bf443ea71c..7fb1c2ce1c 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/get-observation-content.ts @@ -190,7 +190,13 @@ const getThinkObservationContent = ( event: ObservationEvent, ): string => { const { observation } = event; - return observation.content || ""; + + const textContent = observation.content + .filter((c) => c.type === "text") + .map((c) => c.text) + .join("\n"); + + return textContent || ""; }; const getFinishObservationContent = ( diff --git a/frontend/src/types/v1/core/base/observation.ts b/frontend/src/types/v1/core/base/observation.ts index 42726c2b32..a1c8a1a48d 100644 --- a/frontend/src/types/v1/core/base/observation.ts +++ b/frontend/src/types/v1/core/base/observation.ts @@ -36,7 +36,7 @@ export interface ThinkObservation extends ObservationBase<"ThinkObservation"> { /** * Confirmation message. DEFAULT: "Your thought has been logged." */ - content: string; + content: Array; } export interface BrowserObservation extends ObservationBase<"BrowserObservation"> { From 3b7b2fd8cc59f096850116d44b76e5a54d242285 Mon Sep 17 00:00:00 2001 From: Mariam Saeed <69825646+Mariam-Saeed@users.noreply.github.com> Date: Tue, 16 Dec 2025 15:39:15 +0200 Subject: [PATCH 14/80] fix(frontend): Separate pause state from agent loading (#12041) Co-authored-by: amanape <83104063+amanape@users.noreply.github.com> --- .../conversation/agent-status.test.tsx | 71 +++++++++++++++++++ .../features/controls/agent-status.tsx | 21 +++--- 2 files changed, 84 insertions(+), 8 deletions(-) create mode 100644 frontend/__tests__/components/features/conversation/agent-status.test.tsx diff --git a/frontend/__tests__/components/features/conversation/agent-status.test.tsx b/frontend/__tests__/components/features/conversation/agent-status.test.tsx new file mode 100644 index 0000000000..a121ed37a8 --- /dev/null +++ b/frontend/__tests__/components/features/conversation/agent-status.test.tsx @@ -0,0 +1,71 @@ +import { render, screen } from "@testing-library/react"; +import { describe, it, expect, vi } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { MemoryRouter } from "react-router"; +import { AgentStatus } from "#/components/features/controls/agent-status"; +import { AgentState } from "#/types/agent-state"; +import { useAgentState } from "#/hooks/use-agent-state"; +import { useConversationStore } from "#/state/conversation-store"; + +vi.mock("#/hooks/use-agent-state"); + +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ conversationId: "test-id" }), +})); + +const wrapper = ({ children }: { children: React.ReactNode }) => ( + + + {children} + + +); + +const renderAgentStatus = ({ + isPausing = false, +}: { isPausing?: boolean } = {}) => + render( + , + { wrapper }, + ); + +describe("AgentStatus - isLoading logic", () => { + it("should show loading when curAgentState is INIT", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.INIT, + }); + + renderAgentStatus(); + + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + }); + + it("should show loading when isPausing is true, even if shouldShownAgentLoading is false", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + + renderAgentStatus({ isPausing: true }); + + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + }); + + it("should NOT update global shouldShownAgentLoading when only isPausing is true", () => { + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + + renderAgentStatus({ isPausing: true }); + + // Loading spinner shows (because isPausing) + expect(screen.getByTestId("agent-loading-spinner")).toBeInTheDocument(); + + // But global state should be false (because shouldShownAgentLoading is false) + const { shouldShownAgentLoading } = useConversationStore.getState(); + expect(shouldShownAgentLoading).toBe(false); + }); +}); diff --git a/frontend/src/components/features/controls/agent-status.tsx b/frontend/src/components/features/controls/agent-status.tsx index 078eb5f40f..675b5881a3 100644 --- a/frontend/src/components/features/controls/agent-status.tsx +++ b/frontend/src/components/features/controls/agent-status.tsx @@ -59,13 +59,15 @@ export function AgentStatus({ ); const shouldShownAgentLoading = - isPausing || curAgentState === AgentState.INIT || curAgentState === AgentState.LOADING || (webSocketStatus === "CONNECTING" && taskStatus !== "ERROR") || isTaskPolling(taskStatus) || isTaskPolling(subConversationTaskStatus); + // For UI rendering - includes pause state + const isLoading = shouldShownAgentLoading || isPausing; + const shouldShownAgentError = curAgentState === AgentState.ERROR || curAgentState === AgentState.RATE_LIMITED || @@ -93,25 +95,28 @@ export function AgentStatus({
- {shouldShownAgentLoading && } - {!shouldShownAgentLoading && shouldShownAgentStop && ( + {isLoading && } + {!isLoading && shouldShownAgentStop && ( )} - {!shouldShownAgentLoading && shouldShownAgentResume && ( + {!isLoading && shouldShownAgentResume && ( )} - {!shouldShownAgentLoading && shouldShownAgentError && ( - + {!isLoading && shouldShownAgentError && ( + )} - {!shouldShownAgentLoading && + {!isLoading && !shouldShownAgentStop && !shouldShownAgentResume && !shouldShownAgentError && } From 2a98c955571315c81b4da789c005e7a9d8487205 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 16 Dec 2025 09:03:29 -0700 Subject: [PATCH 15/80] Release OpenHands v1.0.0 (#12052) Co-authored-by: mamoodi Co-authored-by: Engel Nyst --- Development.md | 2 +- containers/dev/compose.yml | 2 +- docker-compose.yml | 2 +- frontend/package-lock.json | 4 ++-- frontend/package.json | 2 +- openhands/runtime/impl/kubernetes/README.md | 2 +- openhands/storage/settings/file_settings_store.py | 5 +++++ pyproject.toml | 2 +- 8 files changed, 13 insertions(+), 8 deletions(-) diff --git a/Development.md b/Development.md index bfa057efc1..421959a5ec 100644 --- a/Development.md +++ b/Development.md @@ -161,7 +161,7 @@ poetry run pytest ./tests/unit/test_*.py To reduce build time (e.g., if no changes were made to the client-runtime component), you can use an existing Docker container image by setting the SANDBOX_RUNTIME_CONTAINER_IMAGE environment variable to the desired Docker image. -Example: `export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/openhands/runtime:0.62-nikolaik` +Example: `export SANDBOX_RUNTIME_CONTAINER_IMAGE=ghcr.io/openhands/runtime:1.0-nikolaik` ## Develop inside Docker container diff --git a/containers/dev/compose.yml b/containers/dev/compose.yml index c6168b094f..7ff5042081 100644 --- a/containers/dev/compose.yml +++ b/containers/dev/compose.yml @@ -12,7 +12,7 @@ services: - SANDBOX_API_HOSTNAME=host.docker.internal - DOCKER_HOST_ADDR=host.docker.internal # - - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-ghcr.io/openhands/runtime:0.62-nikolaik} + - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-ghcr.io/openhands/runtime:1.0-nikolaik} - SANDBOX_USER_ID=${SANDBOX_USER_ID:-1234} - WORKSPACE_MOUNT_PATH=${WORKSPACE_BASE:-$PWD/workspace} ports: diff --git a/docker-compose.yml b/docker-compose.yml index b663324625..d4aef552c0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -7,7 +7,7 @@ services: image: openhands:latest container_name: openhands-app-${DATE:-} environment: - - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-docker.openhands.dev/openhands/runtime:0.62-nikolaik} + - SANDBOX_RUNTIME_CONTAINER_IMAGE=${SANDBOX_RUNTIME_CONTAINER_IMAGE:-docker.openhands.dev/openhands/runtime:1.0-nikolaik} #- SANDBOX_USER_ID=${SANDBOX_USER_ID:-1234} # enable this only if you want a specific non-root sandbox user but you will have to manually adjust permissions of ~/.openhands for this user - WORKSPACE_MOUNT_PATH=${WORKSPACE_BASE:-$PWD/workspace} ports: diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 9334b494cf..08011449b8 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -1,12 +1,12 @@ { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "dependencies": { "@heroui/react": "2.8.6", "@microlink/react-json-view": "^1.26.2", diff --git a/frontend/package.json b/frontend/package.json index bd3af09ef7..7dc0c5bcfb 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,6 +1,6 @@ { "name": "openhands-frontend", - "version": "0.62.0", + "version": "1.0.0", "private": true, "type": "module", "engines": { diff --git a/openhands/runtime/impl/kubernetes/README.md b/openhands/runtime/impl/kubernetes/README.md index d16247389d..36b7452d1e 100644 --- a/openhands/runtime/impl/kubernetes/README.md +++ b/openhands/runtime/impl/kubernetes/README.md @@ -40,7 +40,7 @@ Two configuration options are required to use the Kubernetes runtime: 2. **Runtime Container Image**: Specify the container image to use for the runtime environment ```toml [sandbox] - runtime_container_image = "docker.openhands.dev/openhands/runtime:0.62-nikolaik" + runtime_container_image = "docker.openhands.dev/openhands/runtime:1.0-nikolaik" ``` #### Additional Kubernetes Options diff --git a/openhands/storage/settings/file_settings_store.py b/openhands/storage/settings/file_settings_store.py index 3acedeb16f..5b43bf6b80 100644 --- a/openhands/storage/settings/file_settings_store.py +++ b/openhands/storage/settings/file_settings_store.py @@ -21,6 +21,11 @@ class FileSettingsStore(SettingsStore): json_str = await call_sync_from_async(self.file_store.read, self.path) kwargs = json.loads(json_str) settings = Settings(**kwargs) + + # Turn on V1 in OpenHands + # We can simplify / remove this as part of V0 removal + settings.v1_enabled = True + return settings except FileNotFoundError: return None diff --git a/pyproject.toml b/pyproject.toml index e9aa84a836..dc2c52a112 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ requires = [ [tool.poetry] name = "openhands-ai" -version = "0.62.0" +version = "1.0.0" description = "OpenHands: Code Less, Make More" authors = [ "OpenHands" ] license = "MIT" From 7853b41adda0a6069274d236902b1e57a8c213c4 Mon Sep 17 00:00:00 2001 From: Rohit Malhotra Date: Tue, 16 Dec 2025 11:54:01 -0500 Subject: [PATCH 16/80] Add OAuth 2.0 Device Flow backend for CLI authentication (#11984) Co-authored-by: openhands Co-authored-by: Xingyao Wang --- .../allhands-realm-github-provider.json.tmpl | 1 + .../versions/084_create_device_codes_table.py | 49 ++ enterprise/saas_server.py | 2 + enterprise/server/middleware.py | 9 +- enterprise/server/routes/oauth_device.py | 324 ++++++++++ enterprise/storage/api_key_store.py | 39 +- enterprise/storage/device_code.py | 109 ++++ enterprise/storage/device_code_store.py | 167 +++++ enterprise/tests/unit/conftest.py | 1 + .../unit/server/routes/test_oauth_device.py | 610 ++++++++++++++++++ .../tests/unit/storage/test_device_code.py | 83 +++ .../unit/storage/test_device_code_store.py | 193 ++++++ enterprise/tests/unit/test_api_key_store.py | 44 ++ frontend/src/routes.ts | 1 + frontend/src/routes/device-verify.tsx | 274 ++++++++ 15 files changed, 1901 insertions(+), 5 deletions(-) create mode 100644 enterprise/migrations/versions/084_create_device_codes_table.py create mode 100644 enterprise/server/routes/oauth_device.py create mode 100644 enterprise/storage/device_code.py create mode 100644 enterprise/storage/device_code_store.py create mode 100644 enterprise/tests/unit/server/routes/test_oauth_device.py create mode 100644 enterprise/tests/unit/storage/test_device_code.py create mode 100644 enterprise/tests/unit/storage/test_device_code_store.py create mode 100644 frontend/src/routes/device-verify.tsx diff --git a/enterprise/allhands-realm-github-provider.json.tmpl b/enterprise/allhands-realm-github-provider.json.tmpl index 6cdaa34383..35ff5f0afc 100644 --- a/enterprise/allhands-realm-github-provider.json.tmpl +++ b/enterprise/allhands-realm-github-provider.json.tmpl @@ -721,6 +721,7 @@ "https://$WEB_HOST/oauth/keycloak/callback", "https://$WEB_HOST/oauth/keycloak/offline/callback", "https://$WEB_HOST/slack/keycloak-callback", + "https://$WEB_HOST/oauth/device/keycloak-callback", "https://$WEB_HOST/api/email/verified", "/realms/$KEYCLOAK_REALM_NAME/$KEYCLOAK_CLIENT_ID/*" ], diff --git a/enterprise/migrations/versions/084_create_device_codes_table.py b/enterprise/migrations/versions/084_create_device_codes_table.py new file mode 100644 index 0000000000..0898e09ef5 --- /dev/null +++ b/enterprise/migrations/versions/084_create_device_codes_table.py @@ -0,0 +1,49 @@ +"""Create device_codes table for OAuth 2.0 Device Flow + +Revision ID: 084 +Revises: 083 +Create Date: 2024-12-10 12:00:00.000000 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = '084' +down_revision = '083' +branch_labels = None +depends_on = None + + +def upgrade(): + """Create device_codes table for OAuth 2.0 Device Flow.""" + op.create_table( + 'device_codes', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('device_code', sa.String(length=128), nullable=False), + sa.Column('user_code', sa.String(length=16), nullable=False), + sa.Column('status', sa.String(length=32), nullable=False), + sa.Column('keycloak_user_id', sa.String(length=255), nullable=True), + sa.Column('expires_at', sa.DateTime(timezone=True), nullable=False), + sa.Column('authorized_at', sa.DateTime(timezone=True), nullable=True), + # Rate limiting fields for RFC 8628 section 3.5 compliance + sa.Column('last_poll_time', sa.DateTime(timezone=True), nullable=True), + sa.Column('current_interval', sa.Integer(), nullable=False, default=5), + sa.PrimaryKeyConstraint('id'), + ) + + # Create indexes for efficient lookups + op.create_index( + 'ix_device_codes_device_code', 'device_codes', ['device_code'], unique=True + ) + op.create_index( + 'ix_device_codes_user_code', 'device_codes', ['user_code'], unique=True + ) + + +def downgrade(): + """Drop device_codes table.""" + op.drop_index('ix_device_codes_user_code', table_name='device_codes') + op.drop_index('ix_device_codes_device_code', table_name='device_codes') + op.drop_table('device_codes') diff --git a/enterprise/saas_server.py b/enterprise/saas_server.py index 4c3c7c49ba..96e19a9815 100644 --- a/enterprise/saas_server.py +++ b/enterprise/saas_server.py @@ -34,6 +34,7 @@ from server.routes.integration.jira_dc import jira_dc_integration_router # noqa from server.routes.integration.linear import linear_integration_router # noqa: E402 from server.routes.integration.slack import slack_router # noqa: E402 from server.routes.mcp_patch import patch_mcp_server # noqa: E402 +from server.routes.oauth_device import oauth_device_router # noqa: E402 from server.routes.readiness import readiness_router # noqa: E402 from server.routes.user import saas_user_router # noqa: E402 @@ -60,6 +61,7 @@ base_app.mount('/internal/metrics', metrics_app()) base_app.include_router(readiness_router) # Add routes for readiness checks base_app.include_router(api_router) # Add additional route for github auth base_app.include_router(oauth_router) # Add additional route for oauth callback +base_app.include_router(oauth_device_router) # Add OAuth 2.0 Device Flow routes base_app.include_router(saas_user_router) # Add additional route SAAS user calls base_app.include_router( billing_router diff --git a/enterprise/server/middleware.py b/enterprise/server/middleware.py index 2972c1ec38..54e3319595 100644 --- a/enterprise/server/middleware.py +++ b/enterprise/server/middleware.py @@ -152,17 +152,22 @@ class SetAuthCookieMiddleware: return False path = request.url.path - is_api_that_should_attach = path.startswith('/api') and path not in ( + ignore_paths = ( '/api/options/config', '/api/keycloak/callback', '/api/billing/success', '/api/billing/cancel', '/api/billing/customer-setup-success', '/api/billing/stripe-webhook', + '/oauth/device/authorize', + '/oauth/device/token', ) + if path in ignore_paths: + return False is_mcp = path.startswith('/mcp') - return is_api_that_should_attach or is_mcp + is_api_route = path.startswith('/api') + return is_api_route or is_mcp async def _logout(self, request: Request): # Log out of keycloak - this prevents issues where you did not log in with the idp you believe you used diff --git a/enterprise/server/routes/oauth_device.py b/enterprise/server/routes/oauth_device.py new file mode 100644 index 0000000000..39ff9a4081 --- /dev/null +++ b/enterprise/server/routes/oauth_device.py @@ -0,0 +1,324 @@ +"""OAuth 2.0 Device Flow endpoints for CLI authentication.""" + +from datetime import UTC, datetime, timedelta +from typing import Optional + +from fastapi import APIRouter, Depends, Form, HTTPException, Request, status +from fastapi.responses import JSONResponse +from pydantic import BaseModel +from storage.api_key_store import ApiKeyStore +from storage.database import session_maker +from storage.device_code_store import DeviceCodeStore + +from openhands.core.logger import openhands_logger as logger +from openhands.server.user_auth import get_user_id + +# --------------------------------------------------------------------------- +# Constants +# --------------------------------------------------------------------------- + +DEVICE_CODE_EXPIRES_IN = 600 # 10 minutes +DEVICE_TOKEN_POLL_INTERVAL = 5 # seconds + +API_KEY_NAME = 'Device Link Access Key' +KEY_EXPIRATION_TIME = timedelta(days=1) # Key expires in 24 hours + +# --------------------------------------------------------------------------- +# Models +# --------------------------------------------------------------------------- + + +class DeviceAuthorizationResponse(BaseModel): + device_code: str + user_code: str + verification_uri: str + verification_uri_complete: str + expires_in: int + interval: int + + +class DeviceTokenResponse(BaseModel): + access_token: str # This will be the user's API key + token_type: str = 'Bearer' + expires_in: Optional[int] = None # API keys may not have expiration + + +class DeviceTokenErrorResponse(BaseModel): + error: str + error_description: Optional[str] = None + interval: Optional[int] = None # Required for slow_down error + + +# --------------------------------------------------------------------------- +# Router + stores +# --------------------------------------------------------------------------- + +oauth_device_router = APIRouter(prefix='/oauth/device') +device_code_store = DeviceCodeStore(session_maker) + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + + +def _oauth_error( + status_code: int, + error: str, + description: str, + interval: Optional[int] = None, +) -> JSONResponse: + """Return a JSON OAuth-style error response.""" + return JSONResponse( + status_code=status_code, + content=DeviceTokenErrorResponse( + error=error, + error_description=description, + interval=interval, + ).model_dump(), + ) + + +# --------------------------------------------------------------------------- +# Endpoints +# --------------------------------------------------------------------------- + + +@oauth_device_router.post('/authorize', response_model=DeviceAuthorizationResponse) +async def device_authorization( + http_request: Request, +) -> DeviceAuthorizationResponse: + """Start device flow by generating device and user codes.""" + try: + device_code_entry = device_code_store.create_device_code( + expires_in=DEVICE_CODE_EXPIRES_IN, + ) + + base_url = str(http_request.base_url).rstrip('/') + verification_uri = f'{base_url}/oauth/device/verify' + verification_uri_complete = ( + f'{verification_uri}?user_code={device_code_entry.user_code}' + ) + + logger.info( + 'Device authorization initiated', + extra={'user_code': device_code_entry.user_code}, + ) + + return DeviceAuthorizationResponse( + device_code=device_code_entry.device_code, + user_code=device_code_entry.user_code, + verification_uri=verification_uri, + verification_uri_complete=verification_uri_complete, + expires_in=DEVICE_CODE_EXPIRES_IN, + interval=device_code_entry.current_interval, + ) + except Exception as e: + logger.exception('Error in device authorization: %s', str(e)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Internal server error', + ) from e + + +@oauth_device_router.post('/token') +async def device_token(device_code: str = Form(...)): + """Poll for a token until the user authorizes or the code expires.""" + try: + device_code_entry = device_code_store.get_by_device_code(device_code) + + if not device_code_entry: + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'invalid_grant', + 'Invalid device code', + ) + + # Check rate limiting (RFC 8628 section 3.5) + is_too_fast, current_interval = device_code_entry.check_rate_limit() + if is_too_fast: + # Update poll time and increase interval + device_code_store.update_poll_time(device_code, increase_interval=True) + logger.warning( + 'Client polling too fast, returning slow_down error', + extra={ + 'device_code': device_code[:8] + '...', # Log partial for privacy + 'new_interval': current_interval, + }, + ) + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'slow_down', + f'Polling too frequently. Wait at least {current_interval} seconds between requests.', + interval=current_interval, + ) + + # Update poll time for successful rate limit check + device_code_store.update_poll_time(device_code, increase_interval=False) + + if device_code_entry.is_expired(): + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'expired_token', + 'Device code has expired', + ) + + if device_code_entry.status == 'denied': + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'access_denied', + 'User denied the authorization request', + ) + + if device_code_entry.status == 'pending': + return _oauth_error( + status.HTTP_400_BAD_REQUEST, + 'authorization_pending', + 'User has not yet completed authorization', + ) + + if device_code_entry.status == 'authorized': + # Retrieve the specific API key for this device using the user_code + api_key_store = ApiKeyStore.get_instance() + device_key_name = f'{API_KEY_NAME} ({device_code_entry.user_code})' + device_api_key = api_key_store.retrieve_api_key_by_name( + device_code_entry.keycloak_user_id, device_key_name + ) + + if not device_api_key: + logger.error( + 'No device API key found for authorized device', + extra={ + 'user_id': device_code_entry.keycloak_user_id, + 'user_code': device_code_entry.user_code, + }, + ) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'API key not found', + ) + + # Return the API key as access_token + return DeviceTokenResponse( + access_token=device_api_key, + ) + + # Fallback for unexpected status values + logger.error( + 'Unknown device code status', + extra={'status': device_code_entry.status}, + ) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'Unknown device code status', + ) + + except Exception as e: + logger.exception('Error in device token: %s', str(e)) + return _oauth_error( + status.HTTP_500_INTERNAL_SERVER_ERROR, + 'server_error', + 'Internal server error', + ) + + +@oauth_device_router.post('/verify-authenticated') +async def device_verification_authenticated( + user_code: str = Form(...), + user_id: str = Depends(get_user_id), +): + """Process device verification for authenticated users (called by frontend).""" + try: + if not user_id: + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail='Authentication required', + ) + + # Validate device code + device_code_entry = device_code_store.get_by_user_code(user_code) + if not device_code_entry: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='The device code is invalid or has expired.', + ) + + if not device_code_entry.is_pending(): + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail='This device code has already been processed.', + ) + + # First, authorize the device code + success = device_code_store.authorize_device_code( + user_code=user_code, + user_id=user_id, + ) + + if not success: + logger.error( + 'Failed to authorize device code', + extra={'user_code': user_code, 'user_id': user_id}, + ) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to authorize the device. Please try again.', + ) + + # Only create API key AFTER successful authorization + api_key_store = ApiKeyStore.get_instance() + try: + # Create a unique API key for this device using user_code in the name + device_key_name = f'{API_KEY_NAME} ({user_code})' + api_key_store.create_api_key( + user_id, + name=device_key_name, + expires_at=datetime.now(UTC) + KEY_EXPIRATION_TIME, + ) + logger.info( + 'Created new device API key for user after successful authorization', + extra={'user_id': user_id, 'user_code': user_code}, + ) + except Exception as e: + logger.exception( + 'Failed to create device API key after authorization: %s', str(e) + ) + + # Clean up: revert the device authorization since API key creation failed + # This prevents the device from being in an authorized state without an API key + try: + device_code_store.deny_device_code(user_code) + logger.info( + 'Reverted device authorization due to API key creation failure', + extra={'user_code': user_code, 'user_id': user_id}, + ) + except Exception as cleanup_error: + logger.exception( + 'Failed to revert device authorization during cleanup: %s', + str(cleanup_error), + ) + + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='Failed to create API key for device access.', + ) + + logger.info( + 'Device code authorized with API key successfully', + extra={'user_code': user_code, 'user_id': user_id}, + ) + return JSONResponse( + status_code=status.HTTP_200_OK, + content={'message': 'Device authorized successfully!'}, + ) + + except HTTPException: + raise + except Exception as e: + logger.exception('Error in device verification: %s', str(e)) + raise HTTPException( + status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + detail='An unexpected error occurred. Please try again.', + ) diff --git a/enterprise/storage/api_key_store.py b/enterprise/storage/api_key_store.py index 162ed415c1..693bfdb321 100644 --- a/enterprise/storage/api_key_store.py +++ b/enterprise/storage/api_key_store.py @@ -57,9 +57,15 @@ class ApiKeyStore: return None # Check if the key has expired - if key_record.expires_at and key_record.expires_at < now: - logger.info(f'API key has expired: {key_record.id}') - return None + if key_record.expires_at: + # Handle timezone-naive datetime from database by assuming it's UTC + expires_at = key_record.expires_at + if expires_at.tzinfo is None: + expires_at = expires_at.replace(tzinfo=UTC) + + if expires_at < now: + logger.info(f'API key has expired: {key_record.id}') + return None # Update last_used_at timestamp session.execute( @@ -125,6 +131,33 @@ class ApiKeyStore: return None + def retrieve_api_key_by_name(self, user_id: str, name: str) -> str | None: + """Retrieve an API key by name for a specific user.""" + with self.session_maker() as session: + key_record = ( + session.query(ApiKey) + .filter(ApiKey.user_id == user_id, ApiKey.name == name) + .first() + ) + return key_record.key if key_record else None + + def delete_api_key_by_name(self, user_id: str, name: str) -> bool: + """Delete an API key by name for a specific user.""" + with self.session_maker() as session: + key_record = ( + session.query(ApiKey) + .filter(ApiKey.user_id == user_id, ApiKey.name == name) + .first() + ) + + if not key_record: + return False + + session.delete(key_record) + session.commit() + + return True + @classmethod def get_instance(cls) -> ApiKeyStore: """Get an instance of the ApiKeyStore.""" diff --git a/enterprise/storage/device_code.py b/enterprise/storage/device_code.py new file mode 100644 index 0000000000..47e18b51bc --- /dev/null +++ b/enterprise/storage/device_code.py @@ -0,0 +1,109 @@ +"""Device code storage model for OAuth 2.0 Device Flow.""" + +from datetime import datetime, timezone +from enum import Enum + +from sqlalchemy import Column, DateTime, Integer, String +from storage.base import Base + + +class DeviceCodeStatus(Enum): + """Status of a device code authorization request.""" + + PENDING = 'pending' + AUTHORIZED = 'authorized' + EXPIRED = 'expired' + DENIED = 'denied' + + +class DeviceCode(Base): + """Device code for OAuth 2.0 Device Flow. + + This stores the device codes issued during the device authorization flow, + along with their status and associated user information once authorized. + """ + + __tablename__ = 'device_codes' + + id = Column(Integer, primary_key=True, autoincrement=True) + device_code = Column(String(128), unique=True, nullable=False, index=True) + user_code = Column(String(16), unique=True, nullable=False, index=True) + status = Column(String(32), nullable=False, default=DeviceCodeStatus.PENDING.value) + + # Keycloak user ID who authorized the device (set during verification) + keycloak_user_id = Column(String(255), nullable=True) + + # Timestamps + expires_at = Column(DateTime(timezone=True), nullable=False) + authorized_at = Column(DateTime(timezone=True), nullable=True) + + # Rate limiting fields for RFC 8628 section 3.5 compliance + last_poll_time = Column(DateTime(timezone=True), nullable=True) + current_interval = Column(Integer, nullable=False, default=5) + + def __repr__(self) -> str: + return f"" + + def is_expired(self) -> bool: + """Check if the device code has expired.""" + now = datetime.now(timezone.utc) + return now > self.expires_at + + def is_pending(self) -> bool: + """Check if the device code is still pending authorization.""" + return self.status == DeviceCodeStatus.PENDING.value and not self.is_expired() + + def is_authorized(self) -> bool: + """Check if the device code has been authorized.""" + return self.status == DeviceCodeStatus.AUTHORIZED.value + + def authorize(self, user_id: str) -> None: + """Mark the device code as authorized.""" + self.status = DeviceCodeStatus.AUTHORIZED.value + self.keycloak_user_id = user_id # Set the Keycloak user ID during authorization + self.authorized_at = datetime.now(timezone.utc) + + def deny(self) -> None: + """Mark the device code as denied.""" + self.status = DeviceCodeStatus.DENIED.value + + def expire(self) -> None: + """Mark the device code as expired.""" + self.status = DeviceCodeStatus.EXPIRED.value + + def check_rate_limit(self) -> tuple[bool, int]: + """Check if the client is polling too fast. + + Returns: + tuple: (is_too_fast, current_interval) + - is_too_fast: True if client should receive slow_down error + - current_interval: Current polling interval to use + """ + now = datetime.now(timezone.utc) + + # If this is the first poll, allow it + if self.last_poll_time is None: + return False, self.current_interval + + # Calculate time since last poll + time_since_last_poll = (now - self.last_poll_time).total_seconds() + + # Check if polling too fast + if time_since_last_poll < self.current_interval: + # Increase interval for slow_down (RFC 8628 section 3.5) + new_interval = min(self.current_interval + 5, 60) # Cap at 60 seconds + return True, new_interval + + return False, self.current_interval + + def update_poll_time(self, increase_interval: bool = False) -> None: + """Update the last poll time and optionally increase the interval. + + Args: + increase_interval: If True, increase the current interval for slow_down + """ + self.last_poll_time = datetime.now(timezone.utc) + + if increase_interval: + # Increase interval by 5 seconds, cap at 60 seconds (RFC 8628) + self.current_interval = min(self.current_interval + 5, 60) diff --git a/enterprise/storage/device_code_store.py b/enterprise/storage/device_code_store.py new file mode 100644 index 0000000000..de2fe29cc4 --- /dev/null +++ b/enterprise/storage/device_code_store.py @@ -0,0 +1,167 @@ +"""Device code store for OAuth 2.0 Device Flow.""" + +import secrets +import string +from datetime import datetime, timedelta, timezone + +from sqlalchemy.exc import IntegrityError +from storage.device_code import DeviceCode + + +class DeviceCodeStore: + """Store for managing OAuth 2.0 device codes.""" + + def __init__(self, session_maker): + self.session_maker = session_maker + + def generate_user_code(self) -> str: + """Generate a human-readable user code (8 characters, uppercase letters and digits).""" + # Use a mix of uppercase letters and digits, avoiding confusing characters + alphabet = 'ABCDEFGHJKLMNPQRSTUVWXYZ23456789' # No I, O, 0, 1 + return ''.join(secrets.choice(alphabet) for _ in range(8)) + + def generate_device_code(self) -> str: + """Generate a secure device code (128 characters).""" + alphabet = string.ascii_letters + string.digits + return ''.join(secrets.choice(alphabet) for _ in range(128)) + + def create_device_code( + self, + expires_in: int = 600, # 10 minutes default + max_attempts: int = 10, + ) -> DeviceCode: + """Create a new device code entry. + + Uses database constraints to ensure uniqueness, avoiding TOCTOU race conditions. + Retries on constraint violations until unique codes are generated. + + Args: + expires_in: Expiration time in seconds + max_attempts: Maximum number of attempts to generate unique codes + + Returns: + The created DeviceCode instance + + Raises: + RuntimeError: If unable to generate unique codes after max_attempts + """ + for attempt in range(max_attempts): + user_code = self.generate_user_code() + device_code = self.generate_device_code() + expires_at = datetime.now(timezone.utc) + timedelta(seconds=expires_in) + + device_code_entry = DeviceCode( + device_code=device_code, + user_code=user_code, + keycloak_user_id=None, # Will be set during authorization + expires_at=expires_at, + ) + + try: + with self.session_maker() as session: + session.add(device_code_entry) + session.commit() + session.refresh(device_code_entry) + session.expunge(device_code_entry) # Detach from session cleanly + return device_code_entry + except IntegrityError: + # Constraint violation - codes already exist, retry with new codes + continue + + raise RuntimeError( + f'Failed to generate unique device codes after {max_attempts} attempts' + ) + + def get_by_device_code(self, device_code: str) -> DeviceCode | None: + """Get device code entry by device code.""" + with self.session_maker() as session: + result = ( + session.query(DeviceCode).filter_by(device_code=device_code).first() + ) + if result: + session.expunge(result) # Detach from session cleanly + return result + + def get_by_user_code(self, user_code: str) -> DeviceCode | None: + """Get device code entry by user code.""" + with self.session_maker() as session: + result = session.query(DeviceCode).filter_by(user_code=user_code).first() + if result: + session.expunge(result) # Detach from session cleanly + return result + + def authorize_device_code(self, user_code: str, user_id: str) -> bool: + """Authorize a device code. + + Args: + user_code: The user code to authorize + user_id: The user ID from Keycloak + + Returns: + True if authorization was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(user_code=user_code).first() + ) + + if not device_code_entry: + return False + + if not device_code_entry.is_pending(): + return False + + device_code_entry.authorize(user_id) + session.commit() + + return True + + def deny_device_code(self, user_code: str) -> bool: + """Deny a device code authorization. + + Args: + user_code: The user code to deny + + Returns: + True if denial was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(user_code=user_code).first() + ) + + if not device_code_entry: + return False + + if not device_code_entry.is_pending(): + return False + + device_code_entry.deny() + session.commit() + + return True + + def update_poll_time( + self, device_code: str, increase_interval: bool = False + ) -> bool: + """Update the poll time for a device code and optionally increase interval. + + Args: + device_code: The device code to update + increase_interval: If True, increase the polling interval for slow_down + + Returns: + True if update was successful, False otherwise + """ + with self.session_maker() as session: + device_code_entry = ( + session.query(DeviceCode).filter_by(device_code=device_code).first() + ) + + if not device_code_entry: + return False + + device_code_entry.update_poll_time(increase_interval) + session.commit() + + return True diff --git a/enterprise/tests/unit/conftest.py b/enterprise/tests/unit/conftest.py index 08516fd813..873f7b775f 100644 --- a/enterprise/tests/unit/conftest.py +++ b/enterprise/tests/unit/conftest.py @@ -12,6 +12,7 @@ from storage.base import Base # Anything not loaded here may not have a table created for it. from storage.billing_session import BillingSession from storage.conversation_work import ConversationWork +from storage.device_code import DeviceCode # noqa: F401 from storage.feedback import Feedback from storage.github_app_installation import GithubAppInstallation from storage.maintenance_task import MaintenanceTask, MaintenanceTaskStatus diff --git a/enterprise/tests/unit/server/routes/test_oauth_device.py b/enterprise/tests/unit/server/routes/test_oauth_device.py new file mode 100644 index 0000000000..53682e65f0 --- /dev/null +++ b/enterprise/tests/unit/server/routes/test_oauth_device.py @@ -0,0 +1,610 @@ +"""Unit tests for OAuth2 Device Flow endpoints.""" + +from datetime import UTC, datetime, timedelta +from unittest.mock import MagicMock, patch + +import pytest +from fastapi import HTTPException, Request +from fastapi.responses import JSONResponse +from server.routes.oauth_device import ( + device_authorization, + device_token, + device_verification_authenticated, +) +from storage.device_code import DeviceCode + + +@pytest.fixture +def mock_device_code_store(): + """Mock device code store.""" + return MagicMock() + + +@pytest.fixture +def mock_api_key_store(): + """Mock API key store.""" + return MagicMock() + + +@pytest.fixture +def mock_token_manager(): + """Mock token manager.""" + return MagicMock() + + +@pytest.fixture +def mock_request(): + """Mock FastAPI request.""" + request = MagicMock(spec=Request) + request.base_url = 'https://test.example.com/' + return request + + +class TestDeviceAuthorization: + """Test device authorization endpoint.""" + + @patch('server.routes.oauth_device.device_code_store') + async def test_device_authorization_success(self, mock_store, mock_request): + """Test successful device authorization.""" + mock_device = DeviceCode( + device_code='test-device-code-123', + user_code='ABC12345', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + current_interval=5, # Default interval + ) + mock_store.create_device_code.return_value = mock_device + + result = await device_authorization(mock_request) + + assert result.device_code == 'test-device-code-123' + assert result.user_code == 'ABC12345' + assert result.expires_in == 600 + assert result.interval == 5 # Should match device's current_interval + assert 'verify' in result.verification_uri + assert 'ABC12345' in result.verification_uri_complete + + @patch('server.routes.oauth_device.device_code_store') + async def test_device_authorization_with_increased_interval( + self, mock_store, mock_request + ): + """Test device authorization returns increased interval from rate limiting.""" + mock_device = DeviceCode( + device_code='test-device-code-456', + user_code='XYZ98765', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + current_interval=15, # Increased interval from previous rate limiting + ) + mock_store.create_device_code.return_value = mock_device + + result = await device_authorization(mock_request) + + assert result.device_code == 'test-device-code-456' + assert result.user_code == 'XYZ98765' + assert result.expires_in == 600 + assert result.interval == 15 # Should match device's increased current_interval + assert 'verify' in result.verification_uri + assert 'XYZ98765' in result.verification_uri_complete + + +class TestDeviceToken: + """Test device token endpoint.""" + + @pytest.mark.parametrize( + 'device_exists,status,expected_error', + [ + (False, None, 'invalid_grant'), + (True, 'expired', 'expired_token'), + (True, 'denied', 'access_denied'), + (True, 'pending', 'authorization_pending'), + ], + ) + @patch('server.routes.oauth_device.device_code_store') + async def test_device_token_error_cases( + self, mock_store, device_exists, status, expected_error + ): + """Test various error cases for device token endpoint.""" + device_code = 'test-device-code' + + if device_exists: + mock_device = MagicMock() + mock_device.is_expired.return_value = status == 'expired' + mock_device.status = status + # Mock rate limiting - return False (not too fast) and default interval + mock_device.check_rate_limit.return_value = (False, 5) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + else: + mock_store.get_by_device_code.return_value = None + + result = await device_token(device_code=device_code) + + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + # Check error in response content + content = result.body.decode() + assert expected_error in content + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_device_token_success(self, mock_store, mock_api_key_class): + """Test successful device token retrieval.""" + device_code = 'test-device-code' + + # Mock authorized device + mock_device = MagicMock() + mock_device.is_expired.return_value = False + mock_device.status = 'authorized' + mock_device.keycloak_user_id = 'user-123' + mock_device.user_code = ( + 'ABC12345' # Add user_code for device-specific API key lookup + ) + # Mock rate limiting - return False (not too fast) and default interval + mock_device.check_rate_limit.return_value = (False, 5) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + # Mock API key retrieval + mock_api_key_store = MagicMock() + mock_api_key_store.retrieve_api_key_by_name.return_value = 'test-api-key' + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_token(device_code=device_code) + + # Check that result is a DeviceTokenResponse + assert result.access_token == 'test-api-key' + assert result.token_type == 'Bearer' + + # Verify that the correct device-specific API key name was used + mock_api_key_store.retrieve_api_key_by_name.assert_called_once_with( + 'user-123', 'Device Link Access Key (ABC12345)' + ) + + +class TestDeviceVerificationAuthenticated: + """Test device verification authenticated endpoint.""" + + async def test_verification_unauthenticated_user(self): + """Test verification with unauthenticated user.""" + with pytest.raises(HTTPException): + await device_verification_authenticated(user_code='ABC12345', user_id=None) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_invalid_device_code( + self, mock_store, mock_api_key_class + ): + """Test verification with invalid device code.""" + mock_store.get_by_user_code.return_value = None + + with pytest.raises(HTTPException): + await device_verification_authenticated( + user_code='INVALID', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_already_processed(self, mock_store, mock_api_key_class): + """Test verification with already processed device code.""" + mock_device = MagicMock() + mock_device.is_pending.return_value = False + mock_store.get_by_user_code.return_value = mock_device + + with pytest.raises(HTTPException): + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_verification_success(self, mock_store, mock_api_key_class): + """Test successful device verification.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert isinstance(result, JSONResponse) + assert result.status_code == 200 + # Should NOT delete existing API keys (multiple devices allowed) + mock_api_key_store.delete_api_key_by_name.assert_not_called() + # Should create a new API key with device-specific name + mock_api_key_store.create_api_key.assert_called_once() + call_args = mock_api_key_store.create_api_key.call_args + assert call_args[1]['name'] == 'Device Link Access Key (ABC12345)' + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_multiple_device_authentication(self, mock_store, mock_api_key_class): + """Test that multiple devices can authenticate simultaneously.""" + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Simulate two different devices + device1_code = 'ABC12345' + device2_code = 'XYZ67890' + user_id = 'user-123' + + # Mock device codes + mock_device1 = MagicMock() + mock_device1.is_pending.return_value = True + mock_device2 = MagicMock() + mock_device2.is_pending.return_value = True + + # Configure mock store to return appropriate device for each user_code + def get_by_user_code_side_effect(user_code): + if user_code == device1_code: + return mock_device1 + elif user_code == device2_code: + return mock_device2 + return None + + mock_store.get_by_user_code.side_effect = get_by_user_code_side_effect + mock_store.authorize_device_code.return_value = True + + # Authenticate first device + result1 = await device_verification_authenticated( + user_code=device1_code, user_id=user_id + ) + + # Authenticate second device + result2 = await device_verification_authenticated( + user_code=device2_code, user_id=user_id + ) + + # Both should succeed + assert isinstance(result1, JSONResponse) + assert result1.status_code == 200 + assert isinstance(result2, JSONResponse) + assert result2.status_code == 200 + + # Should create two separate API keys with different names + assert mock_api_key_store.create_api_key.call_count == 2 + + # Check that each device got a unique API key name + call_args_list = mock_api_key_store.create_api_key.call_args_list + device1_name = call_args_list[0][1]['name'] + device2_name = call_args_list[1][1]['name'] + + assert device1_name == f'Device Link Access Key ({device1_code})' + assert device2_name == f'Device Link Access Key ({device2_code})' + assert device1_name != device2_name # Ensure they're different + + # Should NOT delete any existing API keys + mock_api_key_store.delete_api_key_by_name.assert_not_called() + + +class TestDeviceTokenRateLimiting: + """Test rate limiting for device token polling (RFC 8628 section 3.5).""" + + @patch('server.routes.oauth_device.device_code_store') + async def test_first_poll_allowed(self, mock_store): + """Test that the first poll is always allowed.""" + # Create a device code with no previous poll time + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=None, # First poll + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return authorization_pending, not slow_down + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'authorization_pending' in content + assert 'slow_down' not in content + + # Should update poll time without increasing interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=False + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_normal_polling_allowed(self, mock_store): + """Test that normal polling (respecting interval) is allowed.""" + # Create a device code with last poll time 6 seconds ago (interval is 5) + last_poll = datetime.now(UTC) - timedelta(seconds=6) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return authorization_pending, not slow_down + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'authorization_pending' in content + assert 'slow_down' not in content + + # Should update poll time without increasing interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=False + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_fast_polling_returns_slow_down(self, mock_store): + """Test that polling too fast returns slow_down error.""" + # Create a device code with last poll time 2 seconds ago (interval is 5) + last_poll = datetime.now(UTC) - timedelta(seconds=2) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert 'interval' in content + assert '10' in content # New interval should be 5 + 5 = 10 + + # Should update poll time and increase interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_interval_increases_with_repeated_fast_polling(self, mock_store): + """Test that interval increases with repeated fast polling.""" + # Create a device code with higher current interval from previous slow_down + last_poll = datetime.now(UTC) - timedelta(seconds=5) # 5 seconds ago + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=15, # Already increased from previous slow_down + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error with increased interval + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert '20' in content # New interval should be 15 + 5 = 20 + + # Should update poll time and increase interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + @patch('server.routes.oauth_device.device_code_store') + async def test_interval_caps_at_maximum(self, mock_store): + """Test that interval is capped at maximum value.""" + # Create a device code with interval near maximum + last_poll = datetime.now(UTC) - timedelta(seconds=30) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='pending', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=58, # Near maximum of 60 + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should return slow_down error with capped interval + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + assert '60' in content # Should be capped at 60, not 63 + + @patch('server.routes.oauth_device.device_code_store') + async def test_rate_limiting_with_authorized_device(self, mock_store): + """Test that rate limiting still applies to authorized devices.""" + # Create an authorized device code with recent poll + last_poll = datetime.now(UTC) - timedelta(seconds=2) + mock_device = DeviceCode( + device_code='test_device_code', + user_code='ABC123', + status='authorized', # Device is authorized + keycloak_user_id='user123', + expires_at=datetime.now(UTC) + timedelta(minutes=10), + last_poll_time=last_poll, + current_interval=5, + ) + mock_store.get_by_device_code.return_value = mock_device + mock_store.update_poll_time.return_value = True + + device_code = 'test_device_code' + result = await device_token(device_code=device_code) + + # Should still return slow_down error even for authorized device + assert isinstance(result, JSONResponse) + assert result.status_code == 400 + content = result.body.decode() + assert 'slow_down' in content + + # Should update poll time and increase interval + mock_store.update_poll_time.assert_called_with( + 'test_device_code', increase_interval=True + ) + + +class TestDeviceVerificationTransactionIntegrity: + """Test transaction integrity for device verification to prevent orphaned API keys.""" + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_authorization_failure_prevents_api_key_creation( + self, mock_store, mock_api_key_class + ): + """Test that if device authorization fails, no API key is created.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = False # Authorization fails + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should raise HTTPException due to authorization failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to authorize the device' in exc_info.value.detail + + # API key should NOT be created since authorization failed + mock_api_key_store.create_api_key.assert_not_called() + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_api_key_creation_failure_reverts_authorization( + self, mock_store, mock_api_key_class + ): + """Test that if API key creation fails after authorization, the authorization is reverted.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + mock_store.deny_device_code.return_value = True # Cleanup succeeds + + # Mock API key store to fail on creation + mock_api_key_store = MagicMock() + mock_api_key_store.create_api_key.side_effect = Exception('Database error') + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should raise HTTPException due to API key creation failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to create API key for device access' in exc_info.value.detail + + # Authorization should have been attempted first + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + + # API key creation should have been attempted after authorization + mock_api_key_store.create_api_key.assert_called_once() + + # Authorization should be reverted due to API key creation failure + mock_store.deny_device_code.assert_called_once_with('ABC12345') + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_api_key_creation_failure_cleanup_failure_logged( + self, mock_store, mock_api_key_class + ): + """Test that cleanup failure is logged but doesn't prevent the main error from being raised.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + mock_store.deny_device_code.side_effect = Exception( + 'Cleanup failed' + ) # Cleanup fails + + # Mock API key store to fail on creation + mock_api_key_store = MagicMock() + mock_api_key_store.create_api_key.side_effect = Exception('Database error') + mock_api_key_class.get_instance.return_value = mock_api_key_store + + # Should still raise HTTPException for the original API key creation failure + with pytest.raises(HTTPException) as exc_info: + await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert exc_info.value.status_code == 500 + assert 'Failed to create API key for device access' in exc_info.value.detail + + # Both operations should have been attempted + mock_store.authorize_device_code.assert_called_once() + mock_api_key_store.create_api_key.assert_called_once() + mock_store.deny_device_code.assert_called_once_with('ABC12345') + + @patch('server.routes.oauth_device.ApiKeyStore') + @patch('server.routes.oauth_device.device_code_store') + async def test_successful_flow_creates_api_key_after_authorization( + self, mock_store, mock_api_key_class + ): + """Test that in the successful flow, API key is created only after authorization.""" + # Mock device code + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_store.get_by_user_code.return_value = mock_device + mock_store.authorize_device_code.return_value = True # Authorization succeeds + + # Mock API key store + mock_api_key_store = MagicMock() + mock_api_key_class.get_instance.return_value = mock_api_key_store + + result = await device_verification_authenticated( + user_code='ABC12345', user_id='user-123' + ) + + assert isinstance(result, JSONResponse) + assert result.status_code == 200 + + # Verify the order: authorization first, then API key creation + mock_store.authorize_device_code.assert_called_once_with( + user_code='ABC12345', user_id='user-123' + ) + mock_api_key_store.create_api_key.assert_called_once() + + # No cleanup should be needed in successful case + mock_store.deny_device_code.assert_not_called() diff --git a/enterprise/tests/unit/storage/test_device_code.py b/enterprise/tests/unit/storage/test_device_code.py new file mode 100644 index 0000000000..0d2193075b --- /dev/null +++ b/enterprise/tests/unit/storage/test_device_code.py @@ -0,0 +1,83 @@ +"""Unit tests for DeviceCode model.""" + +from datetime import datetime, timedelta, timezone + +import pytest +from storage.device_code import DeviceCode, DeviceCodeStatus + + +class TestDeviceCode: + """Test cases for DeviceCode model.""" + + @pytest.fixture + def device_code(self): + """Create a test device code.""" + return DeviceCode( + device_code='test-device-code-123', + user_code='ABC12345', + expires_at=datetime.now(timezone.utc) + timedelta(minutes=10), + ) + + @pytest.mark.parametrize( + 'expires_delta,expected', + [ + (timedelta(minutes=5), False), # Future expiry + (timedelta(minutes=-5), True), # Past expiry + (timedelta(seconds=1), False), # Just future (not expired) + ], + ) + def test_is_expired(self, expires_delta, expected): + """Test expiration check with various time deltas.""" + device_code = DeviceCode( + device_code='test-device-code', + user_code='ABC12345', + expires_at=datetime.now(timezone.utc) + expires_delta, + ) + assert device_code.is_expired() == expected + + @pytest.mark.parametrize( + 'status,expired,expected', + [ + (DeviceCodeStatus.PENDING.value, False, True), + (DeviceCodeStatus.PENDING.value, True, False), + (DeviceCodeStatus.AUTHORIZED.value, False, False), + (DeviceCodeStatus.DENIED.value, False, False), + ], + ) + def test_is_pending(self, status, expired, expected): + """Test pending status check.""" + expires_at = ( + datetime.now(timezone.utc) - timedelta(minutes=1) + if expired + else datetime.now(timezone.utc) + timedelta(minutes=10) + ) + device_code = DeviceCode( + device_code='test-device-code', + user_code='ABC12345', + status=status, + expires_at=expires_at, + ) + assert device_code.is_pending() == expected + + def test_authorize(self, device_code): + """Test device authorization.""" + user_id = 'test-user-123' + + device_code.authorize(user_id) + + assert device_code.status == DeviceCodeStatus.AUTHORIZED.value + assert device_code.keycloak_user_id == user_id + assert device_code.authorized_at is not None + assert isinstance(device_code.authorized_at, datetime) + + @pytest.mark.parametrize( + 'method,expected_status', + [ + ('deny', DeviceCodeStatus.DENIED.value), + ('expire', DeviceCodeStatus.EXPIRED.value), + ], + ) + def test_status_changes(self, device_code, method, expected_status): + """Test status change methods.""" + getattr(device_code, method)() + assert device_code.status == expected_status diff --git a/enterprise/tests/unit/storage/test_device_code_store.py b/enterprise/tests/unit/storage/test_device_code_store.py new file mode 100644 index 0000000000..65a58cda8a --- /dev/null +++ b/enterprise/tests/unit/storage/test_device_code_store.py @@ -0,0 +1,193 @@ +"""Unit tests for DeviceCodeStore.""" + +from unittest.mock import MagicMock + +import pytest +from sqlalchemy.exc import IntegrityError +from storage.device_code import DeviceCode +from storage.device_code_store import DeviceCodeStore + + +@pytest.fixture +def mock_session(): + """Mock database session.""" + session = MagicMock() + return session + + +@pytest.fixture +def mock_session_maker(mock_session): + """Mock session maker.""" + session_maker = MagicMock() + session_maker.return_value.__enter__.return_value = mock_session + session_maker.return_value.__exit__.return_value = None + return session_maker + + +@pytest.fixture +def device_code_store(mock_session_maker): + """Create DeviceCodeStore instance.""" + return DeviceCodeStore(mock_session_maker) + + +class TestDeviceCodeStore: + """Test cases for DeviceCodeStore.""" + + def test_generate_user_code(self, device_code_store): + """Test user code generation.""" + code = device_code_store.generate_user_code() + + assert len(code) == 8 + assert code.isupper() + # Should not contain confusing characters + assert not any(char in code for char in 'IO01') + + def test_generate_device_code(self, device_code_store): + """Test device code generation.""" + code = device_code_store.generate_device_code() + + assert len(code) == 128 + assert code.isalnum() + + def test_create_device_code_success(self, device_code_store, mock_session): + """Test successful device code creation.""" + # Mock successful creation (no IntegrityError) + mock_device_code = MagicMock(spec=DeviceCode) + mock_device_code.device_code = 'test-device-code-123' + mock_device_code.user_code = 'TESTCODE' + + # Mock the session to return our mock device code after refresh + def mock_refresh(obj): + obj.device_code = mock_device_code.device_code + obj.user_code = mock_device_code.user_code + + mock_session.refresh.side_effect = mock_refresh + + result = device_code_store.create_device_code(expires_in=600) + + assert isinstance(result, DeviceCode) + mock_session.add.assert_called_once() + mock_session.commit.assert_called_once() + mock_session.refresh.assert_called_once() + mock_session.expunge.assert_called_once() + + def test_create_device_code_with_retries( + self, device_code_store, mock_session_maker + ): + """Test device code creation with constraint violation retries.""" + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_session_maker.return_value.__exit__.return_value = None + + # First attempt fails with IntegrityError, second succeeds + mock_session.commit.side_effect = [IntegrityError('', '', ''), None] + + mock_device_code = MagicMock(spec=DeviceCode) + mock_device_code.device_code = 'test-device-code-456' + mock_device_code.user_code = 'TESTCD2' + + def mock_refresh(obj): + obj.device_code = mock_device_code.device_code + obj.user_code = mock_device_code.user_code + + mock_session.refresh.side_effect = mock_refresh + + store = DeviceCodeStore(mock_session_maker) + result = store.create_device_code(expires_in=600) + + assert isinstance(result, DeviceCode) + assert mock_session.add.call_count == 2 # Two attempts + assert mock_session.commit.call_count == 2 # Two attempts + + def test_create_device_code_max_attempts_exceeded( + self, device_code_store, mock_session_maker + ): + """Test device code creation failure after max attempts.""" + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_session_maker.return_value.__exit__.return_value = None + + # All attempts fail with IntegrityError + mock_session.commit.side_effect = IntegrityError('', '', '') + + store = DeviceCodeStore(mock_session_maker) + + with pytest.raises( + RuntimeError, + match='Failed to generate unique device codes after 3 attempts', + ): + store.create_device_code(expires_in=600, max_attempts=3) + + @pytest.mark.parametrize( + 'lookup_method,lookup_field', + [ + ('get_by_device_code', 'device_code'), + ('get_by_user_code', 'user_code'), + ], + ) + def test_lookup_methods( + self, device_code_store, mock_session, lookup_method, lookup_field + ): + """Test device code lookup methods.""" + test_code = 'test-code-123' + mock_device_code = MagicMock() + mock_session.query.return_value.filter_by.return_value.first.return_value = ( + mock_device_code + ) + + result = getattr(device_code_store, lookup_method)(test_code) + + assert result == mock_device_code + mock_session.query.assert_called_once_with(DeviceCode) + mock_session.query.return_value.filter_by.assert_called_once_with( + **{lookup_field: test_code} + ) + + @pytest.mark.parametrize( + 'device_exists,is_pending,expected_result', + [ + (True, True, True), # Success case + (False, True, False), # Device not found + (True, False, False), # Device not pending + ], + ) + def test_authorize_device_code( + self, + device_code_store, + mock_session, + device_exists, + is_pending, + expected_result, + ): + """Test device code authorization.""" + user_code = 'ABC12345' + user_id = 'test-user-123' + + if device_exists: + mock_device = MagicMock() + mock_device.is_pending.return_value = is_pending + mock_session.query.return_value.filter_by.return_value.first.return_value = mock_device + else: + mock_session.query.return_value.filter_by.return_value.first.return_value = None + + result = device_code_store.authorize_device_code(user_code, user_id) + + assert result == expected_result + if expected_result: + mock_device.authorize.assert_called_once_with(user_id) + mock_session.commit.assert_called_once() + + def test_deny_device_code(self, device_code_store, mock_session): + """Test device code denial.""" + user_code = 'ABC12345' + mock_device = MagicMock() + mock_device.is_pending.return_value = True + mock_session.query.return_value.filter_by.return_value.first.return_value = ( + mock_device + ) + + result = device_code_store.deny_device_code(user_code) + + assert result is True + mock_device.deny.assert_called_once() + mock_session.commit.assert_called_once() diff --git a/enterprise/tests/unit/test_api_key_store.py b/enterprise/tests/unit/test_api_key_store.py index ea386cb69c..c1c6a98f3d 100644 --- a/enterprise/tests/unit/test_api_key_store.py +++ b/enterprise/tests/unit/test_api_key_store.py @@ -90,6 +90,50 @@ def test_validate_api_key_expired(api_key_store, mock_session): mock_session.commit.assert_not_called() +def test_validate_api_key_expired_timezone_naive(api_key_store, mock_session): + """Test validating an expired API key with timezone-naive datetime from database.""" + # Setup + api_key = 'test-api-key' + mock_key_record = MagicMock() + # Simulate timezone-naive datetime as returned from database + mock_key_record.expires_at = datetime.now() - timedelta(days=1) # No UTC timezone + mock_key_record.id = 1 + mock_session.query.return_value.filter.return_value.first.return_value = ( + mock_key_record + ) + + # Execute + result = api_key_store.validate_api_key(api_key) + + # Verify + assert result is None + mock_session.execute.assert_not_called() + mock_session.commit.assert_not_called() + + +def test_validate_api_key_valid_timezone_naive(api_key_store, mock_session): + """Test validating a valid API key with timezone-naive datetime from database.""" + # Setup + api_key = 'test-api-key' + user_id = 'test-user-123' + mock_key_record = MagicMock() + mock_key_record.user_id = user_id + # Simulate timezone-naive datetime as returned from database (future date) + mock_key_record.expires_at = datetime.now() + timedelta(days=1) # No UTC timezone + mock_key_record.id = 1 + mock_session.query.return_value.filter.return_value.first.return_value = ( + mock_key_record + ) + + # Execute + result = api_key_store.validate_api_key(api_key) + + # Verify + assert result == user_id + mock_session.execute.assert_called_once() + mock_session.commit.assert_called_once() + + def test_validate_api_key_not_found(api_key_store, mock_session): """Test validating a non-existent API key.""" # Setup diff --git a/frontend/src/routes.ts b/frontend/src/routes.ts index 4c3c48adc5..ecee511688 100644 --- a/frontend/src/routes.ts +++ b/frontend/src/routes.ts @@ -21,5 +21,6 @@ export default [ ]), route("conversations/:conversationId", "routes/conversation.tsx"), route("microagent-management", "routes/microagent-management.tsx"), + route("oauth/device/verify", "routes/device-verify.tsx"), ]), ] satisfies RouteConfig; diff --git a/frontend/src/routes/device-verify.tsx b/frontend/src/routes/device-verify.tsx new file mode 100644 index 0000000000..f306d660a5 --- /dev/null +++ b/frontend/src/routes/device-verify.tsx @@ -0,0 +1,274 @@ +/* eslint-disable i18next/no-literal-string */ +import React, { useState } from "react"; +import { useSearchParams } from "react-router"; +import { useIsAuthed } from "#/hooks/query/use-is-authed"; + +export default function DeviceVerify() { + const [searchParams] = useSearchParams(); + const { data: isAuthed, isLoading: isAuthLoading } = useIsAuthed(); + const [verificationResult, setVerificationResult] = useState<{ + success: boolean; + message: string; + } | null>(null); + const [isProcessing, setIsProcessing] = useState(false); + + // Get user_code from URL parameters + const userCode = searchParams.get("user_code"); + + const processDeviceVerification = async (code: string) => { + try { + setIsProcessing(true); + + // Call the backend API endpoint to process device verification + const response = await fetch("/oauth/device/verify-authenticated", { + method: "POST", + headers: { + "Content-Type": "application/x-www-form-urlencoded", + }, + body: `user_code=${encodeURIComponent(code)}`, + credentials: "include", // Include cookies for authentication + }); + + if (response.ok) { + // Show success message + setVerificationResult({ + success: true, + message: + "Device authorized successfully! You can now return to your CLI and close this window.", + }); + } else { + const errorText = await response.text(); + setVerificationResult({ + success: false, + message: errorText || "Failed to authorize device. Please try again.", + }); + } + } catch (error) { + setVerificationResult({ + success: false, + message: + "An error occurred while authorizing the device. Please try again.", + }); + } finally { + setIsProcessing(false); + } + }; + + // Remove automatic verification - require explicit user consent + + const handleManualSubmit = (event: React.FormEvent) => { + event.preventDefault(); + const formData = new FormData(event.currentTarget); + const code = formData.get("user_code") as string; + if (code && isAuthed) { + processDeviceVerification(code); + } + }; + + // Show verification result if we have one + if (verificationResult) { + return ( +
+
+
+
+ {verificationResult.success ? ( + + + + ) : ( + + + + )} +
+

+ {verificationResult.success ? "Success!" : "Error"} +

+

+ {verificationResult.message} +

+ {!verificationResult.success && ( + + )} +
+
+
+ ); + } + + // Show processing state + if (isProcessing) { + return ( +
+
+
+
+

+ Processing device verification... +

+
+
+
+ ); + } + + // Show device authorization confirmation if user is authenticated and code is provided + if (isAuthed && userCode) { + return ( +
+
+

+ Device Authorization Request +

+
+

Device Code:

+

+ {userCode} +

+
+
+
+ + + +
+

+ Security Notice +

+

+ Only authorize this device if you initiated this request from + your CLI or application. +

+
+
+
+

+ Do you want to authorize this device to access your OpenHands + account? +

+
+ + +
+
+
+ ); + } + + // Show manual code entry form if no code in URL but user is authenticated + if (isAuthed && !userCode) { + return ( +
+
+

+ Device Authorization +

+

+ Enter the code displayed on your device: +

+
+
+ + +
+ +
+
+
+ ); + } + + // Show loading state while checking authentication + if (isAuthLoading) { + return ( +
+
+
+

+ Processing device verification... +

+
+
+ ); + } + + // Show authentication required message (this will trigger the auth modal via root layout) + return ( +
+
+

Authentication Required

+

+ Please sign in to authorize your device. +

+
+
+ ); +} From 281ac91540ab5e93b9b2075baf865d6912f8a244 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 16 Dec 2025 14:53:15 -0700 Subject: [PATCH 17/80] Bump sdk 1.6.0 (#12067) --- enterprise/poetry.lock | 41 ++++++++++--------- .../sandbox/sandbox_spec_service.py | 2 +- poetry.lock | 35 ++++++++-------- pyproject.toml | 6 +-- 4 files changed, 43 insertions(+), 41 deletions(-) diff --git a/enterprise/poetry.lock b/enterprise/poetry.lock index 13645253c5..bd2c55c317 100644 --- a/enterprise/poetry.lock +++ b/enterprise/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "aiofiles" @@ -4624,14 +4624,14 @@ files = [ [[package]] name = "lmnr" -version = "0.7.20" +version = "0.7.24" description = "Python SDK for Laminar" optional = false python-versions = "<4,>=3.10" groups = ["main"] files = [ - {file = "lmnr-0.7.20-py3-none-any.whl", hash = "sha256:5f9fa7444e6f96c25e097f66484ff29e632bdd1de0e9346948bf5595f4a8af38"}, - {file = "lmnr-0.7.20.tar.gz", hash = "sha256:1f484cd618db2d71af65f90a0b8b36d20d80dc91a5138b811575c8677bf7c4fd"}, + {file = "lmnr-0.7.24-py3-none-any.whl", hash = "sha256:ad780d4a62ece897048811f3368639c240a9329ab31027da8c96545137a3a08a"}, + {file = "lmnr-0.7.24.tar.gz", hash = "sha256:aa6973f46fc4ba95c9061c1feceb58afc02eb43c9376c21e32545371ff6123d7"}, ] [package.dependencies] @@ -4654,14 +4654,15 @@ tqdm = ">=4.0" [package.extras] alephalpha = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)"] -all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] +all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] bedrock = ["opentelemetry-instrumentation-bedrock (>=0.47.1)"] chromadb = ["opentelemetry-instrumentation-chromadb (>=0.47.1)"] +claude-agent-sdk = ["lmnr-claude-code-proxy (>=0.1.0a5)"] cohere = ["opentelemetry-instrumentation-cohere (>=0.47.1)"] crewai = ["opentelemetry-instrumentation-crewai (>=0.47.1)"] haystack = ["opentelemetry-instrumentation-haystack (>=0.47.1)"] lancedb = ["opentelemetry-instrumentation-lancedb (>=0.47.1)"] -langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1)"] +langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)"] llamaindex = ["opentelemetry-instrumentation-llamaindex (>=0.47.1)"] marqo = ["opentelemetry-instrumentation-marqo (>=0.47.1)"] mcp = ["opentelemetry-instrumentation-mcp (>=0.47.1)"] @@ -5835,14 +5836,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.5.2" +version = "1.6.0" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.5.2-py3-none-any.whl", hash = "sha256:7a368f61036f85446f566b9f6f9d6c7318684776cf2293daa5bce3ee19ac077d"}, - {file = "openhands_agent_server-1.5.2.tar.gz", hash = "sha256:dfaf5583dd71dae933643a8f8160156ce6fa7ed20db5cc3c45465b079bc576cd"}, + {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, + {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, ] [package.dependencies] @@ -5859,7 +5860,7 @@ wsproto = ">=1.2.0" [[package]] name = "openhands-ai" -version = "0.62.0" +version = "0.0.0-post.5687+7853b41ad" description = "OpenHands: Code Less, Make More" optional = false python-versions = "^3.12,<3.14" @@ -5901,9 +5902,9 @@ memory-profiler = "^0.61.0" numpy = "*" openai = "2.8.0" openhands-aci = "0.3.2" -openhands-agent-server = "1.5.2" -openhands-sdk = "1.5.2" -openhands-tools = "1.5.2" +openhands-agent-server = "1.6.0" +openhands-sdk = "1.6.0" +openhands-tools = "1.6.0" opentelemetry-api = "^1.33.1" opentelemetry-exporter-otlp-proto-grpc = "^1.33.1" pathspec = "^0.12.1" @@ -5959,14 +5960,14 @@ url = ".." [[package]] name = "openhands-sdk" -version = "1.5.2" +version = "1.6.0" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.5.2-py3-none-any.whl", hash = "sha256:593430e9c8729e345fce3fca7e9a9a7ef084a08222d6ba42113e6ba5f6e9f15d"}, - {file = "openhands_sdk-1.5.2.tar.gz", hash = "sha256:798aa8f8ccd84b15deb418c4301d00f33da288bc1a8d41efa5cc47c10aaf3fd6"}, + {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, + {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, ] [package.dependencies] @@ -5974,7 +5975,7 @@ deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" litellm = ">=1.80.7" -lmnr = ">=0.7.20" +lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" python-json-logger = ">=3.3.0" @@ -5986,14 +5987,14 @@ boto3 = ["boto3 (>=1.35.0)"] [[package]] name = "openhands-tools" -version = "1.5.2" +version = "1.6.0" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.5.2-py3-none-any.whl", hash = "sha256:33e9c2af65aaa7b6b9a10b42d2fb11137e6b35e7ac02a4b9269ef37b5c79cc01"}, - {file = "openhands_tools-1.5.2.tar.gz", hash = "sha256:4644a24144fbdf630fb0edc303526b4add61b3fbe7a7434da73f231312c34846"}, + {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, + {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, ] [package.dependencies] diff --git a/openhands/app_server/sandbox/sandbox_spec_service.py b/openhands/app_server/sandbox/sandbox_spec_service.py index edaecc1b76..fe9d1653a9 100644 --- a/openhands/app_server/sandbox/sandbox_spec_service.py +++ b/openhands/app_server/sandbox/sandbox_spec_service.py @@ -12,7 +12,7 @@ from openhands.sdk.utils.models import DiscriminatedUnionMixin # The version of the agent server to use for deployments. # Typically this will be the same as the values from the pyproject.toml -AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:8f90b92-python' +AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:97652be-python' class SandboxSpecService(ABC): diff --git a/poetry.lock b/poetry.lock index 04831cc890..23789d3285 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "aiofiles" @@ -5675,14 +5675,14 @@ utils = ["numpydoc"] [[package]] name = "lmnr" -version = "0.7.20" +version = "0.7.24" description = "Python SDK for Laminar" optional = false python-versions = "<4,>=3.10" groups = ["main"] files = [ - {file = "lmnr-0.7.20-py3-none-any.whl", hash = "sha256:5f9fa7444e6f96c25e097f66484ff29e632bdd1de0e9346948bf5595f4a8af38"}, - {file = "lmnr-0.7.20.tar.gz", hash = "sha256:1f484cd618db2d71af65f90a0b8b36d20d80dc91a5138b811575c8677bf7c4fd"}, + {file = "lmnr-0.7.24-py3-none-any.whl", hash = "sha256:ad780d4a62ece897048811f3368639c240a9329ab31027da8c96545137a3a08a"}, + {file = "lmnr-0.7.24.tar.gz", hash = "sha256:aa6973f46fc4ba95c9061c1feceb58afc02eb43c9376c21e32545371ff6123d7"}, ] [package.dependencies] @@ -5705,14 +5705,15 @@ tqdm = ">=4.0" [package.extras] alephalpha = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)"] -all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] +all = ["opentelemetry-instrumentation-alephalpha (>=0.47.1)", "opentelemetry-instrumentation-bedrock (>=0.47.1)", "opentelemetry-instrumentation-chromadb (>=0.47.1)", "opentelemetry-instrumentation-cohere (>=0.47.1)", "opentelemetry-instrumentation-crewai (>=0.47.1)", "opentelemetry-instrumentation-haystack (>=0.47.1)", "opentelemetry-instrumentation-lancedb (>=0.47.1)", "opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)", "opentelemetry-instrumentation-llamaindex (>=0.47.1)", "opentelemetry-instrumentation-marqo (>=0.47.1)", "opentelemetry-instrumentation-mcp (>=0.47.1)", "opentelemetry-instrumentation-milvus (>=0.47.1)", "opentelemetry-instrumentation-mistralai (>=0.47.1)", "opentelemetry-instrumentation-ollama (>=0.47.1)", "opentelemetry-instrumentation-pinecone (>=0.47.1)", "opentelemetry-instrumentation-qdrant (>=0.47.1)", "opentelemetry-instrumentation-replicate (>=0.47.1)", "opentelemetry-instrumentation-sagemaker (>=0.47.1)", "opentelemetry-instrumentation-together (>=0.47.1)", "opentelemetry-instrumentation-transformers (>=0.47.1)", "opentelemetry-instrumentation-vertexai (>=0.47.1)", "opentelemetry-instrumentation-watsonx (>=0.47.1)", "opentelemetry-instrumentation-weaviate (>=0.47.1)"] bedrock = ["opentelemetry-instrumentation-bedrock (>=0.47.1)"] chromadb = ["opentelemetry-instrumentation-chromadb (>=0.47.1)"] +claude-agent-sdk = ["lmnr-claude-code-proxy (>=0.1.0a5)"] cohere = ["opentelemetry-instrumentation-cohere (>=0.47.1)"] crewai = ["opentelemetry-instrumentation-crewai (>=0.47.1)"] haystack = ["opentelemetry-instrumentation-haystack (>=0.47.1)"] lancedb = ["opentelemetry-instrumentation-lancedb (>=0.47.1)"] -langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1)"] +langchain = ["opentelemetry-instrumentation-langchain (>=0.47.1,<0.48.0)"] llamaindex = ["opentelemetry-instrumentation-llamaindex (>=0.47.1)"] marqo = ["opentelemetry-instrumentation-marqo (>=0.47.1)"] mcp = ["opentelemetry-instrumentation-mcp (>=0.47.1)"] @@ -7379,14 +7380,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.5.2" +version = "1.6.0" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.5.2-py3-none-any.whl", hash = "sha256:7a368f61036f85446f566b9f6f9d6c7318684776cf2293daa5bce3ee19ac077d"}, - {file = "openhands_agent_server-1.5.2.tar.gz", hash = "sha256:dfaf5583dd71dae933643a8f8160156ce6fa7ed20db5cc3c45465b079bc576cd"}, + {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, + {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, ] [package.dependencies] @@ -7403,14 +7404,14 @@ wsproto = ">=1.2.0" [[package]] name = "openhands-sdk" -version = "1.5.2" +version = "1.6.0" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.5.2-py3-none-any.whl", hash = "sha256:593430e9c8729e345fce3fca7e9a9a7ef084a08222d6ba42113e6ba5f6e9f15d"}, - {file = "openhands_sdk-1.5.2.tar.gz", hash = "sha256:798aa8f8ccd84b15deb418c4301d00f33da288bc1a8d41efa5cc47c10aaf3fd6"}, + {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, + {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, ] [package.dependencies] @@ -7418,7 +7419,7 @@ deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" litellm = ">=1.80.7" -lmnr = ">=0.7.20" +lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" python-json-logger = ">=3.3.0" @@ -7430,14 +7431,14 @@ boto3 = ["boto3 (>=1.35.0)"] [[package]] name = "openhands-tools" -version = "1.5.2" +version = "1.6.0" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.5.2-py3-none-any.whl", hash = "sha256:33e9c2af65aaa7b6b9a10b42d2fb11137e6b35e7ac02a4b9269ef37b5c79cc01"}, - {file = "openhands_tools-1.5.2.tar.gz", hash = "sha256:4644a24144fbdf630fb0edc303526b4add61b3fbe7a7434da73f231312c34846"}, + {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, + {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, ] [package.dependencies] @@ -16822,4 +16823,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api [metadata] lock-version = "2.1" python-versions = "^3.12,<3.14" -content-hash = "9ec48649a3b54d1c19d2aae9af77c640e9eadbc6a368ef437a5655f14fc2a37a" +content-hash = "9764f3b69ec8ed35feebd78a826bbc6bfa4ac6d5b56bc999be8bc738b644e538" diff --git a/pyproject.toml b/pyproject.toml index dc2c52a112..c70c110dcc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -116,9 +116,9 @@ pybase62 = "^1.0.0" #openhands-agent-server = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-agent-server", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-sdk = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-sdk", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-tools = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-tools", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } -openhands-sdk = "1.5.2" -openhands-agent-server = "1.5.2" -openhands-tools = "1.5.2" +openhands-sdk = "1.6.0" +openhands-agent-server = "1.6.0" +openhands-tools = "1.6.0" python-jose = { version = ">=3.3", extras = [ "cryptography" ] } sqlalchemy = { extras = [ "asyncio" ], version = "^2.0.40" } pg8000 = "^1.31.5" From dc14624480db3f427f9844cb3e26b6ca696e9f7e Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 16 Dec 2025 20:35:46 -0700 Subject: [PATCH 18/80] Fix for frontend stall (#12069) --- frontend/src/utils/parse-terminal-output.ts | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/frontend/src/utils/parse-terminal-output.ts b/frontend/src/utils/parse-terminal-output.ts index a6ccc73cfc..1cd54eb858 100644 --- a/frontend/src/utils/parse-terminal-output.ts +++ b/frontend/src/utils/parse-terminal-output.ts @@ -1,3 +1,5 @@ +const START = "[Python Interpreter: "; + /** * Parses the raw output from the terminal into the command and symbol * @param raw The raw output to be displayed in the terminal @@ -13,9 +15,14 @@ * console.log(parsed.symbol); // openhands@659478cb008c:/workspace $ */ export const parseTerminalOutput = (raw: string) => { - const envRegex = /(.*)\[Python Interpreter: (.*)\]/s; - const match = raw.match(envRegex); - - if (!match) return raw; - return match[1]?.trim() || ""; + const start = raw.indexOf(START); + if (start < 0) { + return raw; + } + const offset = start + START.length; + const end = raw.indexOf("]", offset); + if (end <= offset) { + return raw; + } + return raw.substring(0, start).trim(); }; From 435e53769329578e3e14d6fcbfd6fc660e63340b Mon Sep 17 00:00:00 2001 From: Nhan Nguyen Date: Wed, 17 Dec 2025 07:05:10 -0500 Subject: [PATCH 19/80] fix: Prevent old instructions from being re-executed after conversation condensation (#11982) --- .../agenthub/codeact_agent/codeact_agent.py | 15 +- openhands/memory/conversation_memory.py | 41 ++++- openhands/memory/view.py | 3 + tests/unit/agenthub/test_agents.py | 8 +- tests/unit/agenthub/test_prompt_caching.py | 4 +- tests/unit/memory/test_conversation_memory.py | 144 +++++++++++++++++- 6 files changed, 197 insertions(+), 18 deletions(-) diff --git a/openhands/agenthub/codeact_agent/codeact_agent.py b/openhands/agenthub/codeact_agent/codeact_agent.py index 85e5f88cbc..9dd814e9cf 100644 --- a/openhands/agenthub/codeact_agent/codeact_agent.py +++ b/openhands/agenthub/codeact_agent/codeact_agent.py @@ -194,9 +194,12 @@ class CodeActAgent(Agent): # event we'll just return that instead of an action. The controller will # immediately ask the agent to step again with the new view. condensed_history: list[Event] = [] + # Track which event IDs have been forgotten/condensed + forgotten_event_ids: set[int] = set() match self.condenser.condensed_history(state): - case View(events=events): + case View(events=events, forgotten_event_ids=forgotten_ids): condensed_history = events + forgotten_event_ids = forgotten_ids case Condensation(action=condensation_action): return condensation_action @@ -206,7 +209,9 @@ class CodeActAgent(Agent): ) initial_user_message = self._get_initial_user_message(state.history) - messages = self._get_messages(condensed_history, initial_user_message) + messages = self._get_messages( + condensed_history, initial_user_message, forgotten_event_ids + ) params: dict = { 'messages': messages, } @@ -245,7 +250,10 @@ class CodeActAgent(Agent): return initial_user_message def _get_messages( - self, events: list[Event], initial_user_message: MessageAction + self, + events: list[Event], + initial_user_message: MessageAction, + forgotten_event_ids: set[int], ) -> list[Message]: """Constructs the message history for the LLM conversation. @@ -284,6 +292,7 @@ class CodeActAgent(Agent): messages = self.conversation_memory.process_events( condensed_history=events, initial_user_action=initial_user_message, + forgotten_event_ids=forgotten_event_ids, max_message_chars=self.llm.config.max_message_chars, vision_is_active=self.llm.vision_is_active(), ) diff --git a/openhands/memory/conversation_memory.py b/openhands/memory/conversation_memory.py index 5ff6ec7e58..5ae1a2cd71 100644 --- a/openhands/memory/conversation_memory.py +++ b/openhands/memory/conversation_memory.py @@ -76,6 +76,7 @@ class ConversationMemory: self, condensed_history: list[Event], initial_user_action: MessageAction, + forgotten_event_ids: set[int] | None = None, max_message_chars: int | None = None, vision_is_active: bool = False, ) -> list[Message]: @@ -85,16 +86,23 @@ class ConversationMemory: Args: condensed_history: The condensed history of events to convert + initial_user_action: The initial user message action, if available. Used to ensure the conversation starts correctly. + forgotten_event_ids: Set of event IDs that have been forgotten/condensed. If the initial user action's ID + is in this set, it will not be re-inserted to prevent re-execution of old instructions. max_message_chars: The maximum number of characters in the content of an event included in the prompt to the LLM. Larger observations are truncated. vision_is_active: Whether vision is active in the LLM. If True, image URLs will be included. - initial_user_action: The initial user message action, if available. Used to ensure the conversation starts correctly. """ events = condensed_history + # Default to empty set if not provided + if forgotten_event_ids is None: + forgotten_event_ids = set() # Ensure the event list starts with SystemMessageAction, then MessageAction(source='user') self._ensure_system_message(events) - self._ensure_initial_user_message(events, initial_user_action) + self._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) # log visual browsing status logger.debug(f'Visual browsing: {self.agent_config.enable_som_visual_browsing}') @@ -827,9 +835,23 @@ class ConversationMemory: ) def _ensure_initial_user_message( - self, events: list[Event], initial_user_action: MessageAction + self, + events: list[Event], + initial_user_action: MessageAction, + forgotten_event_ids: set[int], ) -> None: - """Checks if the second event is a user MessageAction and inserts the provided one if needed.""" + """Checks if the second event is a user MessageAction and inserts the provided one if needed. + + IMPORTANT: If the initial user action has been condensed (its ID is in forgotten_event_ids), + we do NOT re-insert it. This prevents old instructions from being re-executed after + conversation condensation. The condensation summary already contains the context of + what was requested and completed. + + Args: + events: The list of events to modify in-place + initial_user_action: The initial user message action from the full history + forgotten_event_ids: Set of event IDs that have been forgotten/condensed + """ if ( not events ): # Should have system message from previous step, but safety check @@ -837,6 +859,17 @@ class ConversationMemory: # Or raise? Let's log for now, _ensure_system_message should handle this. return + # Check if the initial user action has been condensed/forgotten. + # If so, we should NOT re-insert it to prevent re-execution of old instructions. + # The condensation summary already contains the context of what was requested. + initial_user_action_id = initial_user_action.id + if initial_user_action_id in forgotten_event_ids: + logger.info( + f'Initial user action (id={initial_user_action_id}) has been condensed. ' + 'Not re-inserting to prevent re-execution of old instructions.' + ) + return + # We expect events[0] to be SystemMessageAction after _ensure_system_message if len(events) == 1: # Only system message exists diff --git a/openhands/memory/view.py b/openhands/memory/view.py index 87a20b6340..81dd8bab5d 100644 --- a/openhands/memory/view.py +++ b/openhands/memory/view.py @@ -18,6 +18,8 @@ class View(BaseModel): events: list[Event] unhandled_condensation_request: bool = False + # Set of event IDs that have been forgotten/condensed + forgotten_event_ids: set[int] = set() def __len__(self) -> int: return len(self.events) @@ -90,4 +92,5 @@ class View(BaseModel): return View( events=kept_events, unhandled_condensation_request=unhandled_condensation_request, + forgotten_event_ids=forgotten_event_ids, ) diff --git a/tests/unit/agenthub/test_agents.py b/tests/unit/agenthub/test_agents.py index 2a90dcb668..09f28e991c 100644 --- a/tests/unit/agenthub/test_agents.py +++ b/tests/unit/agenthub/test_agents.py @@ -393,7 +393,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # 2. The action message # 3. The observation message mock_state.history = [initial_user_message, action, observation] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert len(messages) == 4 # System + initial user + action + observation assert messages[0].role == 'system' # First message should be the system message assert ( @@ -404,7 +404,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # The same should hold if the events are presented out-of-order mock_state.history = [initial_user_message, observation, action] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert len(messages) == 4 assert messages[0].role == 'system' # First message should be the system message assert ( @@ -414,7 +414,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( # If only one of the two events is present, then we should just get the system message # plus any valid message from the event mock_state.history = [initial_user_message, action] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert ( len(messages) == 2 ) # System + initial user message, action is waiting for its observation @@ -422,7 +422,7 @@ def test_mismatched_tool_call_events_and_auto_add_system_message( assert messages[1].role == 'user' mock_state.history = [initial_user_message, observation] - messages = agent._get_messages(mock_state.history, initial_user_message) + messages = agent._get_messages(mock_state.history, initial_user_message, set()) assert ( len(messages) == 2 ) # System + initial user message, observation has no matching action diff --git a/tests/unit/agenthub/test_prompt_caching.py b/tests/unit/agenthub/test_prompt_caching.py index 60cc0bb16f..2435b1320a 100644 --- a/tests/unit/agenthub/test_prompt_caching.py +++ b/tests/unit/agenthub/test_prompt_caching.py @@ -80,7 +80,7 @@ def test_get_messages(codeact_agent: CodeActAgent): history.append(message_action_5) codeact_agent.reset() - messages = codeact_agent._get_messages(history, message_action_1) + messages = codeact_agent._get_messages(history, message_action_1, set()) assert ( len(messages) == 6 @@ -122,7 +122,7 @@ def test_get_messages_prompt_caching(codeact_agent: CodeActAgent): history.append(message_action_agent) codeact_agent.reset() - messages = codeact_agent._get_messages(history, initial_user_message) + messages = codeact_agent._get_messages(history, initial_user_message, set()) # Check that only the last two user messages have cache_prompt=True cached_user_messages = [ diff --git a/tests/unit/memory/test_conversation_memory.py b/tests/unit/memory/test_conversation_memory.py index abaa8d9a3d..50fd48f49a 100644 --- a/tests/unit/memory/test_conversation_memory.py +++ b/tests/unit/memory/test_conversation_memory.py @@ -158,7 +158,8 @@ def test_ensure_initial_user_message_adds_if_only_system( system_message = SystemMessageAction(content='System') system_message._source = EventSource.AGENT events = [system_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 2 assert events[0] == system_message assert events[1] == initial_user_action @@ -177,7 +178,8 @@ def test_ensure_initial_user_message_correct_already_present( agent_message, ] original_events = list(events) - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert events == original_events @@ -189,7 +191,8 @@ def test_ensure_initial_user_message_incorrect_at_index_1( incorrect_second_message = MessageAction(content='Assistant') incorrect_second_message._source = EventSource.AGENT events = [system_message, incorrect_second_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 3 assert events[0] == system_message assert events[1] == initial_user_action # Correct one inserted @@ -206,7 +209,8 @@ def test_ensure_initial_user_message_correct_present_later( # Correct initial message is present, but later in the list events = [system_message, incorrect_second_message] conversation_memory._ensure_system_message(events) - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 3 # Should still insert at index 1, not remove the later one assert events[0] == system_message assert events[1] == initial_user_action # Correct one inserted at index 1 @@ -222,7 +226,8 @@ def test_ensure_initial_user_message_different_user_msg_at_index_1( different_user_message = MessageAction(content='Different User Message') different_user_message._source = EventSource.USER events = [system_message, different_user_message] - conversation_memory._ensure_initial_user_message(events, initial_user_action) + # Pass empty set for forgotten_event_ids (no events have been condensed) + conversation_memory._ensure_initial_user_message(events, initial_user_action, set()) assert len(events) == 2 assert events[0] == system_message assert events[1] == different_user_message # Original second message remains @@ -1583,3 +1588,132 @@ def test_process_ipython_observation_with_vision_disabled( assert isinstance(message.content[1], ImageContent) # Check that NO explanatory text about filtered images was added when vision is disabled assert 'invalid or empty image(s) were filtered' not in message.content[0].text + + +def test_ensure_initial_user_message_not_reinserted_when_condensed( + conversation_memory, initial_user_action +): + """Test that initial user message is NOT re-inserted when it has been condensed. + + This is a critical test for bug #11910: Old instructions should not be re-executed + after conversation condensation. If the initial user message has been condensed + (its ID is in the forgotten_event_ids set), we should NOT re-insert it to prevent + the LLM from seeing old instructions as fresh commands. + """ + system_message = SystemMessageAction(content='System') + system_message._source = EventSource.AGENT + + # Simulate that the initial_user_action has been condensed by adding its ID + # to the forgotten_event_ids set + initial_user_action._id = 1 # Assign an ID to the initial user action + forgotten_event_ids = {1} # The initial user action's ID is in the forgotten set + + events = [system_message] # Only system message, no user message + + # Call _ensure_initial_user_message with the condensed event ID + conversation_memory._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) + + # The initial user action should NOT be inserted because it was condensed + assert len(events) == 1 + assert events[0] == system_message + # Verify the initial user action was NOT added + assert initial_user_action not in events + + +def test_ensure_initial_user_message_reinserted_when_not_condensed( + conversation_memory, initial_user_action +): + """Test that initial user message IS re-inserted when it has NOT been condensed. + + This ensures backward compatibility: when no condensation has happened, + the initial user message should still be inserted as before. + """ + system_message = SystemMessageAction(content='System') + system_message._source = EventSource.AGENT + + # The initial user action has NOT been condensed + initial_user_action._id = 1 + forgotten_event_ids = {5, 10, 15} # Different IDs, not including the initial action + + events = [system_message] + + # Call _ensure_initial_user_message with non-matching forgotten IDs + conversation_memory._ensure_initial_user_message( + events, initial_user_action, forgotten_event_ids + ) + + # The initial user action SHOULD be inserted because it was NOT condensed + assert len(events) == 2 + assert events[0] == system_message + assert events[1] == initial_user_action + + +def test_process_events_does_not_reinsert_condensed_initial_message( + conversation_memory, +): + """Test that process_events does not re-insert initial user message when condensed. + + This is an integration test for the full process_events flow, verifying that + when the initial user message has been condensed, it is not re-inserted into + the conversation sent to the LLM. + """ + # Create a system message + system_message = SystemMessageAction(content='System message') + system_message._source = EventSource.AGENT + system_message._id = 0 + + # Create the initial user message (will be marked as condensed) + initial_user_message = MessageAction(content='Do task A, B, and C') + initial_user_message._source = EventSource.USER + initial_user_message._id = 1 + + # Create a condensation summary observation + from openhands.events.observation.agent import AgentCondensationObservation + + condensation_summary = AgentCondensationObservation( + content='Summary: User requested tasks A, B, C. Task A was completed successfully.' + ) + condensation_summary._id = 2 + + # Create a recent user message (not condensed) + recent_user_message = MessageAction(content='Now continue with task D') + recent_user_message._source = EventSource.USER + recent_user_message._id = 3 + + # Simulate condensed history: system + summary + recent message + # The initial user message (id=1) has been condensed/forgotten + condensed_history = [system_message, condensation_summary, recent_user_message] + + # The initial user message's ID is in the forgotten set + forgotten_event_ids = {1} + + messages = conversation_memory.process_events( + condensed_history=condensed_history, + initial_user_action=initial_user_message, + forgotten_event_ids=forgotten_event_ids, + max_message_chars=None, + vision_is_active=False, + ) + + # Verify the structure of messages + # Should have: system, condensation summary, recent user message + # Should NOT have the initial user message "Do task A, B, and C" + assert len(messages) == 3 + assert messages[0].role == 'system' + assert messages[0].content[0].text == 'System message' + + # The second message should be the condensation summary, NOT the initial user message + assert messages[1].role == 'user' + assert 'Summary: User requested tasks A, B, C' in messages[1].content[0].text + + # The third message should be the recent user message + assert messages[2].role == 'user' + assert 'Now continue with task D' in messages[2].content[0].text + + # Critically, the old instruction should NOT appear + for msg in messages: + for content in msg.content: + if hasattr(content, 'text'): + assert 'Do task A, B, and C' not in content.text From 2c83e419dc1c3a77a1da7a328cd850e9f7ee0e0c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 17 Dec 2025 19:16:54 +0400 Subject: [PATCH 20/80] chore(deps): bump the version-all group across 1 directory with 5 updates (#12071) --- frontend/package-lock.json | 159 +++++++++++++++++-------------------- frontend/package.json | 8 +- 2 files changed, 75 insertions(+), 92 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 08011449b8..e130cad40f 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -30,7 +30,7 @@ "isbot": "^5.1.32", "lucide-react": "^0.561.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.306.1", + "posthog-js": "^1.309.0", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -58,13 +58,13 @@ "@testing-library/jest-dom": "^6.9.1", "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^25.0.2", + "@types/node": "^25.0.3", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", - "@vitest/coverage-v8": "^4.0.14", + "@vitest/coverage-v8": "^4.0.16", "cross-env": "^10.1.0", "eslint": "^8.57.0", "eslint-config-airbnb": "^19.0.4", @@ -85,7 +85,7 @@ "tailwindcss": "^4.1.8", "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^6.0.1", + "vite-tsconfig-paths": "^6.0.2", "vitest": "^4.0.14" }, "engines": { @@ -3192,10 +3192,9 @@ "license": "MIT" }, "node_modules/@posthog/core": { - "version": "1.7.1", - "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.7.1.tgz", - "integrity": "sha512-kjK0eFMIpKo9GXIbts8VtAknsoZ18oZorANdtuTj1CbgS28t4ZVq//HAWhnxEuXRTrtkd+SUJ6Ux3j2Af8NCuA==", - "license": "MIT", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.8.0.tgz", + "integrity": "sha512-SfmG1EdbR+2zpQccgBUxM/snCROB9WGkY7VH1r9iaoTNqoaN9IkmIEA/07cZLY4DxVP8jt6Vdfe3s84xksac1g==", "dependencies": { "cross-spawn": "^7.0.6" } @@ -4949,11 +4948,10 @@ "license": "MIT" }, "node_modules/@standard-schema/spec": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.0.0.tgz", - "integrity": "sha512-m2bOd0f2RT9k8QJx1JN85cZYyH1RqFBdlwtkSlf4tBDYLCiiZnv1fIIwacK6cqwXavOydf0NPToMQgpKq+dVlA==", - "dev": true, - "license": "MIT" + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@standard-schema/spec/-/spec-1.1.0.tgz", + "integrity": "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==", + "dev": true }, "node_modules/@svgr/babel-plugin-add-jsx-attribute": { "version": "8.0.0", @@ -5684,7 +5682,6 @@ "resolved": "https://registry.npmjs.org/@types/chai/-/chai-5.2.3.tgz", "integrity": "sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==", "dev": true, - "license": "MIT", "dependencies": { "@types/deep-eql": "*", "assertion-error": "^2.0.1" @@ -5703,8 +5700,7 @@ "version": "4.0.2", "resolved": "https://registry.npmjs.org/@types/deep-eql/-/deep-eql-4.0.2.tgz", "integrity": "sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@types/estree": { "version": "1.0.8", @@ -5759,9 +5755,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "25.0.2", - "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.2.tgz", - "integrity": "sha512-gWEkeiyYE4vqjON/+Obqcoeffmk0NF15WSBwSs7zwVA2bAbTaE0SJ7P0WNGoJn8uE7fiaV5a7dKYIJriEqOrmA==", + "version": "25.0.3", + "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz", + "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==", "devOptional": true, "dependencies": { "undici-types": "~7.16.0" @@ -6239,14 +6235,13 @@ "license": "ISC" }, "node_modules/@vitest/coverage-v8": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.15.tgz", - "integrity": "sha512-FUJ+1RkpTFW7rQITdgTi93qOCWJobWhBirEPCeXh2SW2wsTlFxy51apDz5gzG+ZEYt/THvWeNmhdAoS9DTwpCw==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.16.tgz", + "integrity": "sha512-2rNdjEIsPRzsdu6/9Eq0AYAzYdpP6Bx9cje9tL3FE5XzXRQF1fNU9pe/1yE8fCrS0HD+fBtt6gLPh6LI57tX7A==", "dev": true, - "license": "MIT", "dependencies": { "@bcoe/v8-coverage": "^1.0.2", - "@vitest/utils": "4.0.15", + "@vitest/utils": "4.0.16", "ast-v8-to-istanbul": "^0.3.8", "istanbul-lib-coverage": "^3.2.2", "istanbul-lib-report": "^3.0.1", @@ -6261,8 +6256,8 @@ "url": "https://opencollective.com/vitest" }, "peerDependencies": { - "@vitest/browser": "4.0.15", - "vitest": "4.0.15" + "@vitest/browser": "4.0.16", + "vitest": "4.0.16" }, "peerDependenciesMeta": { "@vitest/browser": { @@ -6271,16 +6266,15 @@ } }, "node_modules/@vitest/expect": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.15.tgz", - "integrity": "sha512-Gfyva9/GxPAWXIWjyGDli9O+waHDC0Q0jaLdFP1qPAUUfo1FEXPXUfUkp3eZA0sSq340vPycSyOlYUeM15Ft1w==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-4.0.16.tgz", + "integrity": "sha512-eshqULT2It7McaJkQGLkPjPjNph+uevROGuIMJdG3V+0BSR2w9u6J9Lwu+E8cK5TETlfou8GRijhafIMhXsimA==", "dev": true, - "license": "MIT", "dependencies": { "@standard-schema/spec": "^1.0.0", "@types/chai": "^5.2.2", - "@vitest/spy": "4.0.15", - "@vitest/utils": "4.0.15", + "@vitest/spy": "4.0.16", + "@vitest/utils": "4.0.16", "chai": "^6.2.1", "tinyrainbow": "^3.0.3" }, @@ -6289,13 +6283,12 @@ } }, "node_modules/@vitest/mocker": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.15.tgz", - "integrity": "sha512-CZ28GLfOEIFkvCFngN8Sfx5h+Se0zN+h4B7yOsPVCcgtiO7t5jt9xQh2E1UkFep+eb9fjyMfuC5gBypwb07fvQ==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/mocker/-/mocker-4.0.16.tgz", + "integrity": "sha512-yb6k4AZxJTB+q9ycAvsoxGn+j/po0UaPgajllBgt1PzoMAAmJGYFdDk0uCcRcxb3BrME34I6u8gHZTQlkqSZpg==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/spy": "4.0.15", + "@vitest/spy": "4.0.16", "estree-walker": "^3.0.3", "magic-string": "^0.30.21" }, @@ -6316,11 +6309,10 @@ } }, "node_modules/@vitest/pretty-format": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.15.tgz", - "integrity": "sha512-SWdqR8vEv83WtZcrfLNqlqeQXlQLh2iilO1Wk1gv4eiHKjEzvgHb2OVc3mIPyhZE6F+CtfYjNlDJwP5MN6Km7A==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-4.0.16.tgz", + "integrity": "sha512-eNCYNsSty9xJKi/UdVD8Ou16alu7AYiS2fCPRs0b1OdhJiV89buAXQLpTbe+X8V9L6qrs9CqyvU7OaAopJYPsA==", "dev": true, - "license": "MIT", "dependencies": { "tinyrainbow": "^3.0.3" }, @@ -6329,13 +6321,12 @@ } }, "node_modules/@vitest/runner": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.15.tgz", - "integrity": "sha512-+A+yMY8dGixUhHmNdPUxOh0la6uVzun86vAbuMT3hIDxMrAOmn5ILBHm8ajrqHE0t8R9T1dGnde1A5DTnmi3qw==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-4.0.16.tgz", + "integrity": "sha512-VWEDm5Wv9xEo80ctjORcTQRJ539EGPB3Pb9ApvVRAY1U/WkHXmmYISqU5E79uCwcW7xYUV38gwZD+RV755fu3Q==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/utils": "4.0.15", + "@vitest/utils": "4.0.16", "pathe": "^2.0.3" }, "funding": { @@ -6346,17 +6337,15 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@vitest/snapshot": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.15.tgz", - "integrity": "sha512-A7Ob8EdFZJIBjLjeO0DZF4lqR6U7Ydi5/5LIZ0xcI+23lYlsYJAfGn8PrIWTYdZQRNnSRlzhg0zyGu37mVdy5g==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-4.0.16.tgz", + "integrity": "sha512-sf6NcrYhYBsSYefxnry+DR8n3UV4xWZwWxYbCJUt2YdvtqzSPR7VfGrY0zsv090DAbjFZsi7ZaMi1KnSRyK1XA==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.15", + "@vitest/pretty-format": "4.0.16", "magic-string": "^0.30.21", "pathe": "^2.0.3" }, @@ -6368,27 +6357,24 @@ "version": "2.0.3", "resolved": "https://registry.npmjs.org/pathe/-/pathe-2.0.3.tgz", "integrity": "sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==", - "dev": true, - "license": "MIT" + "dev": true }, "node_modules/@vitest/spy": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.15.tgz", - "integrity": "sha512-+EIjOJmnY6mIfdXtE/bnozKEvTC4Uczg19yeZ2vtCz5Yyb0QQ31QWVQ8hswJ3Ysx/K2EqaNsVanjr//2+P3FHw==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-4.0.16.tgz", + "integrity": "sha512-4jIOWjKP0ZUaEmJm00E0cOBLU+5WE0BpeNr3XN6TEF05ltro6NJqHWxXD0kA8/Zc8Nh23AT8WQxwNG+WeROupw==", "dev": true, - "license": "MIT", "funding": { "url": "https://opencollective.com/vitest" } }, "node_modules/@vitest/utils": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.15.tgz", - "integrity": "sha512-HXjPW2w5dxhTD0dLwtYHDnelK3j8sR8cWIaLxr22evTyY6q8pRCjZSmhRWVjBaOVXChQd6AwMzi9pucorXCPZA==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-4.0.16.tgz", + "integrity": "sha512-h8z9yYhV3e1LEfaQ3zdypIrnAg/9hguReGZoS7Gl0aBG5xgA410zBqECqmaF/+RkTggRsfnzc1XaAHA6bmUufA==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/pretty-format": "4.0.15", + "@vitest/pretty-format": "4.0.16", "tinyrainbow": "^3.0.3" }, "funding": { @@ -6729,7 +6715,6 @@ "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-2.0.1.tgz", "integrity": "sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==", "dev": true, - "license": "MIT", "engines": { "node": ">=12" } @@ -7125,7 +7110,6 @@ "resolved": "https://registry.npmjs.org/chai/-/chai-6.2.1.tgz", "integrity": "sha512-p4Z49OGG5W/WBCPSS/dH3jQ73kD6tiMmUM+bckNK6Jr5JHMG3k9bg/BvKR8lKmtVBKmOiuVaV2ws8s9oSbwysg==", "dev": true, - "license": "MIT", "engines": { "node": ">=18" } @@ -13419,11 +13403,11 @@ } }, "node_modules/posthog-js": { - "version": "1.306.1", - "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.306.1.tgz", - "integrity": "sha512-wO7bliv/5tlAlfoKCUzwkGXZVNexk0dHigMf9tNp0q1rzs62wThogREY7Tz7h/iWKYiuXy1RumtVlTmHuBXa1w==", + "version": "1.309.0", + "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.309.0.tgz", + "integrity": "sha512-SmFF0uKX3tNTgQOW4mR4shGLQ0YYG0FXyKTz13SbIH83/FtAJedppOIL7s0y9e7rjogBh6LsPekphhchs9Kh1Q==", "dependencies": { - "@posthog/core": "1.7.1", + "@posthog/core": "1.8.0", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", @@ -16127,9 +16111,9 @@ } }, "node_modules/vite-tsconfig-paths": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-6.0.1.tgz", - "integrity": "sha512-OQuYkfCQhc2T+n//0N7/oogTosgiSyZQ7dydrpUlH5SbnFTtplpekdY4GMi6jDwEpiwWlqeUJMyPfC2ePM1+2A==", + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/vite-tsconfig-paths/-/vite-tsconfig-paths-6.0.2.tgz", + "integrity": "sha512-c06LOO8fWB5RuEPpEIHXU9t7Dt4DoiPIljnKws9UygIaQo6PoFKawTftz5/QVcO+6pOs/HHWycnq4UrZkWVYnQ==", "dev": true, "dependencies": { "debug": "^4.1.1", @@ -16189,19 +16173,18 @@ } }, "node_modules/vitest": { - "version": "4.0.15", - "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.15.tgz", - "integrity": "sha512-n1RxDp8UJm6N0IbJLQo+yzLZ2sQCDyl1o0LeugbPWf8+8Fttp29GghsQBjYJVmWq3gBFfe9Hs1spR44vovn2wA==", + "version": "4.0.16", + "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz", + "integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==", "dev": true, - "license": "MIT", "dependencies": { - "@vitest/expect": "4.0.15", - "@vitest/mocker": "4.0.15", - "@vitest/pretty-format": "4.0.15", - "@vitest/runner": "4.0.15", - "@vitest/snapshot": "4.0.15", - "@vitest/spy": "4.0.15", - "@vitest/utils": "4.0.15", + "@vitest/expect": "4.0.16", + "@vitest/mocker": "4.0.16", + "@vitest/pretty-format": "4.0.16", + "@vitest/runner": "4.0.16", + "@vitest/snapshot": "4.0.16", + "@vitest/spy": "4.0.16", + "@vitest/utils": "4.0.16", "es-module-lexer": "^1.7.0", "expect-type": "^1.2.2", "magic-string": "^0.30.21", @@ -16229,10 +16212,10 @@ "@edge-runtime/vm": "*", "@opentelemetry/api": "^1.9.0", "@types/node": "^20.0.0 || ^22.0.0 || >=24.0.0", - "@vitest/browser-playwright": "4.0.15", - "@vitest/browser-preview": "4.0.15", - "@vitest/browser-webdriverio": "4.0.15", - "@vitest/ui": "4.0.15", + "@vitest/browser-playwright": "4.0.16", + "@vitest/browser-preview": "4.0.16", + "@vitest/browser-webdriverio": "4.0.16", + "@vitest/ui": "4.0.16", "happy-dom": "*", "jsdom": "*" }, diff --git a/frontend/package.json b/frontend/package.json index 7dc0c5bcfb..90636fed77 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -29,7 +29,7 @@ "isbot": "^5.1.32", "lucide-react": "^0.561.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.306.1", + "posthog-js": "^1.309.0", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -89,13 +89,13 @@ "@testing-library/jest-dom": "^6.9.1", "@testing-library/react": "^16.3.1", "@testing-library/user-event": "^14.6.1", - "@types/node": "^25.0.2", + "@types/node": "^25.0.3", "@types/react": "^19.2.7", "@types/react-dom": "^19.2.3", "@types/react-syntax-highlighter": "^15.5.13", "@typescript-eslint/eslint-plugin": "^7.18.0", "@typescript-eslint/parser": "^7.18.0", - "@vitest/coverage-v8": "^4.0.14", + "@vitest/coverage-v8": "^4.0.16", "cross-env": "^10.1.0", "eslint": "^8.57.0", "eslint-config-airbnb": "^19.0.4", @@ -116,7 +116,7 @@ "tailwindcss": "^4.1.8", "typescript": "^5.9.3", "vite-plugin-svgr": "^4.5.0", - "vite-tsconfig-paths": "^6.0.1", + "vite-tsconfig-paths": "^6.0.2", "vitest": "^4.0.14" }, "packageManager": "npm@10.5.0", From 060761437299315f67e03bdea2315a34f04047bf Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Wed, 17 Dec 2025 22:29:18 +0700 Subject: [PATCH 21/80] feat(frontend): add refresh button to changes tab (#12036) Co-authored-by: Tim O'Farrell --- .../conversation-tab-title.test.tsx | 149 ++++++++++++++++++ .../conversation-tab-content.tsx | 34 +++- .../conversation-tab-title.tsx | 24 ++- .../query/use-unified-get-git-changes.ts | 1 + frontend/src/icons/u-refresh.svg | 3 + 5 files changed, 209 insertions(+), 2 deletions(-) create mode 100644 frontend/__tests__/components/conversation-tab-title.test.tsx create mode 100644 frontend/src/icons/u-refresh.svg diff --git a/frontend/__tests__/components/conversation-tab-title.test.tsx b/frontend/__tests__/components/conversation-tab-title.test.tsx new file mode 100644 index 0000000000..4e3a0aa0fe --- /dev/null +++ b/frontend/__tests__/components/conversation-tab-title.test.tsx @@ -0,0 +1,149 @@ +import { render, screen, waitFor } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { describe, expect, it, vi, beforeEach, afterEach } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { ConversationTabTitle } from "#/components/features/conversation/conversation-tabs/conversation-tab-title"; +import GitService from "#/api/git-service/git-service.api"; +import V1GitService from "#/api/git-service/v1-git-service.api"; + +// Mock the services that the hook depends on +vi.mock("#/api/git-service/git-service.api"); +vi.mock("#/api/git-service/v1-git-service.api"); + +// Mock the hooks that useUnifiedGetGitChanges depends on +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ + conversationId: "test-conversation-id", + }), +})); + +vi.mock("#/hooks/query/use-active-conversation", () => ({ + useActiveConversation: () => ({ + data: { + conversation_version: "V0", + url: null, + session_api_key: null, + selected_repository: null, + }, + }), +})); + +vi.mock("#/hooks/use-runtime-is-ready", () => ({ + useRuntimeIsReady: () => true, +})); + +vi.mock("#/utils/get-git-path", () => ({ + getGitPath: () => "/workspace", +})); + +describe("ConversationTabTitle", () => { + let queryClient: QueryClient; + + beforeEach(() => { + queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }); + + // Mock GitService methods + vi.mocked(GitService.getGitChanges).mockResolvedValue([]); + vi.mocked(V1GitService.getGitChanges).mockResolvedValue([]); + }); + + afterEach(() => { + vi.clearAllMocks(); + queryClient.clear(); + }); + + const renderWithProviders = (ui: React.ReactElement) => { + return render( + {ui}, + ); + }; + + describe("Rendering", () => { + it("should render the title", () => { + // Arrange + const title = "Test Title"; + + // Act + renderWithProviders( + , + ); + + // Assert + expect(screen.getByText(title)).toBeInTheDocument(); + }); + + it("should show refresh button when conversationKey is 'editor'", () => { + // Arrange + const title = "Changes"; + + // Act + renderWithProviders( + , + ); + + // Assert + const refreshButton = screen.getByRole("button"); + expect(refreshButton).toBeInTheDocument(); + }); + + it("should not show refresh button when conversationKey is not 'editor'", () => { + // Arrange + const title = "Browser"; + + // Act + renderWithProviders( + , + ); + + // Assert + expect(screen.queryByRole("button")).not.toBeInTheDocument(); + }); + }); + + describe("User Interactions", () => { + it("should call refetch and trigger GitService.getGitChanges when refresh button is clicked", async () => { + // Arrange + const user = userEvent.setup(); + const title = "Changes"; + const mockGitChanges: Array<{ + path: string; + status: "M" | "A" | "D" | "R" | "U"; + }> = [ + { path: "file1.ts", status: "M" }, + { path: "file2.ts", status: "A" }, + ]; + + vi.mocked(GitService.getGitChanges).mockResolvedValue(mockGitChanges); + + renderWithProviders( + , + ); + + const refreshButton = screen.getByRole("button"); + + // Wait for initial query to complete + await waitFor(() => { + expect(GitService.getGitChanges).toHaveBeenCalled(); + }); + + // Clear the mock to track refetch calls + vi.mocked(GitService.getGitChanges).mockClear(); + + // Act + await user.click(refreshButton); + + // Assert - refetch should trigger another service call + await waitFor(() => { + expect(GitService.getGitChanges).toHaveBeenCalledWith( + "test-conversation-id", + ); + }); + }); + }); +}); diff --git a/frontend/src/components/features/conversation/conversation-tabs/conversation-tab-content/conversation-tab-content.tsx b/frontend/src/components/features/conversation/conversation-tabs/conversation-tab-content/conversation-tab-content.tsx index 70b45ea73a..39b68c9033 100644 --- a/frontend/src/components/features/conversation/conversation-tabs/conversation-tab-content/conversation-tab-content.tsx +++ b/frontend/src/components/features/conversation/conversation-tabs/conversation-tab-content/conversation-tab-content.tsx @@ -82,13 +82,45 @@ export function ConversationTabContent() { isPlannerActive, ]); + const conversationKey = useMemo(() => { + if (isEditorActive) { + return "editor"; + } + if (isBrowserActive) { + return "browser"; + } + if (isServedActive) { + return "served"; + } + if (isVSCodeActive) { + return "vscode"; + } + if (isTerminalActive) { + return "terminal"; + } + if (isPlannerActive) { + return "planner"; + } + return ""; + }, [ + isEditorActive, + isBrowserActive, + isServedActive, + isVSCodeActive, + isTerminalActive, + isPlannerActive, + ]); + if (shouldShownAgentLoading) { return ; } return ( - + {tabs.map(({ key, component: Component, isActive }) => ( { + refetch(); + }; + return (
{title} + {conversationKey === "editor" && ( + + )}
); } diff --git a/frontend/src/hooks/query/use-unified-get-git-changes.ts b/frontend/src/hooks/query/use-unified-get-git-changes.ts index ae5600469a..6b0856031c 100644 --- a/frontend/src/hooks/query/use-unified-get-git-changes.ts +++ b/frontend/src/hooks/query/use-unified-get-git-changes.ts @@ -103,5 +103,6 @@ export const useUnifiedGetGitChanges = () => { isSuccess: result.isSuccess, isError: result.isError, error: result.error, + refetch: result.refetch, }; }; diff --git a/frontend/src/icons/u-refresh.svg b/frontend/src/icons/u-refresh.svg new file mode 100644 index 0000000000..9e3a2051d2 --- /dev/null +++ b/frontend/src/icons/u-refresh.svg @@ -0,0 +1,3 @@ + + + From f98e7fbc49e2698f78010d04d1e66ed638aafdc2 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Wed, 17 Dec 2025 22:34:28 +0700 Subject: [PATCH 22/80] fix(frontend): observation events and action events (v1 conversations) (#12066) Co-authored-by: openhands --- enterprise/storage/saas_settings_store.py | 3 +++ .../v1/chat/event-content-helpers/should-render-event.ts | 4 ++++ .../observation-pair-event-message.tsx | 7 ++++++- frontend/src/types/v1/type-guards.ts | 8 +++++++- 4 files changed, 20 insertions(+), 2 deletions(-) diff --git a/enterprise/storage/saas_settings_store.py b/enterprise/storage/saas_settings_store.py index 6cbcb50802..cfcbec7583 100644 --- a/enterprise/storage/saas_settings_store.py +++ b/enterprise/storage/saas_settings_store.py @@ -94,6 +94,9 @@ class SaasSettingsStore(SettingsStore): } self._decrypt_kwargs(kwargs) settings = Settings(**kwargs) + + settings.v1_enabled = True + return settings async def store(self, item: Settings): diff --git a/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts b/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts index a5fdc62252..1171c21c92 100644 --- a/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts +++ b/frontend/src/components/v1/chat/event-content-helpers/should-render-event.ts @@ -18,6 +18,10 @@ export const shouldRenderEvent = (event: OpenHandsEvent) => { // For V1, action is an object with kind property const actionType = event.action.kind; + if (!actionType) { + return false; + } + // Hide user commands from the chat interface if (actionType === "ExecuteBashAction" && event.source === "user") { return false; diff --git a/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx b/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx index aa0bbc09b4..221d758dd6 100644 --- a/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx +++ b/frontend/src/components/v1/chat/event-message-components/observation-pair-event-message.tsx @@ -34,7 +34,12 @@ export function ObservationPairEventMessage({ .map((t) => t.text) .join("\n"); - if (thoughtContent && event.action.kind !== "ThinkAction") { + // Defensive check: ensure action exists and has kind property + if ( + thoughtContent && + event.action?.kind && + event.action.kind !== "ThinkAction" + ) { return (
diff --git a/frontend/src/types/v1/type-guards.ts b/frontend/src/types/v1/type-guards.ts index ee831ea489..dec1816209 100644 --- a/frontend/src/types/v1/type-guards.ts +++ b/frontend/src/types/v1/type-guards.ts @@ -54,7 +54,10 @@ export const isObservationEvent = ( ): event is ObservationEvent => event.source === "environment" && "action_id" in event && - "observation" in event; + "observation" in event && + event.observation !== null && + typeof event.observation === "object" && + "kind" in event.observation; /** * Type guard function to check if an event is an agent error event @@ -94,6 +97,9 @@ export const isUserMessageEvent = ( export const isActionEvent = (event: OpenHandsEvent): event is ActionEvent => event.source === "agent" && "action" in event && + event.action !== null && + typeof event.action === "object" && + "kind" in event.action && "tool_name" in event && "tool_call_id" in event && typeof event.tool_name === "string" && From 9ef11bf9302c4b3edb257e6ecfb0f41f70ad5bec Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Wed, 17 Dec 2025 23:25:10 +0700 Subject: [PATCH 23/80] feat: show available skills for v1 conversations (#12039) --- .openhands/microagents/repo.md | 2 +- .../conversation/conversation-name.test.tsx | 24 +- .../microagents/microagent-modal.test.tsx | 91 ---- .../modals/skills/skill-modal.test.tsx | 394 ++++++++++++++ .../v1-conversation-service.api.ts | 13 + .../v1-conversation-service.types.ts | 11 + .../features/controls/tools-context-menu.tsx | 30 +- .../components/features/controls/tools.tsx | 16 +- .../conversation-card-context-menu.tsx | 147 ----- .../conversation-card-context-menu.tsx | 12 +- ...croagent-content.tsx => skill-content.tsx} | 8 +- .../{microagent-item.tsx => skill-item.tsx} | 26 +- ...oagent-triggers.tsx => skill-triggers.tsx} | 6 +- ...empty-state.tsx => skills-empty-state.tsx} | 8 +- ...ing-state.tsx => skills-loading-state.tsx} | 2 +- ...dal-header.tsx => skills-modal-header.tsx} | 10 +- ...microagents-modal.tsx => skills-modal.tsx} | 74 ++- .../conversation-name-context-menu.tsx | 15 +- .../conversation/conversation-name.tsx | 20 +- ...roagents.ts => use-conversation-skills.ts} | 14 +- .../use-conversation-name-context-menu.ts | 17 +- frontend/src/i18n/declaration.ts | 15 +- frontend/src/i18n/translation.json | 172 +++--- .../app_conversation_models.py | 10 + .../app_conversation_router.py | 152 +++++- .../app_conversation_service_base.py | 6 +- .../test_app_conversation_service_base.py | 16 +- .../test_app_conversation_skills_endpoint.py | 503 ++++++++++++++++++ 28 files changed, 1325 insertions(+), 489 deletions(-) delete mode 100644 frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx create mode 100644 frontend/__tests__/components/modals/skills/skill-modal.test.tsx delete mode 100644 frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx rename frontend/src/components/features/conversation-panel/{microagent-content.tsx => skill-content.tsx} (76%) rename frontend/src/components/features/conversation-panel/{microagent-item.tsx => skill-item.tsx} (65%) rename frontend/src/components/features/conversation-panel/{microagent-triggers.tsx => skill-triggers.tsx} (81%) rename frontend/src/components/features/conversation-panel/{microagents-empty-state.tsx => skills-empty-state.tsx} (63%) rename frontend/src/components/features/conversation-panel/{microagents-loading-state.tsx => skills-loading-state.tsx} (80%) rename frontend/src/components/features/conversation-panel/{microagents-modal-header.tsx => skills-modal-header.tsx} (82%) rename frontend/src/components/features/conversation-panel/{microagents-modal.tsx => skills-modal.tsx} (50%) rename frontend/src/hooks/query/{use-conversation-microagents.ts => use-conversation-skills.ts} (62%) create mode 100644 tests/unit/app_server/test_app_conversation_skills_endpoint.py diff --git a/.openhands/microagents/repo.md b/.openhands/microagents/repo.md index ceb87bc2f7..cd3ef33074 100644 --- a/.openhands/microagents/repo.md +++ b/.openhands/microagents/repo.md @@ -63,7 +63,7 @@ Frontend: - We use TanStack Query (fka React Query) for data fetching and cache management - Data Access Layer: API client methods are located in `frontend/src/api` and should never be called directly from UI components - they must always be wrapped with TanStack Query - Custom hooks are located in `frontend/src/hooks/query/` and `frontend/src/hooks/mutation/` - - Query hooks should follow the pattern use[Resource] (e.g., `useConversationMicroagents`) + - Query hooks should follow the pattern use[Resource] (e.g., `useConversationSkills`) - Mutation hooks should follow the pattern use[Action] (e.g., `useDeleteConversation`) - Architecture rule: UI components → TanStack Query hooks → Data Access Layer (`frontend/src/api`) → API endpoints diff --git a/frontend/__tests__/components/features/conversation/conversation-name.test.tsx b/frontend/__tests__/components/features/conversation/conversation-name.test.tsx index 572ca590b1..41078b69cb 100644 --- a/frontend/__tests__/components/features/conversation/conversation-name.test.tsx +++ b/frontend/__tests__/components/features/conversation/conversation-name.test.tsx @@ -42,7 +42,7 @@ vi.mock("react-i18next", async () => { BUTTON$EXPORT_CONVERSATION: "Export Conversation", BUTTON$DOWNLOAD_VIA_VSCODE: "Download via VS Code", BUTTON$SHOW_AGENT_TOOLS_AND_METADATA: "Show Agent Tools", - CONVERSATION$SHOW_MICROAGENTS: "Show Microagents", + CONVERSATION$SHOW_SKILLS: "Show Skills", BUTTON$DISPLAY_COST: "Display Cost", COMMON$CLOSE_CONVERSATION_STOP_RUNTIME: "Close Conversation (Stop Runtime)", @@ -290,7 +290,7 @@ describe("ConversationNameContextMenu", () => { onStop: vi.fn(), onDisplayCost: vi.fn(), onShowAgentTools: vi.fn(), - onShowMicroagents: vi.fn(), + onShowSkills: vi.fn(), onExportConversation: vi.fn(), onDownloadViaVSCode: vi.fn(), }; @@ -304,7 +304,7 @@ describe("ConversationNameContextMenu", () => { expect(screen.getByTestId("stop-button")).toBeInTheDocument(); expect(screen.getByTestId("display-cost-button")).toBeInTheDocument(); expect(screen.getByTestId("show-agent-tools-button")).toBeInTheDocument(); - expect(screen.getByTestId("show-microagents-button")).toBeInTheDocument(); + expect(screen.getByTestId("show-skills-button")).toBeInTheDocument(); expect( screen.getByTestId("export-conversation-button"), ).toBeInTheDocument(); @@ -321,9 +321,7 @@ describe("ConversationNameContextMenu", () => { expect( screen.queryByTestId("show-agent-tools-button"), ).not.toBeInTheDocument(); - expect( - screen.queryByTestId("show-microagents-button"), - ).not.toBeInTheDocument(); + expect(screen.queryByTestId("show-skills-button")).not.toBeInTheDocument(); expect( screen.queryByTestId("export-conversation-button"), ).not.toBeInTheDocument(); @@ -410,19 +408,19 @@ describe("ConversationNameContextMenu", () => { it("should call show microagents handler when show microagents button is clicked", async () => { const user = userEvent.setup(); - const onShowMicroagents = vi.fn(); + const onShowSkills = vi.fn(); renderWithProviders( , ); - const showMicroagentsButton = screen.getByTestId("show-microagents-button"); + const showMicroagentsButton = screen.getByTestId("show-skills-button"); await user.click(showMicroagentsButton); - expect(onShowMicroagents).toHaveBeenCalledTimes(1); + expect(onShowSkills).toHaveBeenCalledTimes(1); }); it("should call export conversation handler when export conversation button is clicked", async () => { @@ -519,7 +517,7 @@ describe("ConversationNameContextMenu", () => { onStop: vi.fn(), onDisplayCost: vi.fn(), onShowAgentTools: vi.fn(), - onShowMicroagents: vi.fn(), + onShowSkills: vi.fn(), onExportConversation: vi.fn(), onDownloadViaVSCode: vi.fn(), }; @@ -541,8 +539,8 @@ describe("ConversationNameContextMenu", () => { expect(screen.getByTestId("show-agent-tools-button")).toHaveTextContent( "Show Agent Tools", ); - expect(screen.getByTestId("show-microagents-button")).toHaveTextContent( - "Show Microagents", + expect(screen.getByTestId("show-skills-button")).toHaveTextContent( + "Show Skills", ); expect(screen.getByTestId("export-conversation-button")).toHaveTextContent( "Export Conversation", diff --git a/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx b/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx deleted file mode 100644 index 858c07207d..0000000000 --- a/frontend/__tests__/components/modals/microagents/microagent-modal.test.tsx +++ /dev/null @@ -1,91 +0,0 @@ -import { screen } from "@testing-library/react"; -import userEvent from "@testing-library/user-event"; -import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; -import { renderWithProviders } from "test-utils"; -import { MicroagentsModal } from "#/components/features/conversation-panel/microagents-modal"; -import ConversationService from "#/api/conversation-service/conversation-service.api"; -import { AgentState } from "#/types/agent-state"; -import { useAgentState } from "#/hooks/use-agent-state"; - -// Mock the agent state hook -vi.mock("#/hooks/use-agent-state", () => ({ - useAgentState: vi.fn(), -})); - -// Mock the conversation ID hook -vi.mock("#/hooks/use-conversation-id", () => ({ - useConversationId: () => ({ conversationId: "test-conversation-id" }), -})); - -describe("MicroagentsModal - Refresh Button", () => { - const mockOnClose = vi.fn(); - const conversationId = "test-conversation-id"; - - const defaultProps = { - onClose: mockOnClose, - conversationId, - }; - - const mockMicroagents = [ - { - name: "Test Agent 1", - type: "repo" as const, - triggers: ["test", "example"], - content: "This is test content for agent 1", - }, - { - name: "Test Agent 2", - type: "knowledge" as const, - triggers: ["help", "support"], - content: "This is test content for agent 2", - }, - ]; - - beforeEach(() => { - // Reset all mocks before each test - vi.clearAllMocks(); - - // Setup default mock for getMicroagents - vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ - microagents: mockMicroagents, - }); - - // Mock the agent state to return a ready state - vi.mocked(useAgentState).mockReturnValue({ - curAgentState: AgentState.AWAITING_USER_INPUT, - }); - }); - - afterEach(() => { - vi.restoreAllMocks(); - }); - - describe("Refresh Button Rendering", () => { - it("should render the refresh button with correct text and test ID", async () => { - renderWithProviders(); - - // Wait for the component to load and render the refresh button - const refreshButton = await screen.findByTestId("refresh-microagents"); - expect(refreshButton).toBeInTheDocument(); - expect(refreshButton).toHaveTextContent("BUTTON$REFRESH"); - }); - }); - - describe("Refresh Button Functionality", () => { - it("should call refetch when refresh button is clicked", async () => { - const user = userEvent.setup(); - const refreshSpy = vi.spyOn(ConversationService, "getMicroagents"); - - renderWithProviders(); - - // Wait for the component to load and render the refresh button - const refreshButton = await screen.findByTestId("refresh-microagents"); - - refreshSpy.mockClear(); - - await user.click(refreshButton); - - expect(refreshSpy).toHaveBeenCalledTimes(1); - }); - }); -}); diff --git a/frontend/__tests__/components/modals/skills/skill-modal.test.tsx b/frontend/__tests__/components/modals/skills/skill-modal.test.tsx new file mode 100644 index 0000000000..33ab5098c8 --- /dev/null +++ b/frontend/__tests__/components/modals/skills/skill-modal.test.tsx @@ -0,0 +1,394 @@ +import { screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; +import { describe, it, expect, vi, beforeEach, afterEach } from "vitest"; +import { renderWithProviders } from "test-utils"; +import { SkillsModal } from "#/components/features/conversation-panel/skills-modal"; +import ConversationService from "#/api/conversation-service/conversation-service.api"; +import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; +import { AgentState } from "#/types/agent-state"; +import { useAgentState } from "#/hooks/use-agent-state"; +import SettingsService from "#/api/settings-service/settings-service.api"; + +// Mock the agent state hook +vi.mock("#/hooks/use-agent-state", () => ({ + useAgentState: vi.fn(), +})); + +// Mock the conversation ID hook +vi.mock("#/hooks/use-conversation-id", () => ({ + useConversationId: () => ({ conversationId: "test-conversation-id" }), +})); + +describe("SkillsModal - Refresh Button", () => { + const mockOnClose = vi.fn(); + const conversationId = "test-conversation-id"; + + const defaultProps = { + onClose: mockOnClose, + conversationId, + }; + + const mockSkills = [ + { + name: "Test Agent 1", + type: "repo" as const, + triggers: ["test", "example"], + content: "This is test content for agent 1", + }, + { + name: "Test Agent 2", + type: "knowledge" as const, + triggers: ["help", "support"], + content: "This is test content for agent 2", + }, + ]; + + beforeEach(() => { + // Reset all mocks before each test + vi.clearAllMocks(); + + // Setup default mock for getMicroagents (V0) + vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ + microagents: mockSkills, + }); + + // Mock the agent state to return a ready state + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("Refresh Button Rendering", () => { + it("should render the refresh button with correct text and test ID", async () => { + renderWithProviders(); + + // Wait for the component to load and render the refresh button + const refreshButton = await screen.findByTestId("refresh-skills"); + expect(refreshButton).toBeInTheDocument(); + expect(refreshButton).toHaveTextContent("BUTTON$REFRESH"); + }); + }); + + describe("Refresh Button Functionality", () => { + it("should call refetch when refresh button is clicked", async () => { + const user = userEvent.setup(); + const refreshSpy = vi.spyOn(ConversationService, "getMicroagents"); + + renderWithProviders(); + + // Wait for the component to load and render the refresh button + const refreshButton = await screen.findByTestId("refresh-skills"); + + // Clear previous calls to only track the click + refreshSpy.mockClear(); + + await user.click(refreshButton); + + // Verify the refresh triggered a new API call + expect(refreshSpy).toHaveBeenCalled(); + }); + }); +}); + +describe("useConversationSkills - V1 API Integration", () => { + const conversationId = "test-conversation-id"; + + const mockMicroagents = [ + { + name: "V0 Test Agent", + type: "repo" as const, + triggers: ["v0"], + content: "V0 skill content", + }, + ]; + + const mockSkills = [ + { + name: "V1 Test Skill", + type: "knowledge" as const, + triggers: ["v1", "skill"], + content: "V1 skill content", + }, + ]; + + beforeEach(() => { + vi.clearAllMocks(); + + // Mock agent state + vi.mocked(useAgentState).mockReturnValue({ + curAgentState: AgentState.AWAITING_USER_INPUT, + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + describe("V0 API Usage (v1_enabled: false)", () => { + it("should call v0 ConversationService.getMicroagents when v1_enabled is false", async () => { + // Arrange + const getMicroagentsSpy = vi + .spyOn(ConversationService, "getMicroagents") + .mockResolvedValue({ microagents: mockMicroagents }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V0 Test Agent"); + expect(getMicroagentsSpy).toHaveBeenCalledWith(conversationId); + expect(getMicroagentsSpy).toHaveBeenCalledTimes(1); + }); + + it("should display v0 skills correctly", async () => { + // Arrange + vi.spyOn(ConversationService, "getMicroagents").mockResolvedValue({ + microagents: mockMicroagents, + }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + const agentName = await screen.findByText("V0 Test Agent"); + expect(agentName).toBeInTheDocument(); + }); + }); + + describe("V1 API Usage (v1_enabled: true)", () => { + it("should call v1 V1ConversationService.getSkills when v1_enabled is true", async () => { + // Arrange + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ skills: mockSkills }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V1 Test Skill"); + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + expect(getSkillsSpy).toHaveBeenCalledTimes(1); + }); + + it("should display v1 skills correctly", async () => { + // Arrange + vi.spyOn(V1ConversationService, "getSkills").mockResolvedValue({ + skills: mockSkills, + }); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act + renderWithProviders(); + + // Assert + const skillName = await screen.findByText("V1 Test Skill"); + expect(skillName).toBeInTheDocument(); + }); + + it("should use v1 API when v1_enabled is true", async () => { + // Arrange + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ + skills: mockSkills, + }); + + // Act + renderWithProviders(); + + // Assert + await screen.findByText("V1 Test Skill"); + // Verify v1 API was called + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + }); + }); + + describe("API Switching on Settings Change", () => { + it("should refetch using different API when v1_enabled setting changes", async () => { + // Arrange + const getMicroagentsSpy = vi + .spyOn(ConversationService, "getMicroagents") + .mockResolvedValue({ microagents: mockMicroagents }); + const getSkillsSpy = vi + .spyOn(V1ConversationService, "getSkills") + .mockResolvedValue({ skills: mockSkills }); + + const settingsSpy = vi + .spyOn(SettingsService, "getSettings") + .mockResolvedValue({ + v1_enabled: false, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act - Initial render with v1_enabled: false + const { rerender } = renderWithProviders( + , + ); + + // Assert - v0 API called initially + await screen.findByText("V0 Test Agent"); + expect(getMicroagentsSpy).toHaveBeenCalledWith(conversationId); + + // Arrange - Change settings to v1_enabled: true + settingsSpy.mockResolvedValue({ + v1_enabled: true, + llm_model: "test-model", + llm_base_url: "", + agent: "test-agent", + language: "en", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + user_consents_to_analytics: null, + max_budget_per_task: null, + }); + + // Act - Force re-render + rerender(); + + // Assert - v1 API should be called after settings change + await screen.findByText("V1 Test Skill"); + expect(getSkillsSpy).toHaveBeenCalledWith(conversationId); + }); + }); +}); diff --git a/frontend/src/api/conversation-service/v1-conversation-service.api.ts b/frontend/src/api/conversation-service/v1-conversation-service.api.ts index bd37fa8180..d2f8f51ff5 100644 --- a/frontend/src/api/conversation-service/v1-conversation-service.api.ts +++ b/frontend/src/api/conversation-service/v1-conversation-service.api.ts @@ -11,6 +11,7 @@ import type { V1AppConversationStartTask, V1AppConversationStartTaskPage, V1AppConversation, + GetSkillsResponse, } from "./v1-conversation-service.types"; class V1ConversationService { @@ -315,6 +316,18 @@ class V1ConversationService { ); return data; } + + /** + * Get all skills associated with a V1 conversation + * @param conversationId The conversation ID + * @returns The available skills associated with the conversation + */ + static async getSkills(conversationId: string): Promise { + const { data } = await openHands.get( + `/api/v1/app-conversations/${conversationId}/skills`, + ); + return data; + } } export default V1ConversationService; diff --git a/frontend/src/api/conversation-service/v1-conversation-service.types.ts b/frontend/src/api/conversation-service/v1-conversation-service.types.ts index 621283c274..7c8b04ccbf 100644 --- a/frontend/src/api/conversation-service/v1-conversation-service.types.ts +++ b/frontend/src/api/conversation-service/v1-conversation-service.types.ts @@ -99,3 +99,14 @@ export interface V1AppConversation { conversation_url: string | null; session_api_key: string | null; } + +export interface Skill { + name: string; + type: "repo" | "knowledge"; + content: string; + triggers: string[]; +} + +export interface GetSkillsResponse { + skills: Skill[]; +} diff --git a/frontend/src/components/features/controls/tools-context-menu.tsx b/frontend/src/components/features/controls/tools-context-menu.tsx index 39330e25e4..2089f95111 100644 --- a/frontend/src/components/features/controls/tools-context-menu.tsx +++ b/frontend/src/components/features/controls/tools-context-menu.tsx @@ -26,14 +26,14 @@ const contextMenuListItemClassName = cn( interface ToolsContextMenuProps { onClose: () => void; - onShowMicroagents: (event: React.MouseEvent) => void; + onShowSkills: (event: React.MouseEvent) => void; onShowAgentTools: (event: React.MouseEvent) => void; shouldShowAgentTools?: boolean; } export function ToolsContextMenu({ onClose, - onShowMicroagents, + onShowSkills, onShowAgentTools, shouldShowAgentTools = true, }: ToolsContextMenuProps) { @@ -41,7 +41,6 @@ export function ToolsContextMenu({ const { data: conversation } = useActiveConversation(); const { providers } = useUserProviders(); - // TODO: Hide microagent menu items for V1 conversations // This is a temporary measure and may be re-enabled in the future const isV1Conversation = conversation?.conversation_version === "V1"; @@ -130,20 +129,17 @@ export function ToolsContextMenu({ {(!isV1Conversation || shouldShowAgentTools) && } - {/* Show Available Microagents - Hidden for V1 conversations */} - {!isV1Conversation && ( - - } - text={t(I18nKey.CONVERSATION$SHOW_MICROAGENTS)} - className={CONTEXT_MENU_ICON_TEXT_CLASSNAME} - /> - - )} + + } + text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} + className={CONTEXT_MENU_ICON_TEXT_CLASSNAME} + /> + {/* Show Agent Tools and Metadata - Only show if system message is available */} {shouldShowAgentTools && ( diff --git a/frontend/src/components/features/controls/tools.tsx b/frontend/src/components/features/controls/tools.tsx index 56ef58bc8e..80994cbe65 100644 --- a/frontend/src/components/features/controls/tools.tsx +++ b/frontend/src/components/features/controls/tools.tsx @@ -7,7 +7,7 @@ import { ToolsContextMenu } from "./tools-context-menu"; import { useConversationNameContextMenu } from "#/hooks/use-conversation-name-context-menu"; import { useActiveConversation } from "#/hooks/query/use-active-conversation"; import { SystemMessageModal } from "../conversation-panel/system-message-modal"; -import { MicroagentsModal } from "../conversation-panel/microagents-modal"; +import { SkillsModal } from "../conversation-panel/skills-modal"; export function Tools() { const { t } = useTranslation(); @@ -17,11 +17,11 @@ export function Tools() { const { handleShowAgentTools, - handleShowMicroagents, + handleShowSkills, systemModalVisible, setSystemModalVisible, - microagentsModalVisible, - setMicroagentsModalVisible, + skillsModalVisible, + setSkillsModalVisible, systemMessage, shouldShowAgentTools, } = useConversationNameContextMenu({ @@ -51,7 +51,7 @@ export function Tools() { {contextMenuOpen && ( setContextMenuOpen(false)} - onShowMicroagents={handleShowMicroagents} + onShowSkills={handleShowSkills} onShowAgentTools={handleShowAgentTools} shouldShowAgentTools={shouldShowAgentTools} /> @@ -64,9 +64,9 @@ export function Tools() { systemMessage={systemMessage ? systemMessage.args : null} /> - {/* Microagents Modal */} - {microagentsModalVisible && ( - setMicroagentsModalVisible(false)} /> + {/* Skills Modal */} + {skillsModalVisible && ( + setSkillsModalVisible(false)} /> )}
); diff --git a/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx deleted file mode 100644 index 63ea33152b..0000000000 --- a/frontend/src/components/features/conversation-panel/conversation-card-context-menu.tsx +++ /dev/null @@ -1,147 +0,0 @@ -import { - Trash, - Power, - Pencil, - Download, - Wallet, - Wrench, - Bot, -} from "lucide-react"; -import { useTranslation } from "react-i18next"; -import { useClickOutsideElement } from "#/hooks/use-click-outside-element"; -import { cn } from "#/utils/utils"; -import { ContextMenu } from "#/ui/context-menu"; -import { ContextMenuListItem } from "../context-menu/context-menu-list-item"; -import { Divider } from "#/ui/divider"; -import { I18nKey } from "#/i18n/declaration"; -import { ContextMenuIconText } from "../context-menu/context-menu-icon-text"; -import { useActiveConversation } from "#/hooks/query/use-active-conversation"; - -interface ConversationCardContextMenuProps { - onClose: () => void; - onDelete?: (event: React.MouseEvent) => void; - onStop?: (event: React.MouseEvent) => void; - onEdit?: (event: React.MouseEvent) => void; - onDisplayCost?: (event: React.MouseEvent) => void; - onShowAgentTools?: (event: React.MouseEvent) => void; - onShowMicroagents?: (event: React.MouseEvent) => void; - onDownloadViaVSCode?: (event: React.MouseEvent) => void; - position?: "top" | "bottom"; -} - -export function ConversationCardContextMenu({ - onClose, - onDelete, - onStop, - onEdit, - onDisplayCost, - onShowAgentTools, - onShowMicroagents, - onDownloadViaVSCode, - position = "bottom", -}: ConversationCardContextMenuProps) { - const { t } = useTranslation(); - const ref = useClickOutsideElement(onClose); - const { data: conversation } = useActiveConversation(); - - // TODO: Hide microagent menu items for V1 conversations - // This is a temporary measure and may be re-enabled in the future - const isV1Conversation = conversation?.conversation_version === "V1"; - - const hasEdit = Boolean(onEdit); - const hasDownload = Boolean(onDownloadViaVSCode); - const hasTools = Boolean(onShowAgentTools || onShowMicroagents); - const hasInfo = Boolean(onDisplayCost); - const hasControl = Boolean(onStop || onDelete); - - return ( - - {onEdit && ( - - - - )} - - {hasEdit && (hasDownload || hasTools || hasInfo || hasControl) && ( - - )} - - {onDownloadViaVSCode && ( - - - - )} - - {hasDownload && (hasTools || hasInfo || hasControl) && } - - {onShowAgentTools && ( - - - - )} - - {onShowMicroagents && !isV1Conversation && ( - - - - )} - - {hasTools && (hasInfo || hasControl) && } - - {onDisplayCost && ( - - - - )} - - {hasInfo && hasControl && } - - {onStop && ( - - - - )} - - {onDelete && ( - - - - )} - - ); -} diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx index 6565a83a10..30a7ec42cb 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx @@ -22,7 +22,7 @@ interface ConversationCardContextMenuProps { onEdit?: (event: React.MouseEvent) => void; onDisplayCost?: (event: React.MouseEvent) => void; onShowAgentTools?: (event: React.MouseEvent) => void; - onShowMicroagents?: (event: React.MouseEvent) => void; + onShowSkills?: (event: React.MouseEvent) => void; onDownloadViaVSCode?: (event: React.MouseEvent) => void; position?: "top" | "bottom"; } @@ -37,7 +37,7 @@ export function ConversationCardContextMenu({ onEdit, onDisplayCost, onShowAgentTools, - onShowMicroagents, + onShowSkills, onDownloadViaVSCode, position = "bottom", }: ConversationCardContextMenuProps) { @@ -96,15 +96,15 @@ export function ConversationCardContextMenu({ /> ), - onShowMicroagents && ( + onShowSkills && ( } - text={t(I18nKey.CONVERSATION$SHOW_MICROAGENTS)} + text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} /> ), diff --git a/frontend/src/components/features/conversation-panel/microagent-content.tsx b/frontend/src/components/features/conversation-panel/skill-content.tsx similarity index 76% rename from frontend/src/components/features/conversation-panel/microagent-content.tsx rename to frontend/src/components/features/conversation-panel/skill-content.tsx index fad0485607..9303047e3a 100644 --- a/frontend/src/components/features/conversation-panel/microagent-content.tsx +++ b/frontend/src/components/features/conversation-panel/skill-content.tsx @@ -3,17 +3,17 @@ import { I18nKey } from "#/i18n/declaration"; import { Typography } from "#/ui/typography"; import { Pre } from "#/ui/pre"; -interface MicroagentContentProps { +interface SkillContentProps { content: string; } -export function MicroagentContent({ content }: MicroagentContentProps) { +export function SkillContent({ content }: SkillContentProps) { const { t } = useTranslation(); return (
- {t(I18nKey.MICROAGENTS_MODAL$CONTENT)} + {t(I18nKey.COMMON$CONTENT)}
-        {content || t(I18nKey.MICROAGENTS_MODAL$NO_CONTENT)}
+        {content || t(I18nKey.SKILLS_MODAL$NO_CONTENT)}
       
); diff --git a/frontend/src/components/features/conversation-panel/microagent-item.tsx b/frontend/src/components/features/conversation-panel/skill-item.tsx similarity index 65% rename from frontend/src/components/features/conversation-panel/microagent-item.tsx rename to frontend/src/components/features/conversation-panel/skill-item.tsx index d23febb099..c76bf10be9 100644 --- a/frontend/src/components/features/conversation-panel/microagent-item.tsx +++ b/frontend/src/components/features/conversation-panel/skill-item.tsx @@ -1,35 +1,31 @@ import { ChevronDown, ChevronRight } from "lucide-react"; -import { Microagent } from "#/api/open-hands.types"; import { Typography } from "#/ui/typography"; -import { MicroagentTriggers } from "./microagent-triggers"; -import { MicroagentContent } from "./microagent-content"; +import { SkillTriggers } from "./skill-triggers"; +import { SkillContent } from "./skill-content"; +import { Skill } from "#/api/conversation-service/v1-conversation-service.types"; -interface MicroagentItemProps { - agent: Microagent; +interface SkillItemProps { + skill: Skill; isExpanded: boolean; onToggle: (agentName: string) => void; } -export function MicroagentItem({ - agent, - isExpanded, - onToggle, -}: MicroagentItemProps) { +export function SkillItem({ skill, isExpanded, onToggle }: SkillItemProps) { return (
@@ -72,11 +73,17 @@ function ServedApp() { type="button" onClick={() => setRefreshKey((prev) => prev + 1)} className="text-sm" + aria-label={t(I18nKey.BUTTON$REFRESH)} > -
From 0246b1bc4398604b86ba1f79b1832557d823e9a9 Mon Sep 17 00:00:00 2001 From: "sp.wack" <83104063+amanape@users.noreply.github.com> Date: Fri, 19 Dec 2025 21:00:59 +0400 Subject: [PATCH 38/80] hotfix(frontend): update websocket handler test mocks to use correct event count endpoint (#12104) --- .../conversation-websocket-handler.test.tsx | 48 +++++-------------- 1 file changed, 12 insertions(+), 36 deletions(-) diff --git a/frontend/__tests__/conversation-websocket-handler.test.tsx b/frontend/__tests__/conversation-websocket-handler.test.tsx index 0a8050eca8..d3df1676fa 100644 --- a/frontend/__tests__/conversation-websocket-handler.test.tsx +++ b/frontend/__tests__/conversation-websocket-handler.test.tsx @@ -453,18 +453,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(expectedEventCount); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(expectedEventCount), + ), wsLink.addEventListener("connection", ({ client, server }) => { server.connect(); // Send all history events @@ -520,18 +512,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(0); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(0), + ), wsLink.addEventListener("connection", ({ server }) => { server.connect(); // No events sent for empty history @@ -577,18 +561,10 @@ describe("Conversation WebSocket Handler", () => { // Set up MSW to mock both the HTTP API and WebSocket connection mswServer.use( - http.get("/api/v1/events/count", ({ request }) => { - const url = new URL(request.url); - const conversationIdParam = url.searchParams.get( - "conversation_id__eq", - ); - - if (conversationIdParam === conversationId) { - return HttpResponse.json(expectedEventCount); - } - - return HttpResponse.json(0); - }), + http.get( + `http://localhost:3000/api/conversations/${conversationId}/events/count`, + () => HttpResponse.json(expectedEventCount), + ), wsLink.addEventListener("connection", ({ client, server }) => { server.connect(); // Send all history events From 800e861b88a6da736ad4516aceb5b056312ac533 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 19 Dec 2025 12:17:49 -0700 Subject: [PATCH 39/80] Replace manual pagination loops with page_iterator utility function (#12027) Co-authored-by: openhands --- .../sandbox/remote_sandbox_service.py | 69 ++++++++----------- .../app_server/sandbox/sandbox_service.py | 24 ++----- 2 files changed, 34 insertions(+), 59 deletions(-) diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index 035870cd45..dc1993440c 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -44,6 +44,7 @@ from openhands.app_server.services.injector import InjectorState from openhands.app_server.user.specifiy_user_context import ADMIN, USER_CONTEXT_ATTR from openhands.app_server.user.user_context import UserContext from openhands.app_server.utils.sql_utils import Base, UtcDateTime +from openhands.sdk.utils.paging import page_iterator _logger = logging.getLogger(__name__) WEBHOOK_CALLBACK_VARIABLE = 'OH_WEBHOOKS_0_BASE_URL' @@ -529,32 +530,26 @@ async def poll_agent_servers(api_url: str, api_key: str, sleep_interval: int): get_event_callback_service(state) as event_callback_service, get_httpx_client(state) as httpx_client, ): - page_id = None matches = 0 - while True: - page = await app_conversation_info_service.search_app_conversation_info( - page_id=page_id + async for app_conversation_info in page_iterator( + app_conversation_info_service.search_app_conversation_info + ): + runtime = runtimes_by_sandbox_id.get( + app_conversation_info.sandbox_id ) - for app_conversation_info in page.items: - runtime = runtimes_by_sandbox_id.get( - app_conversation_info.sandbox_id + if runtime: + matches += 1 + await refresh_conversation( + app_conversation_info_service=app_conversation_info_service, + event_service=event_service, + event_callback_service=event_callback_service, + app_conversation_info=app_conversation_info, + runtime=runtime, + httpx_client=httpx_client, ) - if runtime: - matches += 1 - await refresh_conversation( - app_conversation_info_service=app_conversation_info_service, - event_service=event_service, - event_callback_service=event_callback_service, - app_conversation_info=app_conversation_info, - runtime=runtime, - httpx_client=httpx_client, - ) - page_id = page.next_page_id - if page_id is None: - _logger.debug( - f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.' - ) - break + _logger.debug( + f'Matched {len(runtimes_by_sandbox_id)} Runtimes with {matches} Conversations.' + ) except Exception as exc: _logger.exception( @@ -608,37 +603,29 @@ async def refresh_conversation( event_url = ( f'{url}/api/conversations/{app_conversation_info.id.hex}/events/search' ) - page_id = None - while True: + + async def fetch_events_page(page_id: str | None = None) -> EventPage: + """Helper function to fetch a page of events from the agent server.""" params: dict[str, str] = {} if page_id: - params['page_id'] = page_id # type: ignore[unreachable] + params['page_id'] = page_id response = await httpx_client.get( event_url, params=params, headers={'X-Session-API-Key': runtime['session_api_key']}, ) response.raise_for_status() - page = EventPage.model_validate(response.json()) + return EventPage.model_validate(response.json()) - to_process = [] - for event in page.items: - existing = await event_service.get_event(event.id) - if existing is None: - await event_service.save_event(app_conversation_info.id, event) - to_process.append(event) - - for event in to_process: + async for event in page_iterator(fetch_events_page): + existing = await event_service.get_event(event.id) + if existing is None: + await event_service.save_event(app_conversation_info.id, event) await event_callback_service.execute_callbacks( app_conversation_info.id, event ) - page_id = page.next_page_id - if page_id is None: - _logger.debug( - f'Finished Refreshing Conversation {app_conversation_info.id}' - ) - break + _logger.debug(f'Finished Refreshing Conversation {app_conversation_info.id}') except Exception as exc: _logger.exception(f'Error Refreshing Conversation: {exc}', stack_info=True) diff --git a/openhands/app_server/sandbox/sandbox_service.py b/openhands/app_server/sandbox/sandbox_service.py index b1144a47cc..e7f8afe9c2 100644 --- a/openhands/app_server/sandbox/sandbox_service.py +++ b/openhands/app_server/sandbox/sandbox_service.py @@ -8,6 +8,7 @@ from openhands.app_server.sandbox.sandbox_models import ( ) from openhands.app_server.services.injector import Injector from openhands.sdk.utils.models import DiscriminatedUnionMixin +from openhands.sdk.utils.paging import page_iterator class SandboxService(ABC): @@ -83,24 +84,11 @@ class SandboxService(ABC): if max_num_sandboxes <= 0: raise ValueError('max_num_sandboxes must be greater than 0') - # Get all sandboxes (we'll search through all pages) - all_sandboxes = [] - page_id = None - - while True: - page = await self.search_sandboxes(page_id=page_id, limit=100) - all_sandboxes.extend(page.items) - - if page.next_page_id is None: - break - page_id = page.next_page_id - - # Filter to only running sandboxes - running_sandboxes = [ - sandbox - for sandbox in all_sandboxes - if sandbox.status == SandboxStatus.RUNNING - ] + # Get all running sandboxes (iterate through all pages) + running_sandboxes = [] + async for sandbox in page_iterator(self.search_sandboxes, limit=100): + if sandbox.status == SandboxStatus.RUNNING: + running_sandboxes.append(sandbox) # If we're within the limit, no cleanup needed if len(running_sandboxes) <= max_num_sandboxes: From a873af307a159e00e31c1bdb96053d088bc27ab1 Mon Sep 17 00:00:00 2001 From: mamoodi Date: Fri, 19 Dec 2025 14:31:53 -0500 Subject: [PATCH 40/80] Update CODEOWNERS (#12106) --- .github/CODEOWNERS | 12 ++++-------- trigger_commit.txt | 1 - 2 files changed, 4 insertions(+), 9 deletions(-) delete mode 100644 trigger_commit.txt diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 21782ad1d8..3da4c3ee88 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,12 +1,8 @@ # CODEOWNERS file for OpenHands repository # See https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners -# Frontend code owners -/frontend/ @amanape -/openhands-ui/ @amanape - -# Evaluation code owners +/frontend/ @amanape @hieptl +/openhands-ui/ @amanape @hieptl +/openhands/ @tofarr @malhotra5 @hieptl +/enterprise/ @chuckbutkus @tofarr @malhotra5 /evaluation/ @xingyaoww @neubig - -# Documentation code owners -/docs/ @mamoodi diff --git a/trigger_commit.txt b/trigger_commit.txt deleted file mode 100644 index 402f8bb0e5..0000000000 --- a/trigger_commit.txt +++ /dev/null @@ -1 +0,0 @@ -# Trigger E2E test run From adff39507a69b6e1fcb250627e335fd43578cb0d Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 19 Dec 2025 16:24:40 -0700 Subject: [PATCH 41/80] fix: eliminate N+1 performance bug in RemoteSandboxService with batch endpoint (#12105) Co-authored-by: openhands --- .../live_status_app_conversation_service.py | 18 +- .../sandbox/remote_sandbox_service.py | 130 +++++++++++--- .../app_server/sandbox/sandbox_service.py | 2 +- .../app_server/test_remote_sandbox_service.py | 169 ++++++++++++------ 4 files changed, 242 insertions(+), 77 deletions(-) diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index a8d490489c..db30710f76 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -477,7 +477,11 @@ class LiveStatusAppConversationService(AppConversationServiceBase): if sandbox.status in (None, SandboxStatus.ERROR): raise SandboxError(f'Sandbox status: {sandbox.status}') if sandbox.status == SandboxStatus.RUNNING: - return + # There are still bugs in the remote runtime - they report running while still just + # starting resulting in a race condition. Manually check that it is actually + # running. + if await self._check_agent_server_alive(sandbox): + return if sandbox.status != SandboxStatus.STARTING: raise SandboxError(f'Sandbox not startable: {sandbox.id}') @@ -490,9 +494,19 @@ class LiveStatusAppConversationService(AppConversationServiceBase): if sandbox.status not in (SandboxStatus.STARTING, SandboxStatus.RUNNING): raise SandboxError(f'Sandbox not startable: {sandbox.id}') if sandbox_info.status == SandboxStatus.RUNNING: - return + # There are still bugs in the remote runtime - they report running while still just + # starting resulting in a race condition. Manually check that it is actually + # running. + if await self._check_agent_server_alive(sandbox_info): + return raise SandboxError(f'Sandbox failed to start: {sandbox.id}') + async def _check_agent_server_alive(self, sandbox_info: SandboxInfo) -> bool: + agent_server_url = self._get_agent_server_url(sandbox_info) + url = f'{agent_server_url.rstrip("/")}/alive' + response = await self.httpx_client.get(url) + return response.is_success + def _get_agent_server_url(self, sandbox: SandboxInfo) -> str: """Get agent server url for running sandbox.""" exposed_urls = sandbox.exposed_urls diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index dc1993440c..d1e7f86d75 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -122,18 +122,9 @@ class RemoteSandboxService(SandboxService): _logger.error(f'HTTP error for URL {url}: {e}') raise - async def _to_sandbox_info( + def _to_sandbox_info( self, stored: StoredRemoteSandbox, runtime: dict[str, Any] | None = None - ) -> SandboxInfo: - # If we did not get passsed runtime data, load some - if runtime is None: - try: - runtime = await self._get_runtime(stored.id) - except Exception: - _logger.exception( - f'Error getting runtime: {stored.id}', stack_info=True - ) - + ): status = self._get_sandbox_status_from_runtime(runtime) # Get session_api_key and exposed urls @@ -233,6 +224,41 @@ class RemoteSandboxService(SandboxService): runtime_data = response.json() return runtime_data + async def _get_runtimes_batch( + self, sandbox_ids: list[str] + ) -> dict[str, dict[str, Any]]: + """Get multiple runtimes in a single batch request. + + Args: + sandbox_ids: List of sandbox IDs to fetch + + Returns: + Dictionary mapping sandbox_id to runtime data + """ + if not sandbox_ids: + return {} + + # Build query parameters for the batch endpoint + params = [('ids', sandbox_id) for sandbox_id in sandbox_ids] + + response = await self._send_runtime_api_request( + 'GET', + '/sessions/batch', + params=params, + ) + response.raise_for_status() + batch_data = response.json() + + # The batch endpoint should return a list of runtimes + # Convert to a dictionary keyed by session_id for easy lookup + runtimes_by_id = {} + if batch_data and 'runtimes' in batch_data: + for runtime in batch_data['runtimes']: + if 'session_id' in runtime: + runtimes_by_id[runtime['session_id']] = runtime + + return runtimes_by_id + async def _init_environment( self, sandbox_spec: SandboxSpecInfo, sandbox_id: str ) -> dict[str, str]: @@ -283,13 +309,15 @@ class RemoteSandboxService(SandboxService): if has_more: next_page_id = str(offset + limit) - # Convert stored callbacks to domain models - items = await asyncio.gather( - *[ - self._to_sandbox_info(stored_sandbox) - for stored_sandbox in stored_sandboxes - ] - ) + # Batch fetch runtime data for all sandboxes + sandbox_ids = [stored_sandbox.id for stored_sandbox in stored_sandboxes] + runtimes_by_id = await self._get_runtimes_batch(sandbox_ids) + + # Convert stored sandboxes to domain models with runtime data + items = [ + self._to_sandbox_info(stored_sandbox, runtimes_by_id.get(stored_sandbox.id)) + for stored_sandbox in stored_sandboxes + ] return SandboxPage(items=items, next_page_id=next_page_id) @@ -298,7 +326,16 @@ class RemoteSandboxService(SandboxService): stored_sandbox = await self._get_stored_sandbox(sandbox_id) if stored_sandbox is None: return None - return await self._to_sandbox_info(stored_sandbox) + + runtime = None + try: + runtime = await self._get_runtime(stored_sandbox.id) + except Exception: + _logger.exception( + f'Error getting runtime: {stored_sandbox.id}', stack_info=True + ) + + return self._to_sandbox_info(stored_sandbox, runtime) async def get_sandbox_by_session_api_key( self, session_api_key: str @@ -323,7 +360,7 @@ class RemoteSandboxService(SandboxService): sandbox = result.first() if sandbox is None: raise ValueError('sandbox_not_found') - return await self._to_sandbox_info(sandbox, runtime) + return self._to_sandbox_info(sandbox, runtime) except Exception: _logger.exception( 'Error getting sandbox from session_api_key', stack_info=True @@ -339,7 +376,7 @@ class RemoteSandboxService(SandboxService): try: runtime = await self._get_runtime(stored_sandbox.id) if runtime and runtime.get('session_api_key') == session_api_key: - return await self._to_sandbox_info(stored_sandbox, runtime) + return self._to_sandbox_info(stored_sandbox, runtime) except Exception: # Continue checking other sandboxes if one fails continue @@ -412,7 +449,7 @@ class RemoteSandboxService(SandboxService): # Hack - result doesn't contain this runtime_data['pod_status'] = 'pending' - return await self._to_sandbox_info(stored_sandbox, runtime_data) + return self._to_sandbox_info(stored_sandbox, runtime_data) except httpx.HTTPError as e: _logger.error(f'Failed to start sandbox: {e}') @@ -480,6 +517,55 @@ class RemoteSandboxService(SandboxService): _logger.error(f'Error deleting sandbox {sandbox_id}: {e}') return False + async def pause_old_sandboxes(self, max_num_sandboxes: int) -> list[str]: + """Pause the oldest sandboxes if there are more than max_num_sandboxes running. + In a multi user environment, this will pause sandboxes only for the current user. + + Args: + max_num_sandboxes: Maximum number of sandboxes to keep running + + Returns: + List of sandbox IDs that were paused + """ + if max_num_sandboxes <= 0: + raise ValueError('max_num_sandboxes must be greater than 0') + + response = await self._send_runtime_api_request( + 'GET', + '/list', + ) + content = response.json() + running_session_ids = [ + runtime.get('session_id') for runtime in content['runtimes'] + ] + + query = await self._secure_select() + query = query.filter(StoredRemoteSandbox.id.in_(running_session_ids)).order_by( + StoredRemoteSandbox.created_at.desc() + ) + running_sandboxes = list(await self.db_session.execute(query)) + + # If we're within the limit, no cleanup needed + if len(running_sandboxes) <= max_num_sandboxes: + return [] + + # Determine how many to pause + num_to_pause = len(running_sandboxes) - max_num_sandboxes + sandboxes_to_pause = running_sandboxes[:num_to_pause] + + # Stop the oldest sandboxes + paused_sandbox_ids = [] + for sandbox in sandboxes_to_pause: + try: + success = await self.pause_sandbox(sandbox.id) + if success: + paused_sandbox_ids.append(sandbox.id) + except Exception: + # Continue trying to pause other sandboxes even if one fails + pass + + return paused_sandbox_ids + def _build_service_url(url: str, service_name: str): scheme, host_and_path = url.split('://') diff --git a/openhands/app_server/sandbox/sandbox_service.py b/openhands/app_server/sandbox/sandbox_service.py index e7f8afe9c2..45274975d7 100644 --- a/openhands/app_server/sandbox/sandbox_service.py +++ b/openhands/app_server/sandbox/sandbox_service.py @@ -72,7 +72,7 @@ class SandboxService(ABC): """ async def pause_old_sandboxes(self, max_num_sandboxes: int) -> list[str]: - """Stop the oldest sandboxes if there are more than max_num_sandboxes running. + """Pause the oldest sandboxes if there are more than max_num_sandboxes running. In a multi user environment, this will pause sandboxes only for the current user. Args: diff --git a/tests/unit/app_server/test_remote_sandbox_service.py b/tests/unit/app_server/test_remote_sandbox_service.py index 5802e46ecb..bb950c732c 100644 --- a/tests/unit/app_server/test_remote_sandbox_service.py +++ b/tests/unit/app_server/test_remote_sandbox_service.py @@ -331,7 +331,7 @@ class TestSandboxInfoConversion: runtime_data = create_runtime_data(status='running', pod_status='ready') # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info( + sandbox_info = remote_sandbox_service._to_sandbox_info( stored_sandbox, runtime_data ) @@ -358,7 +358,7 @@ class TestSandboxInfoConversion: runtime_data = create_runtime_data(status='running', pod_status='pending') # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info( + sandbox_info = remote_sandbox_service._to_sandbox_info( stored_sandbox, runtime_data ) @@ -367,23 +367,6 @@ class TestSandboxInfoConversion: assert sandbox_info.session_api_key == 'test-session-key' assert sandbox_info.exposed_urls is None - @pytest.mark.asyncio - async def test_to_sandbox_info_without_runtime(self, remote_sandbox_service): - """Test conversion to SandboxInfo without runtime data.""" - # Setup - stored_sandbox = create_stored_sandbox() - remote_sandbox_service._get_runtime = AsyncMock( - side_effect=Exception('Runtime not found') - ) - - # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info(stored_sandbox) - - # Verify - assert sandbox_info.status == SandboxStatus.MISSING - assert sandbox_info.session_api_key is None - assert sandbox_info.exposed_urls is None - @pytest.mark.asyncio async def test_to_sandbox_info_loads_runtime_when_none_provided( self, remote_sandbox_service @@ -391,15 +374,12 @@ class TestSandboxInfoConversion: """Test that runtime data is loaded when not provided.""" # Setup stored_sandbox = create_stored_sandbox() - runtime_data = create_runtime_data() - remote_sandbox_service._get_runtime = AsyncMock(return_value=runtime_data) # Execute - sandbox_info = await remote_sandbox_service._to_sandbox_info(stored_sandbox) + sandbox_info = remote_sandbox_service._to_sandbox_info(stored_sandbox, None) # Verify - remote_sandbox_service._get_runtime.assert_called_once_with('test-sandbox-123') - assert sandbox_info.status == SandboxStatus.RUNNING + assert sandbox_info.status == SandboxStatus.MISSING class TestSandboxLifecycle: @@ -677,15 +657,18 @@ class TestSandboxSearch: mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [ + create_runtime_data('sb1'), + create_runtime_data('sb2'), + ] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -697,6 +680,14 @@ class TestSandboxSearch: assert result.items[0].id == 'sb1' assert result.items[1].id == 'sb2' + # Verify that the batch endpoint was called + remote_sandbox_service.httpx_client.request.assert_called_once_with( + 'GET', + 'https://api.example.com/sessions/batch', + headers={'X-API-Key': 'test-api-key'}, + params=[('ids', 'sb1'), ('ids', 'sb2')], + ) + @pytest.mark.asyncio async def test_search_sandboxes_with_pagination(self, remote_sandbox_service): """Test sandbox search with pagination.""" @@ -710,15 +701,15 @@ class TestSandboxSearch: mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [create_runtime_data(f'sb{i}') for i in range(6)] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -739,15 +730,15 @@ class TestSandboxSearch: mock_result = MagicMock() mock_result.scalars.return_value = mock_scalars remote_sandbox_service.db_session.execute = AsyncMock(return_value=mock_result) - remote_sandbox_service._to_sandbox_info = AsyncMock( - side_effect=lambda stored: SandboxInfo( - id=stored.id, - created_by_user_id=stored.created_by_user_id, - sandbox_spec_id=stored.sandbox_spec_id, - status=SandboxStatus.RUNNING, - session_api_key='test-key', - created_at=stored.created_at, - ) + + # Mock the batch endpoint response + mock_batch_response = MagicMock() + mock_batch_response.raise_for_status.return_value = None + mock_batch_response.json.return_value = { + 'runtimes': [create_runtime_data('sb1')] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_batch_response ) # Execute @@ -757,6 +748,80 @@ class TestSandboxSearch: # Note: We can't easily verify the exact SQL query, but we can verify the method was called remote_sandbox_service.db_session.execute.assert_called_once() + @pytest.mark.asyncio + async def test_get_runtimes_batch_success(self, remote_sandbox_service): + """Test successful batch runtime retrieval.""" + # Setup + sandbox_ids = ['sb1', 'sb2', 'sb3'] + mock_response = MagicMock() + mock_response.raise_for_status.return_value = None + mock_response.json.return_value = { + 'runtimes': [ + create_runtime_data('sb1'), + create_runtime_data('sb2'), + create_runtime_data('sb3'), + ] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_response + ) + + # Execute + result = await remote_sandbox_service._get_runtimes_batch(sandbox_ids) + + # Verify + assert len(result) == 3 + assert 'sb1' in result + assert 'sb2' in result + assert 'sb3' in result + assert result['sb1']['session_id'] == 'sb1' + + # Verify the correct API call was made + remote_sandbox_service.httpx_client.request.assert_called_once_with( + 'GET', + 'https://api.example.com/sessions/batch', + headers={'X-API-Key': 'test-api-key'}, + params=[('ids', 'sb1'), ('ids', 'sb2'), ('ids', 'sb3')], + ) + + @pytest.mark.asyncio + async def test_get_runtimes_batch_empty_list(self, remote_sandbox_service): + """Test batch runtime retrieval with empty sandbox list.""" + # Execute + result = await remote_sandbox_service._get_runtimes_batch([]) + + # Verify + assert result == {} + # Verify no API call was made + remote_sandbox_service.httpx_client.request.assert_not_called() + + @pytest.mark.asyncio + async def test_get_runtimes_batch_partial_results(self, remote_sandbox_service): + """Test batch runtime retrieval with partial results (some sandboxes not found).""" + # Setup + sandbox_ids = ['sb1', 'sb2', 'sb3'] + mock_response = MagicMock() + mock_response.raise_for_status.return_value = None + mock_response.json.return_value = { + 'runtimes': [ + create_runtime_data('sb1'), + create_runtime_data('sb3'), + # sb2 is missing from the response + ] + } + remote_sandbox_service.httpx_client.request = AsyncMock( + return_value=mock_response + ) + + # Execute + result = await remote_sandbox_service._get_runtimes_batch(sandbox_ids) + + # Verify + assert len(result) == 2 + assert 'sb1' in result + assert 'sb2' not in result # Missing from response + assert 'sb3' in result + @pytest.mark.asyncio async def test_get_sandbox_exists(self, remote_sandbox_service): """Test getting an existing sandbox.""" @@ -765,7 +830,7 @@ class TestSandboxSearch: remote_sandbox_service._get_stored_sandbox = AsyncMock( return_value=stored_sandbox ) - remote_sandbox_service._to_sandbox_info = AsyncMock( + remote_sandbox_service._to_sandbox_info = MagicMock( return_value=SandboxInfo( id='test-sandbox-123', created_by_user_id='test-user-123', From 305396550afbdea5d90f2f5a75bb63470a733d7d Mon Sep 17 00:00:00 2001 From: Graham Neubig Date: Fri, 19 Dec 2025 19:39:53 -0500 Subject: [PATCH 42/80] Fix flaky test_tool_call_validation_error_handling test (#12110) Co-authored-by: openhands --- .../unit/controller/test_agent_controller.py | 22 ++++++++++++++----- 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/tests/unit/controller/test_agent_controller.py b/tests/unit/controller/test_agent_controller.py index 88b04b5738..3da87ecfdf 100644 --- a/tests/unit/controller/test_agent_controller.py +++ b/tests/unit/controller/test_agent_controller.py @@ -24,6 +24,7 @@ from openhands.core.schema import AgentState from openhands.events import Event, EventSource, EventStream, EventStreamSubscriber from openhands.events.action import ChangeAgentStateAction, CmdRunAction, MessageAction from openhands.events.action.agent import CondensationAction, RecallAction +from openhands.events.action.empty import NullAction from openhands.events.action.message import SystemMessageAction from openhands.events.event import RecallType from openhands.events.observation import ( @@ -319,13 +320,22 @@ async def test_tool_call_validation_error_handling( controller.state.agent_state = AgentState.RUNNING - # Mock the agent.step method to raise a BadRequestError with tool validation failure + # Track call count to only raise error on first call + # This prevents a feedback loop where ErrorObservation triggers another step + # which raises the same error again (since the mock always raises) + call_count = 0 + def mock_step(state): - raise BadRequestError( - message='litellm.BadRequestError: GroqException - {"error":{"message":"tool call validation failed: parameters for tool str_replace_editor did not match schema: errors: [missing properties: \'path\']","type":"invalid_request_error","code":"tool_use_failed"}}', - model='groq/llama3-8b-8192', - llm_provider='groq', - ) + nonlocal call_count + call_count += 1 + if call_count == 1: + raise BadRequestError( + message='litellm.BadRequestError: GroqException - {"error":{"message":"tool call validation failed: parameters for tool str_replace_editor did not match schema: errors: [missing properties: \'path\']","type":"invalid_request_error","code":"tool_use_failed"}}', + model='groq/llama3-8b-8192', + llm_provider='groq', + ) + # Return NullAction on subsequent calls to break the feedback loop + return NullAction() mock_agent.step = mock_step From fa2567b2a0f0fa1236aba97f0c52101cf3de42a8 Mon Sep 17 00:00:00 2001 From: Graham Neubig Date: Fri, 19 Dec 2025 20:09:09 -0500 Subject: [PATCH 43/80] Fix xterm dimensions error with explicit checks instead of try-catch (#12095) Co-authored-by: openhands --- .../__tests__/hooks/use-terminal.test.tsx | 29 ++++++++- frontend/src/hooks/use-terminal.ts | 62 ++++++++++++++++++- 2 files changed, 87 insertions(+), 4 deletions(-) diff --git a/frontend/__tests__/hooks/use-terminal.test.tsx b/frontend/__tests__/hooks/use-terminal.test.tsx index 62973f946a..0e4761b21b 100644 --- a/frontend/__tests__/hooks/use-terminal.test.tsx +++ b/frontend/__tests__/hooks/use-terminal.test.tsx @@ -1,4 +1,3 @@ -/* eslint-disable max-classes-per-file */ import { beforeAll, describe, expect, it, vi, afterEach } from "vitest"; import { useTerminal } from "#/hooks/use-terminal"; import { Command, useCommandStore } from "#/stores/command-store"; @@ -43,6 +42,11 @@ describe("useTerminal", () => { write: vi.fn(), writeln: vi.fn(), dispose: vi.fn(), + element: document.createElement("div"), + })); + + const mockFitAddon = vi.hoisted(() => ({ + fit: vi.fn(), })); beforeAll(() => { @@ -68,6 +72,15 @@ describe("useTerminal", () => { writeln = mockTerminal.writeln; dispose = mockTerminal.dispose; + + element = mockTerminal.element; + }, + })); + + // mock FitAddon + vi.mock("@xterm/addon-fit", () => ({ + FitAddon: class { + fit = mockFitAddon.fit; }, })); }); @@ -96,4 +109,18 @@ describe("useTerminal", () => { expect(mockTerminal.writeln).toHaveBeenNthCalledWith(1, "echo hello"); expect(mockTerminal.writeln).toHaveBeenNthCalledWith(2, "hello"); }); + + it("should not call fit() when terminal.element is null", () => { + // Temporarily set element to null to simulate terminal not being opened + const originalElement = mockTerminal.element; + mockTerminal.element = null as unknown as HTMLDivElement; + + renderWithProviders(); + + // fit() should not be called because terminal.element is null + expect(mockFitAddon.fit).not.toHaveBeenCalled(); + + // Restore original element + mockTerminal.element = originalElement; + }); }); diff --git a/frontend/src/hooks/use-terminal.ts b/frontend/src/hooks/use-terminal.ts index 73652104aa..caa2e42a15 100644 --- a/frontend/src/hooks/use-terminal.ts +++ b/frontend/src/hooks/use-terminal.ts @@ -29,6 +29,47 @@ const renderCommand = ( } }; +/** + * Check if the terminal is ready for fit operations. + * This prevents the "Cannot read properties of undefined (reading 'dimensions')" error + * that occurs when fit() is called on a terminal that is hidden, disposed, or not fully initialized. + */ +const canFitTerminal = ( + terminalInstance: Terminal | null, + fitAddonInstance: FitAddon | null, + containerElement: HTMLDivElement | null, +): boolean => { + // Check terminal and fitAddon exist + if (!terminalInstance || !fitAddonInstance) { + return false; + } + + // Check container element exists + if (!containerElement) { + return false; + } + + // Check element is visible (not display: none) + // When display is none, offsetParent is null (except for fixed/body elements) + const computedStyle = window.getComputedStyle(containerElement); + if (computedStyle.display === "none") { + return false; + } + + // Check element has dimensions + const { clientWidth, clientHeight } = containerElement; + if (clientWidth === 0 || clientHeight === 0) { + return false; + } + + // Check terminal has been opened (element property is set after open()) + if (!terminalInstance.element) { + return false; + } + + return true; +}; + // Create a persistent reference that survives component unmounts // This ensures terminal history is preserved when navigating away and back const persistentLastCommandIndex = { current: 0 }; @@ -39,6 +80,7 @@ export const useTerminal = () => { const fitAddon = React.useRef(null); const ref = React.useRef(null); const lastCommandIndex = persistentLastCommandIndex; // Use the persistent reference + const isDisposed = React.useRef(false); const createTerminal = () => new Terminal({ @@ -55,6 +97,15 @@ export const useTerminal = () => { }, }); + const fitTerminalSafely = React.useCallback(() => { + if (isDisposed.current) { + return; + } + if (canFitTerminal(terminal.current, fitAddon.current, ref.current)) { + fitAddon.current!.fit(); + } + }, []); + const initializeTerminal = () => { if (terminal.current) { if (fitAddon.current) terminal.current.loadAddon(fitAddon.current); @@ -62,13 +113,14 @@ export const useTerminal = () => { terminal.current.open(ref.current); // Hide cursor for read-only terminal using ANSI escape sequence terminal.current.write("\x1b[?25l"); - fitAddon.current?.fit(); + fitTerminalSafely(); } } }; // Initialize terminal and handle cleanup React.useEffect(() => { + isDisposed.current = false; terminal.current = createTerminal(); fitAddon.current = new FitAddon(); @@ -91,6 +143,7 @@ export const useTerminal = () => { } return () => { + isDisposed.current = true; terminal.current?.dispose(); lastCommandIndex.current = 0; }; @@ -118,7 +171,10 @@ export const useTerminal = () => { let resizeObserver: ResizeObserver | null = null; resizeObserver = new ResizeObserver(() => { - fitAddon.current?.fit(); + // Use requestAnimationFrame to debounce resize events and ensure DOM is ready + requestAnimationFrame(() => { + fitTerminalSafely(); + }); }); if (ref.current) { @@ -128,7 +184,7 @@ export const useTerminal = () => { return () => { resizeObserver?.disconnect(); }; - }, []); + }, [fitTerminalSafely]); return ref; }; From 0677cebb257e40cce5381bf9475ef578e324a64d Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 19 Dec 2025 21:18:48 -0700 Subject: [PATCH 44/80] More efficiency (#12112) --- .../sandbox/remote_sandbox_service.py | 33 ++++++++++++++++--- .../app_server/test_remote_sandbox_service.py | 24 ++++++-------- 2 files changed, 39 insertions(+), 18 deletions(-) diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index d1e7f86d75..076c478478 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -252,10 +252,9 @@ class RemoteSandboxService(SandboxService): # The batch endpoint should return a list of runtimes # Convert to a dictionary keyed by session_id for easy lookup runtimes_by_id = {} - if batch_data and 'runtimes' in batch_data: - for runtime in batch_data['runtimes']: - if 'session_id' in runtime: - runtimes_by_id[runtime['session_id']] = runtime + for runtime in batch_data: + if runtime and 'session_id' in runtime: + runtimes_by_id[runtime['session_id']] = runtime return runtimes_by_id @@ -566,6 +565,32 @@ class RemoteSandboxService(SandboxService): return paused_sandbox_ids + async def batch_get_sandboxes( + self, sandbox_ids: list[str] + ) -> list[SandboxInfo | None]: + """Get a batch of sandboxes, returning None for any which were not found.""" + if not sandbox_ids: + return [] + query = await self._secure_select() + query = query.filter(StoredRemoteSandbox.id.in_(sandbox_ids)) + stored_remote_sandboxes = await self.db_session.execute(query) + stored_remote_sandboxes_by_id = { + stored_remote_sandbox[0].id: stored_remote_sandbox[0] + for stored_remote_sandbox in stored_remote_sandboxes + } + runtimes_by_id = await self._get_runtimes_batch( + list(stored_remote_sandboxes_by_id) + ) + results = [] + for sandbox_id in sandbox_ids: + stored_remote_sandbox = stored_remote_sandboxes_by_id.get(sandbox_id) + result = None + if stored_remote_sandbox: + runtime = runtimes_by_id.get(sandbox_id) + result = self._to_sandbox_info(stored_remote_sandbox, runtime) + results.append(result) + return results + def _build_service_url(url: str, service_name: str): scheme, host_and_path = url.split('://') diff --git a/tests/unit/app_server/test_remote_sandbox_service.py b/tests/unit/app_server/test_remote_sandbox_service.py index bb950c732c..c70ad7d324 100644 --- a/tests/unit/app_server/test_remote_sandbox_service.py +++ b/tests/unit/app_server/test_remote_sandbox_service.py @@ -755,13 +755,11 @@ class TestSandboxSearch: sandbox_ids = ['sb1', 'sb2', 'sb3'] mock_response = MagicMock() mock_response.raise_for_status.return_value = None - mock_response.json.return_value = { - 'runtimes': [ - create_runtime_data('sb1'), - create_runtime_data('sb2'), - create_runtime_data('sb3'), - ] - } + mock_response.json.return_value = [ + create_runtime_data('sb1'), + create_runtime_data('sb2'), + create_runtime_data('sb3'), + ] remote_sandbox_service.httpx_client.request = AsyncMock( return_value=mock_response ) @@ -802,13 +800,11 @@ class TestSandboxSearch: sandbox_ids = ['sb1', 'sb2', 'sb3'] mock_response = MagicMock() mock_response.raise_for_status.return_value = None - mock_response.json.return_value = { - 'runtimes': [ - create_runtime_data('sb1'), - create_runtime_data('sb3'), - # sb2 is missing from the response - ] - } + mock_response.json.return_value = [ + create_runtime_data('sb1'), + create_runtime_data('sb3'), + # sb2 is missing from the response + ] remote_sandbox_service.httpx_client.request = AsyncMock( return_value=mock_response ) From 6605070d05de407f184e6c3dd2bfc783846d679b Mon Sep 17 00:00:00 2001 From: Graham Neubig Date: Sat, 20 Dec 2025 09:45:52 -0500 Subject: [PATCH 45/80] Fix fastmcp stateless_http deprecation warning (#12108) Co-authored-by: openhands Co-authored-by: Rohit Malhotra --- openhands/server/app.py | 2 +- openhands/server/routes/mcp.py | 4 +-- tests/unit/server/routes/test_mcp_routes.py | 33 +++++++++++++++++++++ 3 files changed, 35 insertions(+), 4 deletions(-) diff --git a/openhands/server/app.py b/openhands/server/app.py index d5135f2399..5cee75b163 100644 --- a/openhands/server/app.py +++ b/openhands/server/app.py @@ -36,7 +36,7 @@ from openhands.server.shared import conversation_manager, server_config from openhands.server.types import AppMode from openhands.version import get_version -mcp_app = mcp_server.http_app(path='/mcp') +mcp_app = mcp_server.http_app(path='/mcp', stateless_http=True) def combine_lifespans(*lifespans): diff --git a/openhands/server/routes/mcp.py b/openhands/server/routes/mcp.py index 929c66af5b..2d541d637c 100644 --- a/openhands/server/routes/mcp.py +++ b/openhands/server/routes/mcp.py @@ -25,9 +25,7 @@ from openhands.server.user_auth import ( ) from openhands.storage.data_models.conversation_metadata import ConversationMetadata -mcp_server = FastMCP( - 'mcp', stateless_http=True, mask_error_details=True, dependencies=None -) +mcp_server = FastMCP('mcp', mask_error_details=True) HOST = f'https://{os.getenv("WEB_HOST", "app.all-hands.dev").strip()}' CONVERSATION_URL = HOST + '/conversations/{}' diff --git a/tests/unit/server/routes/test_mcp_routes.py b/tests/unit/server/routes/test_mcp_routes.py index 1a55cc0a39..8677b8c85c 100644 --- a/tests/unit/server/routes/test_mcp_routes.py +++ b/tests/unit/server/routes/test_mcp_routes.py @@ -1,3 +1,4 @@ +import warnings from unittest.mock import AsyncMock, patch import pytest @@ -7,6 +8,38 @@ from openhands.server.routes.mcp import get_conversation_link from openhands.server.types import AppMode +def test_mcp_server_no_stateless_http_deprecation_warning(): + """Test that mcp_server is created without stateless_http deprecation warning. + + This test verifies the fix for the fastmcp deprecation warning: + 'Providing `stateless_http` when creating a server is deprecated. + Provide it when calling `run` or as a global setting instead.' + + The fix moves the stateless_http parameter from FastMCP() constructor + to the http_app() method call. + """ + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter('always') + + # Import the mcp_server which triggers FastMCP creation + from openhands.server.routes.mcp import mcp_server + + # Check that no deprecation warning about stateless_http was raised + stateless_http_warnings = [ + warning + for warning in w + if issubclass(warning.category, DeprecationWarning) + and 'stateless_http' in str(warning.message) + ] + + assert len(stateless_http_warnings) == 0, ( + f'Unexpected stateless_http deprecation warning: {stateless_http_warnings}' + ) + + # Verify mcp_server was created successfully + assert mcp_server is not None + + @pytest.mark.asyncio async def test_get_conversation_link_non_saas_mode(): """Test get_conversation_link in non-SAAS mode.""" From 5553d3ca2e17f5e59fc40088ab41bfa7f6ec8a88 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Sun, 21 Dec 2025 19:49:11 +0700 Subject: [PATCH 46/80] feat: support blocking specific email domains (#12115) --- enterprise/server/auth/constants.py | 5 + enterprise/server/auth/domain_blocker.py | 56 +++++ enterprise/server/auth/saas_user_auth.py | 11 + enterprise/server/auth/token_manager.py | 43 ++++ enterprise/server/routes/auth.py | 18 ++ enterprise/tests/unit/test_auth_routes.py | 193 ++++++++++++++++++ enterprise/tests/unit/test_domain_blocker.py | 181 ++++++++++++++++ enterprise/tests/unit/test_saas_user_auth.py | 101 ++++++++- .../tests/unit/test_token_manager_extended.py | 102 ++++++++- 9 files changed, 708 insertions(+), 2 deletions(-) create mode 100644 enterprise/server/auth/domain_blocker.py create mode 100644 enterprise/tests/unit/test_domain_blocker.py diff --git a/enterprise/server/auth/constants.py b/enterprise/server/auth/constants.py index 15d3b0f704..242237e93d 100644 --- a/enterprise/server/auth/constants.py +++ b/enterprise/server/auth/constants.py @@ -38,3 +38,8 @@ ROLE_CHECK_ENABLED = os.getenv('ROLE_CHECK_ENABLED', 'false').lower() in ( 'y', 'on', ) +BLOCKED_EMAIL_DOMAINS = [ + domain.strip().lower() + for domain in os.getenv('BLOCKED_EMAIL_DOMAINS', '').split(',') + if domain.strip() +] diff --git a/enterprise/server/auth/domain_blocker.py b/enterprise/server/auth/domain_blocker.py new file mode 100644 index 0000000000..169545ae2d --- /dev/null +++ b/enterprise/server/auth/domain_blocker.py @@ -0,0 +1,56 @@ +from server.auth.constants import BLOCKED_EMAIL_DOMAINS + +from openhands.core.logger import openhands_logger as logger + + +class DomainBlocker: + def __init__(self) -> None: + logger.debug('Initializing DomainBlocker') + self.blocked_domains: list[str] = BLOCKED_EMAIL_DOMAINS + if self.blocked_domains: + logger.info( + f'Successfully loaded {len(self.blocked_domains)} blocked email domains: {self.blocked_domains}' + ) + + def is_active(self) -> bool: + """Check if domain blocking is enabled""" + return bool(self.blocked_domains) + + def _extract_domain(self, email: str) -> str | None: + """Extract and normalize email domain from email address""" + if not email: + return None + try: + # Extract domain part after @ + if '@' not in email: + return None + domain = email.split('@')[1].strip().lower() + return domain if domain else None + except Exception: + logger.debug(f'Error extracting domain from email: {email}', exc_info=True) + return None + + def is_domain_blocked(self, email: str) -> bool: + """Check if email domain is blocked""" + if not self.is_active(): + return False + + if not email: + logger.debug('No email provided for domain check') + return False + + domain = self._extract_domain(email) + if not domain: + logger.debug(f'Could not extract domain from email: {email}') + return False + + is_blocked = domain in self.blocked_domains + if is_blocked: + logger.warning(f'Email domain {domain} is blocked for email: {email}') + else: + logger.debug(f'Email domain {domain} is not blocked') + + return is_blocked + + +domain_blocker = DomainBlocker() diff --git a/enterprise/server/auth/saas_user_auth.py b/enterprise/server/auth/saas_user_auth.py index 2f399a74cf..b51d336997 100644 --- a/enterprise/server/auth/saas_user_auth.py +++ b/enterprise/server/auth/saas_user_auth.py @@ -13,6 +13,7 @@ from server.auth.auth_error import ( ExpiredError, NoCredentialsError, ) +from server.auth.domain_blocker import domain_blocker from server.auth.token_manager import TokenManager from server.config import get_config from server.logger import logger @@ -312,6 +313,16 @@ async def saas_user_auth_from_signed_token(signed_token: str) -> SaasUserAuth: user_id = access_token_payload['sub'] email = access_token_payload['email'] email_verified = access_token_payload['email_verified'] + + # Check if email domain is blocked + if email and domain_blocker.is_active() and domain_blocker.is_domain_blocked(email): + logger.warning( + f'Blocked authentication attempt for existing user with email: {email}' + ) + raise AuthError( + 'Access denied: Your email domain is not allowed to access this service' + ) + logger.debug('saas_user_auth_from_signed_token:return') return SaasUserAuth( diff --git a/enterprise/server/auth/token_manager.py b/enterprise/server/auth/token_manager.py index 0b873bc7fc..04bfae0767 100644 --- a/enterprise/server/auth/token_manager.py +++ b/enterprise/server/auth/token_manager.py @@ -527,6 +527,49 @@ class TokenManager: github_id = github_ids[0] return github_id + async def disable_keycloak_user( + self, user_id: str, email: str | None = None + ) -> None: + """Disable a Keycloak user account. + + Args: + user_id: The Keycloak user ID to disable + email: Optional email address for logging purposes + + This method attempts to disable the user account but will not raise exceptions. + Errors are logged but do not prevent the operation from completing. + """ + try: + keycloak_admin = get_keycloak_admin(self.external) + # Get current user to preserve other fields + user = await keycloak_admin.a_get_user(user_id) + if user: + # Update user with enabled=False to disable the account + await keycloak_admin.a_update_user( + user_id=user_id, + payload={ + 'enabled': False, + 'username': user.get('username', ''), + 'email': user.get('email', ''), + 'emailVerified': user.get('emailVerified', False), + }, + ) + email_str = f', email: {email}' if email else '' + logger.info( + f'Disabled Keycloak account for user_id: {user_id}{email_str}' + ) + else: + logger.warning( + f'User not found in Keycloak when attempting to disable: {user_id}' + ) + except Exception as e: + # Log error but don't raise - the caller should handle the blocking regardless + email_str = f', email: {email}' if email else '' + logger.error( + f'Failed to disable Keycloak account for user_id: {user_id}{email_str}: {str(e)}', + exc_info=True, + ) + def store_org_token(self, installation_id: int, installation_token: str): """Store a GitHub App installation token. diff --git a/enterprise/server/routes/auth.py b/enterprise/server/routes/auth.py index ba7aadb883..2ee50bbd2d 100644 --- a/enterprise/server/routes/auth.py +++ b/enterprise/server/routes/auth.py @@ -14,6 +14,7 @@ from server.auth.constants import ( KEYCLOAK_SERVER_URL_EXT, ROLE_CHECK_ENABLED, ) +from server.auth.domain_blocker import domain_blocker from server.auth.gitlab_sync import schedule_gitlab_repo_sync from server.auth.saas_user_auth import SaasUserAuth from server.auth.token_manager import TokenManager @@ -145,7 +146,24 @@ async def keycloak_callback( content={'error': 'Missing user ID or username in response'}, ) + # Check if email domain is blocked + email = user_info.get('email') user_id = user_info['sub'] + if email and domain_blocker.is_active() and domain_blocker.is_domain_blocked(email): + logger.warning( + f'Blocked authentication attempt for email: {email}, user_id: {user_id}' + ) + + # Disable the Keycloak account + await token_manager.disable_keycloak_user(user_id, email) + + return JSONResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content={ + 'error': 'Access denied: Your email domain is not allowed to access this service' + }, + ) + # default to github IDP for now. # TODO: remove default once Keycloak is updated universally with the new attribute. idp: str = user_info.get('identity_provider', ProviderType.GITHUB.value) diff --git a/enterprise/tests/unit/test_auth_routes.py b/enterprise/tests/unit/test_auth_routes.py index bf74f0055c..d3e8f47fbe 100644 --- a/enterprise/tests/unit/test_auth_routes.py +++ b/enterprise/tests/unit/test_auth_routes.py @@ -442,3 +442,196 @@ async def test_logout_without_refresh_token(): mock_token_manager.logout.assert_not_called() assert 'set-cookie' in result.headers + + +@pytest.mark.asyncio +async def test_keycloak_callback_blocked_email_domain(mock_request): + """Test keycloak_callback when email domain is blocked.""" + # Arrange + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.domain_blocker') as mock_domain_blocker, + ): + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'user@colsch.us', + 'identity_provider': 'github', + } + ) + mock_token_manager.disable_keycloak_user = AsyncMock() + + mock_domain_blocker.is_active.return_value = True + mock_domain_blocker.is_domain_blocked.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, JSONResponse) + assert result.status_code == status.HTTP_401_UNAUTHORIZED + assert 'error' in result.body.decode() + assert 'email domain is not allowed' in result.body.decode() + mock_domain_blocker.is_domain_blocked.assert_called_once_with('user@colsch.us') + mock_token_manager.disable_keycloak_user.assert_called_once_with( + 'test_user_id', 'user@colsch.us' + ) + + +@pytest.mark.asyncio +async def test_keycloak_callback_allowed_email_domain(mock_request): + """Test keycloak_callback when email domain is not blocked.""" + # Arrange + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.domain_blocker') as mock_domain_blocker, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'user@example.com', + 'identity_provider': 'github', + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_domain_blocker.is_active.return_value = True + mock_domain_blocker.is_domain_blocked.return_value = False + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + mock_domain_blocker.is_domain_blocked.assert_called_once_with( + 'user@example.com' + ) + mock_token_manager.disable_keycloak_user.assert_not_called() + + +@pytest.mark.asyncio +async def test_keycloak_callback_domain_blocking_inactive(mock_request): + """Test keycloak_callback when domain blocking is not active.""" + # Arrange + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.domain_blocker') as mock_domain_blocker, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'user@colsch.us', + 'identity_provider': 'github', + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_domain_blocker.is_active.return_value = False + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + mock_domain_blocker.is_domain_blocked.assert_not_called() + mock_token_manager.disable_keycloak_user.assert_not_called() + + +@pytest.mark.asyncio +async def test_keycloak_callback_missing_email(mock_request): + """Test keycloak_callback when user info does not contain email.""" + # Arrange + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.domain_blocker') as mock_domain_blocker, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'identity_provider': 'github', + # No email field + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_domain_blocker.is_active.return_value = True + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + mock_domain_blocker.is_domain_blocked.assert_not_called() + mock_token_manager.disable_keycloak_user.assert_not_called() diff --git a/enterprise/tests/unit/test_domain_blocker.py b/enterprise/tests/unit/test_domain_blocker.py new file mode 100644 index 0000000000..e199a80b9b --- /dev/null +++ b/enterprise/tests/unit/test_domain_blocker.py @@ -0,0 +1,181 @@ +"""Unit tests for DomainBlocker class.""" + +import pytest +from server.auth.domain_blocker import DomainBlocker + + +@pytest.fixture +def domain_blocker(): + """Create a DomainBlocker instance for testing.""" + return DomainBlocker() + + +@pytest.mark.parametrize( + 'blocked_domains,expected', + [ + (['colsch.us', 'other-domain.com'], True), + (['example.com'], True), + ([], False), + ], +) +def test_is_active(domain_blocker, blocked_domains, expected): + """Test that is_active returns correct value based on blocked domains configuration.""" + # Arrange + domain_blocker.blocked_domains = blocked_domains + + # Act + result = domain_blocker.is_active() + + # Assert + assert result == expected + + +@pytest.mark.parametrize( + 'email,expected_domain', + [ + ('user@example.com', 'example.com'), + ('test@colsch.us', 'colsch.us'), + ('user.name@other-domain.com', 'other-domain.com'), + ('USER@EXAMPLE.COM', 'example.com'), # Case insensitive + ('user@EXAMPLE.COM', 'example.com'), + (' user@example.com ', 'example.com'), # Whitespace handling + ], +) +def test_extract_domain_valid_emails(domain_blocker, email, expected_domain): + """Test that _extract_domain correctly extracts and normalizes domains from valid emails.""" + # Act + result = domain_blocker._extract_domain(email) + + # Assert + assert result == expected_domain + + +@pytest.mark.parametrize( + 'email,expected', + [ + (None, None), + ('', None), + ('invalid-email', None), + ('user@', None), # Empty domain after @ + ('no-at-sign', None), + ], +) +def test_extract_domain_invalid_emails(domain_blocker, email, expected): + """Test that _extract_domain returns None for invalid email formats.""" + # Act + result = domain_blocker._extract_domain(email) + + # Assert + assert result == expected + + +def test_is_domain_blocked_when_inactive(domain_blocker): + """Test that is_domain_blocked returns False when blocking is not active.""" + # Arrange + domain_blocker.blocked_domains = [] + + # Act + result = domain_blocker.is_domain_blocked('user@colsch.us') + + # Assert + assert result is False + + +def test_is_domain_blocked_with_none_email(domain_blocker): + """Test that is_domain_blocked returns False when email is None.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us'] + + # Act + result = domain_blocker.is_domain_blocked(None) + + # Assert + assert result is False + + +def test_is_domain_blocked_with_empty_email(domain_blocker): + """Test that is_domain_blocked returns False when email is empty.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us'] + + # Act + result = domain_blocker.is_domain_blocked('') + + # Assert + assert result is False + + +def test_is_domain_blocked_with_invalid_email(domain_blocker): + """Test that is_domain_blocked returns False when email format is invalid.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us'] + + # Act + result = domain_blocker.is_domain_blocked('invalid-email') + + # Assert + assert result is False + + +def test_is_domain_blocked_domain_not_blocked(domain_blocker): + """Test that is_domain_blocked returns False when domain is not in blocked list.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com'] + + # Act + result = domain_blocker.is_domain_blocked('user@example.com') + + # Assert + assert result is False + + +def test_is_domain_blocked_domain_blocked(domain_blocker): + """Test that is_domain_blocked returns True when domain is in blocked list.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com'] + + # Act + result = domain_blocker.is_domain_blocked('user@colsch.us') + + # Assert + assert result is True + + +def test_is_domain_blocked_case_insensitive(domain_blocker): + """Test that is_domain_blocked performs case-insensitive domain matching.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us'] + + # Act + result = domain_blocker.is_domain_blocked('user@COLSCH.US') + + # Assert + assert result is True + + +def test_is_domain_blocked_multiple_blocked_domains(domain_blocker): + """Test that is_domain_blocked correctly checks against multiple blocked domains.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us', 'other-domain.com', 'blocked.org'] + + # Act + result1 = domain_blocker.is_domain_blocked('user@other-domain.com') + result2 = domain_blocker.is_domain_blocked('user@blocked.org') + result3 = domain_blocker.is_domain_blocked('user@allowed.com') + + # Assert + assert result1 is True + assert result2 is True + assert result3 is False + + +def test_is_domain_blocked_with_whitespace(domain_blocker): + """Test that is_domain_blocked handles emails with whitespace correctly.""" + # Arrange + domain_blocker.blocked_domains = ['colsch.us'] + + # Act + result = domain_blocker.is_domain_blocked(' user@colsch.us ') + + # Assert + assert result is True diff --git a/enterprise/tests/unit/test_saas_user_auth.py b/enterprise/tests/unit/test_saas_user_auth.py index d4ba902677..a518beb28e 100644 --- a/enterprise/tests/unit/test_saas_user_auth.py +++ b/enterprise/tests/unit/test_saas_user_auth.py @@ -5,7 +5,12 @@ import jwt import pytest from fastapi import Request from pydantic import SecretStr -from server.auth.auth_error import BearerTokenError, CookieError, NoCredentialsError +from server.auth.auth_error import ( + AuthError, + BearerTokenError, + CookieError, + NoCredentialsError, +) from server.auth.saas_user_auth import ( SaasUserAuth, get_api_key_from_header, @@ -647,3 +652,97 @@ def test_get_api_key_from_header_bearer_with_empty_token(): # Assert that empty string from Bearer is returned (current behavior) # This tests the current implementation behavior assert api_key == '' + + +@pytest.mark.asyncio +async def test_saas_user_auth_from_signed_token_blocked_domain(mock_config): + """Test that saas_user_auth_from_signed_token raises AuthError when email domain is blocked.""" + # Arrange + access_payload = { + 'sub': 'test_user_id', + 'exp': int(time.time()) + 3600, + 'email': 'user@colsch.us', + 'email_verified': True, + } + access_token = jwt.encode(access_payload, 'access_secret', algorithm='HS256') + + token_payload = { + 'access_token': access_token, + 'refresh_token': 'test_refresh_token', + } + signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256') + + with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker: + mock_domain_blocker.is_active.return_value = True + mock_domain_blocker.is_domain_blocked.return_value = True + + # Act & Assert + with pytest.raises(AuthError) as exc_info: + await saas_user_auth_from_signed_token(signed_token) + + assert 'email domain is not allowed' in str(exc_info.value) + mock_domain_blocker.is_domain_blocked.assert_called_once_with('user@colsch.us') + + +@pytest.mark.asyncio +async def test_saas_user_auth_from_signed_token_allowed_domain(mock_config): + """Test that saas_user_auth_from_signed_token succeeds when email domain is not blocked.""" + # Arrange + access_payload = { + 'sub': 'test_user_id', + 'exp': int(time.time()) + 3600, + 'email': 'user@example.com', + 'email_verified': True, + } + access_token = jwt.encode(access_payload, 'access_secret', algorithm='HS256') + + token_payload = { + 'access_token': access_token, + 'refresh_token': 'test_refresh_token', + } + signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256') + + with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker: + mock_domain_blocker.is_active.return_value = True + mock_domain_blocker.is_domain_blocked.return_value = False + + # Act + result = await saas_user_auth_from_signed_token(signed_token) + + # Assert + assert isinstance(result, SaasUserAuth) + assert result.user_id == 'test_user_id' + assert result.email == 'user@example.com' + mock_domain_blocker.is_domain_blocked.assert_called_once_with( + 'user@example.com' + ) + + +@pytest.mark.asyncio +async def test_saas_user_auth_from_signed_token_domain_blocking_inactive(mock_config): + """Test that saas_user_auth_from_signed_token succeeds when domain blocking is not active.""" + # Arrange + access_payload = { + 'sub': 'test_user_id', + 'exp': int(time.time()) + 3600, + 'email': 'user@colsch.us', + 'email_verified': True, + } + access_token = jwt.encode(access_payload, 'access_secret', algorithm='HS256') + + token_payload = { + 'access_token': access_token, + 'refresh_token': 'test_refresh_token', + } + signed_token = jwt.encode(token_payload, 'test_secret', algorithm='HS256') + + with patch('server.auth.saas_user_auth.domain_blocker') as mock_domain_blocker: + mock_domain_blocker.is_active.return_value = False + + # Act + result = await saas_user_auth_from_signed_token(signed_token) + + # Assert + assert isinstance(result, SaasUserAuth) + assert result.user_id == 'test_user_id' + mock_domain_blocker.is_domain_blocked.assert_not_called() diff --git a/enterprise/tests/unit/test_token_manager_extended.py b/enterprise/tests/unit/test_token_manager_extended.py index 744f208b02..c3b09434a3 100644 --- a/enterprise/tests/unit/test_token_manager_extended.py +++ b/enterprise/tests/unit/test_token_manager_extended.py @@ -1,4 +1,4 @@ -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch import pytest from server.auth.token_manager import TokenManager, create_encryption_utility @@ -246,3 +246,103 @@ async def test_refresh(token_manager): mock_keycloak.return_value.a_refresh_token.assert_called_once_with( 'test_refresh_token' ) + + +@pytest.mark.asyncio +async def test_disable_keycloak_user_success(token_manager): + """Test successful disabling of a Keycloak user account.""" + # Arrange + user_id = 'test_user_id' + email = 'user@colsch.us' + mock_user = { + 'id': user_id, + 'username': 'testuser', + 'email': email, + 'emailVerified': True, + } + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_user = AsyncMock(return_value=mock_user) + mock_admin.a_update_user = AsyncMock() + mock_get_admin.return_value = mock_admin + + # Act + await token_manager.disable_keycloak_user(user_id, email) + + # Assert + mock_admin.a_get_user.assert_called_once_with(user_id) + mock_admin.a_update_user.assert_called_once_with( + user_id=user_id, + payload={ + 'enabled': False, + 'username': 'testuser', + 'email': email, + 'emailVerified': True, + }, + ) + + +@pytest.mark.asyncio +async def test_disable_keycloak_user_without_email(token_manager): + """Test disabling Keycloak user without providing email.""" + # Arrange + user_id = 'test_user_id' + mock_user = { + 'id': user_id, + 'username': 'testuser', + 'email': 'user@example.com', + 'emailVerified': False, + } + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_user = AsyncMock(return_value=mock_user) + mock_admin.a_update_user = AsyncMock() + mock_get_admin.return_value = mock_admin + + # Act + await token_manager.disable_keycloak_user(user_id) + + # Assert + mock_admin.a_get_user.assert_called_once_with(user_id) + mock_admin.a_update_user.assert_called_once() + + +@pytest.mark.asyncio +async def test_disable_keycloak_user_not_found(token_manager): + """Test disabling Keycloak user when user is not found.""" + # Arrange + user_id = 'nonexistent_user_id' + email = 'user@colsch.us' + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_user = AsyncMock(return_value=None) + mock_get_admin.return_value = mock_admin + + # Act + await token_manager.disable_keycloak_user(user_id, email) + + # Assert + mock_admin.a_get_user.assert_called_once_with(user_id) + mock_admin.a_update_user.assert_not_called() + + +@pytest.mark.asyncio +async def test_disable_keycloak_user_exception_handling(token_manager): + """Test that disable_keycloak_user handles exceptions gracefully without raising.""" + # Arrange + user_id = 'test_user_id' + email = 'user@colsch.us' + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_user = AsyncMock(side_effect=Exception('Connection error')) + mock_get_admin.return_value = mock_admin + + # Act & Assert - should not raise exception + await token_manager.disable_keycloak_user(user_id, email) + + # Verify the method was called + mock_admin.a_get_user.assert_called_once_with(user_id) From 10edb28729c13a5b2543474c651e19d1aad09933 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 22 Dec 2025 23:00:57 +0700 Subject: [PATCH 47/80] fix(frontend): llm settings view resets to basic after saving (#12097) --- .../__tests__/routes/llm-settings.test.tsx | 156 ++++++++++++++++++ .../utils/has-advanced-settings-set.test.ts | 70 ++++++++ .../src/utils/has-advanced-settings-set.ts | 48 +++++- 3 files changed, 271 insertions(+), 3 deletions(-) diff --git a/frontend/__tests__/routes/llm-settings.test.tsx b/frontend/__tests__/routes/llm-settings.test.tsx index 3942cc8fc1..68e44d73e9 100644 --- a/frontend/__tests__/routes/llm-settings.test.tsx +++ b/frontend/__tests__/routes/llm-settings.test.tsx @@ -910,6 +910,162 @@ describe("Form submission", () => { }); }); +describe("View persistence after saving advanced settings", () => { + it("should remain on Advanced view after saving when memory condenser is disabled", async () => { + // Arrange: Start with default settings (basic view) + const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + }); + const saveSettingsSpy = vi.spyOn(SettingsService, "saveSettings"); + saveSettingsSpy.mockResolvedValue(true); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + // Verify we start in basic view + expect(screen.getByTestId("llm-settings-form-basic")).toBeInTheDocument(); + + // Act: User manually switches to Advanced view + const advancedSwitch = screen.getByTestId("advanced-settings-switch"); + await userEvent.click(advancedSwitch); + await screen.findByTestId("llm-settings-form-advanced"); + + // User disables memory condenser (advanced-only setting) + const condenserSwitch = screen.getByTestId( + "enable-memory-condenser-switch", + ); + expect(condenserSwitch).toBeChecked(); + await userEvent.click(condenserSwitch); + expect(condenserSwitch).not.toBeChecked(); + + // Mock the updated settings that will be returned after save + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + enable_default_condenser: false, // Now disabled + }); + + // User saves settings + const submitButton = screen.getByTestId("submit-button"); + await userEvent.click(submitButton); + + // Assert: View should remain on Advanced after save + await waitFor(() => { + expect( + screen.getByTestId("llm-settings-form-advanced"), + ).toBeInTheDocument(); + expect( + screen.queryByTestId("llm-settings-form-basic"), + ).not.toBeInTheDocument(); + expect(advancedSwitch).toBeChecked(); + }); + }); + + it("should remain on Advanced view after saving when condenser max size is customized", async () => { + // Arrange: Start with default settings + const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + }); + const saveSettingsSpy = vi.spyOn(SettingsService, "saveSettings"); + saveSettingsSpy.mockResolvedValue(true); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + // Act: User manually switches to Advanced view + const advancedSwitch = screen.getByTestId("advanced-settings-switch"); + await userEvent.click(advancedSwitch); + await screen.findByTestId("llm-settings-form-advanced"); + + // User sets custom condenser max size (advanced-only setting) + const condenserMaxSizeInput = screen.getByTestId( + "condenser-max-size-input", + ); + await userEvent.clear(condenserMaxSizeInput); + await userEvent.type(condenserMaxSizeInput, "200"); + + // Mock the updated settings that will be returned after save + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + condenser_max_size: 200, // Custom value + }); + + // User saves settings + const submitButton = screen.getByTestId("submit-button"); + await userEvent.click(submitButton); + + // Assert: View should remain on Advanced after save + await waitFor(() => { + expect( + screen.getByTestId("llm-settings-form-advanced"), + ).toBeInTheDocument(); + expect( + screen.queryByTestId("llm-settings-form-basic"), + ).not.toBeInTheDocument(); + expect(advancedSwitch).toBeChecked(); + }); + }); + + it("should remain on Advanced view after saving when search API key is set", async () => { + // Arrange: Start with default settings (non-SaaS mode to show search API key field) + const getConfigSpy = vi.spyOn(OptionService, "getConfig"); + getConfigSpy.mockResolvedValue({ + APP_MODE: "oss", + GITHUB_CLIENT_ID: "fake-github-client-id", + POSTHOG_CLIENT_KEY: "fake-posthog-client-key", + FEATURE_FLAGS: { + ENABLE_BILLING: false, + HIDE_LLM_SETTINGS: false, + ENABLE_JIRA: false, + ENABLE_JIRA_DC: false, + ENABLE_LINEAR: false, + }, + }); + + const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + search_api_key: "", // Default empty value + }); + const saveSettingsSpy = vi.spyOn(SettingsService, "saveSettings"); + saveSettingsSpy.mockResolvedValue(true); + + renderLlmSettingsScreen(); + await screen.findByTestId("llm-settings-screen"); + + // Act: User manually switches to Advanced view + const advancedSwitch = screen.getByTestId("advanced-settings-switch"); + await userEvent.click(advancedSwitch); + await screen.findByTestId("llm-settings-form-advanced"); + + // User sets search API key (advanced-only setting) + const searchApiKeyInput = screen.getByTestId("search-api-key-input"); + await userEvent.type(searchApiKeyInput, "test-search-api-key"); + + // Mock the updated settings that will be returned after save + getSettingsSpy.mockResolvedValue({ + ...MOCK_DEFAULT_USER_SETTINGS, + search_api_key: "test-search-api-key", // Now set + }); + + // User saves settings + const submitButton = screen.getByTestId("submit-button"); + await userEvent.click(submitButton); + + // Assert: View should remain on Advanced after save + await waitFor(() => { + expect( + screen.getByTestId("llm-settings-form-advanced"), + ).toBeInTheDocument(); + expect( + screen.queryByTestId("llm-settings-form-basic"), + ).not.toBeInTheDocument(); + expect(advancedSwitch).toBeChecked(); + }); + }); +}); + describe("Status toasts", () => { describe("Basic form", () => { it("should call displaySuccessToast when the settings are saved", async () => { diff --git a/frontend/__tests__/utils/has-advanced-settings-set.test.ts b/frontend/__tests__/utils/has-advanced-settings-set.test.ts index 36c7a7b609..be928262d1 100644 --- a/frontend/__tests__/utils/has-advanced-settings-set.test.ts +++ b/frontend/__tests__/utils/has-advanced-settings-set.test.ts @@ -29,5 +29,75 @@ describe("hasAdvancedSettingsSet", () => { }), ).toBe(true); }); + + test("enable_default_condenser is disabled", () => { + // Arrange + const settings = { + ...DEFAULT_SETTINGS, + enable_default_condenser: false, + }; + + // Act + const result = hasAdvancedSettingsSet(settings); + + // Assert + expect(result).toBe(true); + }); + + test("condenser_max_size is customized above default", () => { + // Arrange + const settings = { + ...DEFAULT_SETTINGS, + condenser_max_size: 200, + }; + + // Act + const result = hasAdvancedSettingsSet(settings); + + // Assert + expect(result).toBe(true); + }); + + test("condenser_max_size is customized below default", () => { + // Arrange + const settings = { + ...DEFAULT_SETTINGS, + condenser_max_size: 50, + }; + + // Act + const result = hasAdvancedSettingsSet(settings); + + // Assert + expect(result).toBe(true); + }); + + test("search_api_key is set to non-empty value", () => { + // Arrange + const settings = { + ...DEFAULT_SETTINGS, + search_api_key: "test-api-key-123", + }; + + // Act + const result = hasAdvancedSettingsSet(settings); + + // Assert + expect(result).toBe(true); + }); + + test("search_api_key with whitespace is treated as set", () => { + // Arrange + const settings = { + ...DEFAULT_SETTINGS, + search_api_key: " test-key ", + }; + + // Act + const result = hasAdvancedSettingsSet(settings); + + // Assert + expect(result).toBe(true); + }); }); }); diff --git a/frontend/src/utils/has-advanced-settings-set.ts b/frontend/src/utils/has-advanced-settings-set.ts index b873425239..8e3de2be9c 100644 --- a/frontend/src/utils/has-advanced-settings-set.ts +++ b/frontend/src/utils/has-advanced-settings-set.ts @@ -1,6 +1,48 @@ import { DEFAULT_SETTINGS } from "#/services/settings"; import { Settings } from "#/types/settings"; -export const hasAdvancedSettingsSet = (settings: Partial): boolean => - Object.keys(settings).length > 0 && - (!!settings.llm_base_url || settings.agent !== DEFAULT_SETTINGS.agent); +/** + * Determines if any advanced-only settings are configured. + * Advanced-only settings are those that appear only in the Advanced Settings view + * and not in the Basic Settings view. + * + * Advanced-only fields: + * - llm_base_url: Custom base URL for LLM API + * - agent: Custom agent selection (when not using default) + * - enable_default_condenser: Memory condenser toggle (when disabled, as default is enabled) + * - condenser_max_size: Custom condenser size (when different from default) + * - search_api_key: Search API key (when set) + */ +export const hasAdvancedSettingsSet = ( + settings: Partial, +): boolean => { + if (Object.keys(settings).length === 0) { + return false; + } + + // Check for advanced-only settings that differ from defaults + const hasBaseUrl = + !!settings.llm_base_url && settings.llm_base_url.trim() !== ""; + const hasCustomAgent = + settings.agent !== undefined && settings.agent !== DEFAULT_SETTINGS.agent; + // Default is true, so only check if explicitly disabled + const hasDisabledCondenser = settings.enable_default_condenser === false; + // Check if condenser size differs from default (default is 120) + const hasCustomCondenserSize = + settings.condenser_max_size !== undefined && + settings.condenser_max_size !== null && + settings.condenser_max_size !== DEFAULT_SETTINGS.condenser_max_size; + // Check if search API key is set (non-empty string) + const hasSearchApiKey = + settings.search_api_key !== undefined && + settings.search_api_key !== null && + settings.search_api_key.trim() !== ""; + + return ( + hasBaseUrl || + hasCustomAgent || + hasDisabledCondenser || + hasCustomCondenserSize || + hasSearchApiKey + ); +}; From 2b8f779b6561fef3887906176b222bd827e547d5 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Mon, 22 Dec 2025 10:04:10 -0700 Subject: [PATCH 48/80] fix: Runtime pods fail to start due to missing Playwright browser path (#12130) --- enterprise/server/saas_nested_conversation_manager.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/enterprise/server/saas_nested_conversation_manager.py b/enterprise/server/saas_nested_conversation_manager.py index d92e67b9ff..59c6d4e981 100644 --- a/enterprise/server/saas_nested_conversation_manager.py +++ b/enterprise/server/saas_nested_conversation_manager.py @@ -804,6 +804,8 @@ class SaasNestedConversationManager(ConversationManager): env_vars['ENABLE_V1'] = '0' env_vars['SU_TO_USER'] = SU_TO_USER env_vars['DISABLE_VSCODE_PLUGIN'] = str(DISABLE_VSCODE_PLUGIN).lower() + env_vars['BROWSERGYM_DOWNLOAD_DIR'] = '/workspace/.downloads/' + env_vars['PLAYWRIGHT_BROWSERS_PATH'] = '/opt/playwright-browsers' # We need this for LLM traces tracking to identify the source of the LLM calls env_vars['WEB_HOST'] = WEB_HOST From a9d2f72d72994147436b75fa5a45e182c2515f6f Mon Sep 17 00:00:00 2001 From: "sp.wack" <83104063+amanape@users.noreply.github.com> Date: Tue, 23 Dec 2025 16:32:27 +0400 Subject: [PATCH 49/80] docs(frontend): Add MSW testing guide for frontend development (#12131) --- frontend/__tests__/MSW.md | 146 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 frontend/__tests__/MSW.md diff --git a/frontend/__tests__/MSW.md b/frontend/__tests__/MSW.md new file mode 100644 index 0000000000..f240c5a8df --- /dev/null +++ b/frontend/__tests__/MSW.md @@ -0,0 +1,146 @@ +# Mock Service Worker (MSW) Guide + +## Overview + +[Mock Service Worker (MSW)](https://mswjs.io/) is an API mocking library that intercepts outgoing network requests at the network level. Unlike traditional mocking that patches `fetch` or `axios`, MSW uses a Service Worker in the browser and direct request interception in Node.js—making mocks transparent to your application code. + +We use MSW in this project for: +- **Testing**: Write reliable unit and integration tests without real network calls +- **Development**: Run the frontend with mocked APIs when the backend isn't available or when working on features with pending backend APIs + +The same mock handlers work in both environments, so you write them once and reuse everywhere. + +## Relevant Files + +- `src/mocks/handlers.ts` - Main handler registry that combines all domain handlers +- `src/mocks/*-handlers.ts` - Domain-specific handlers (auth, billing, conversation, etc.) +- `src/mocks/browser.ts` - Browser setup for development mode +- `src/mocks/node.ts` - Node.js setup for tests +- `vitest.setup.ts` - Global test setup with MSW lifecycle hooks + +## Development Workflow + +### Running with Mocked APIs + +```sh +# Run with API mocking enabled +npm run dev:mock + +# Run with API mocking + SaaS mode simulation +npm run dev:mock:saas +``` + +These commands set `VITE_MOCK_API=true` which activates the MSW Service Worker to intercept requests. + +> [!NOTE] +> **OSS vs SaaS Mode** +> +> OpenHands runs in two modes: +> - **OSS mode**: For local/self-hosted deployments where users provide their own LLM API keys and configure git providers manually +> - **SaaS mode**: For the cloud offering with billing, managed API keys, and OAuth-based GitHub integration +> +> Use `dev:mock:saas` when working on SaaS-specific features like billing, API key management, or subscription flows. + + +## Writing Tests + +### Service Layer Mocking (Recommended) + +For most tests, mock at the service layer using `vi.spyOn`. This approach is explicit, test-scoped, and makes the scenario being tested clear. + +```typescript +import { vi } from "vitest"; +import SettingsService from "#/api/settings-service/settings-service.api"; + +const getSettingsSpy = vi.spyOn(SettingsService, "getSettings"); +getSettingsSpy.mockResolvedValue({ + llm_model: "openai/gpt-4o", + llm_api_key_set: true, + // ... other settings +}); +``` + +Use `mockResolvedValue` for success scenarios and `mockRejectedValue` for error scenarios: + +```typescript +getSettingsSpy.mockRejectedValue(new Error("Failed to fetch settings")); +``` + +### Network Layer Mocking (Advanced) + +For tests that need actual network-level behavior (WebSockets, testing retry logic, etc.), use `server.use()` to override handlers per test. + +> [!IMPORTANT] +> **Reuse the global server instance** - Don't create new `setupServer()` calls in individual tests. The project already has a global MSW server configured in `vitest.setup.ts` that handles lifecycle (`server.listen()`, `server.resetHandlers()`, `server.close()`). Use `server.use()` to add runtime handlers for specific test scenarios. + +```typescript +import { http, HttpResponse } from "msw"; +import { server } from "#/mocks/node"; + +it("should handle server errors", async () => { + server.use( + http.get("/api/my-endpoint", () => { + return new HttpResponse(null, { status: 500 }); + }), + ); + // ... test code +}); +``` + +For WebSocket testing, see `__tests__/helpers/msw-websocket-setup.ts` for utilities. + +## Adding New API Mocks + +When adding new API endpoints, create mocks in both places to maintain 1:1 similarity with the backend: + +### 1. Add to `src/mocks/` (for development) + +Create or update a domain-specific handler file: + +```typescript +// src/mocks/my-feature-handlers.ts +import { http, HttpResponse } from "msw"; + +export const MY_FEATURE_HANDLERS = [ + http.get("/api/my-feature", () => { + return HttpResponse.json({ + data: "mock response", + }); + }), +]; +``` + +Register in `handlers.ts`: + +```typescript +import { MY_FEATURE_HANDLERS } from "./my-feature-handlers"; + +export const handlers = [ + // ... existing handlers + ...MY_FEATURE_HANDLERS, +]; +``` + +### 2. Mock in tests for specific scenarios + +In your test files, spy on the service method to control responses per test case: + +```typescript +import { vi } from "vitest"; +import MyFeatureService from "#/api/my-feature-service.api"; + +const spy = vi.spyOn(MyFeatureService, "getData"); +spy.mockResolvedValue({ data: "test-specific response" }); +``` + +See `__tests__/routes/llm-settings.test.tsx` for a real-world example of service layer mocking. + +> [!TIP] +> For guidance on creating service APIs, see `src/api/README.md`. + +## Best Practices + +- **Keep mocks close to real API contracts** - Update mocks when backend changes +- **Use service layer mocking for most tests** - It's simpler and more explicit +- **Reserve network layer mocking for integration tests** - WebSockets, retry logic, etc. +- **Export mock data from handler files** - Reuse in tests (e.g., `MOCK_DEFAULT_USER_SETTINGS`) From fae83230eeb00ad3939ba8c7aa4df78d3ab5fbe4 Mon Sep 17 00:00:00 2001 From: "sp.wack" <83104063+amanape@users.noreply.github.com> Date: Tue, 23 Dec 2025 16:57:55 +0400 Subject: [PATCH 50/80] docs(frontend): Add API services guide for frontend development (#12132) --- frontend/src/api/README.md | 102 +++++++++++++++++++++++++++++++++++++ 1 file changed, 102 insertions(+) create mode 100644 frontend/src/api/README.md diff --git a/frontend/src/api/README.md b/frontend/src/api/README.md new file mode 100644 index 0000000000..a44c7c49ca --- /dev/null +++ b/frontend/src/api/README.md @@ -0,0 +1,102 @@ +# API Services Guide + +## Overview + +Services are the abstraction layer between frontend components and backend APIs. They encapsulate HTTP requests using the shared `openHands` axios instance and provide typed methods for each endpoint. + +Each service is a plain object with async methods. + +## Structure + +Each service lives in its own directory: + +``` +src/api/ +├── billing-service/ +│ ├── billing-service.api.ts # Service methods +│ └── billing.types.ts # Types and interfaces +├── organization-service/ +│ ├── organization-service.api.ts +│ └── organization.types.ts +└── open-hands-axios.ts # Shared axios instance +``` + +## Creating a Service + +Use an object literal with named export. Use object destructuring for parameters to make calls self-documenting. + +```typescript +// feature-service/feature-service.api.ts +import { openHands } from "../open-hands-axios"; +import { Feature, CreateFeatureParams } from "./feature.types"; + +export const featureService = { + getFeature: async ({ id }: { id: string }) => { + const { data } = await openHands.get(`/api/features/${id}`); + return data; + }, + + createFeature: async ({ name, description }: CreateFeatureParams) => { + const { data } = await openHands.post("/api/features", { + name, + description, + }); + return data; + }, +}; +``` + +### Types + +Define types in a separate file within the same directory: + +```typescript +// feature-service/feature.types.ts +export interface Feature { + id: string; + name: string; + description: string; +} + +export interface CreateFeatureParams { + name: string; + description: string; +} +``` + +## Usage + +> [!IMPORTANT] +> **Don't call services directly in components.** Wrap them in TanStack Query hooks. +> +> Why? TanStack Query provides: +> - **Caching** - Avoid redundant network requests +> - **Deduplication** - Multiple components requesting the same data share one request +> - **Loading/error states** - Built-in `isLoading`, `isError`, `data` states +> - **Background refetching** - Data stays fresh automatically +> +> Hooks location: +> - `src/hooks/query/` for data fetching (`useQuery`) +> - `src/hooks/mutation/` for writes/updates (`useMutation`) + +```typescript +// src/hooks/query/use-feature.ts +import { useQuery } from "@tanstack/react-query"; +import { featureService } from "#/api/feature-service/feature-service.api"; + +export const useFeature = (id: string) => { + return useQuery({ + queryKey: ["feature", id], + queryFn: () => featureService.getFeature({ id }), + }); +}; +``` + +## Naming Conventions + +| Item | Convention | Example | +|------|------------|---------| +| Directory | `feature-service/` | `billing-service/` | +| Service file | `feature-service.api.ts` | `billing-service.api.ts` | +| Types file | `feature.types.ts` | `billing.types.ts` | +| Export name | `featureService` | `billingService` | From f6e7628bffaaa4139069505191217f5bc57487f8 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Wed, 24 Dec 2025 01:48:05 +0700 Subject: [PATCH 51/80] feat: prevent signups using email addresses with a plus sign and enforce the existing email pattern (#12124) --- enterprise/server/auth/email_validation.py | 109 +++++ enterprise/server/auth/token_manager.py | 183 ++++++++ enterprise/server/routes/auth.py | 40 +- enterprise/tests/unit/test_auth_routes.py | 216 +++++++++ .../tests/unit/test_email_validation.py | 294 ++++++++++++ enterprise/tests/unit/test_token_manager.py | 428 +++++++++++++++++- .../components/features/auth-modal.test.tsx | 81 +++- .../features/waitlist/auth-modal.tsx | 8 + frontend/src/i18n/declaration.ts | 1 + frontend/src/i18n/translation.json | 16 + 10 files changed, 1361 insertions(+), 15 deletions(-) create mode 100644 enterprise/server/auth/email_validation.py create mode 100644 enterprise/tests/unit/test_email_validation.py diff --git a/enterprise/server/auth/email_validation.py b/enterprise/server/auth/email_validation.py new file mode 100644 index 0000000000..94c6f52c2a --- /dev/null +++ b/enterprise/server/auth/email_validation.py @@ -0,0 +1,109 @@ +"""Email validation utilities for preventing duplicate signups with + modifier.""" + +import re + + +def extract_base_email(email: str) -> str | None: + """Extract base email from an email address. + + For emails with + modifier, extracts the base email (local part before + and @, plus domain). + For emails without + modifier, returns the email as-is. + + Examples: + extract_base_email("joe+test@example.com") -> "joe@example.com" + extract_base_email("joe@example.com") -> "joe@example.com" + extract_base_email("joe+openhands+test@example.com") -> "joe@example.com" + + Args: + email: The email address to process + + Returns: + The base email address, or None if email format is invalid + """ + if not email or '@' not in email: + return None + + try: + local_part, domain = email.rsplit('@', 1) + # Extract the part before + if it exists + base_local = local_part.split('+', 1)[0] + return f'{base_local}@{domain}' + except (ValueError, AttributeError): + return None + + +def has_plus_modifier(email: str) -> bool: + """Check if an email address contains a + modifier. + + Args: + email: The email address to check + + Returns: + True if email contains + before @, False otherwise + """ + if not email or '@' not in email: + return False + + try: + local_part, _ = email.rsplit('@', 1) + return '+' in local_part + except (ValueError, AttributeError): + return False + + +def matches_base_email(email: str, base_email: str) -> bool: + """Check if an email matches a base email pattern. + + An email matches if: + - It is exactly the base email (e.g., joe@example.com) + - It has the same base local part and domain, with or without + modifier + (e.g., joe+test@example.com matches base joe@example.com) + + Args: + email: The email address to check + base_email: The base email to match against + + Returns: + True if email matches the base pattern, False otherwise + """ + if not email or not base_email: + return False + + # Extract base from both emails for comparison + email_base = extract_base_email(email) + base_email_normalized = extract_base_email(base_email) + + if not email_base or not base_email_normalized: + return False + + # Emails match if they have the same base + return email_base.lower() == base_email_normalized.lower() + + +def get_base_email_regex_pattern(base_email: str) -> re.Pattern | None: + """Generate a regex pattern to match emails with the same base. + + For base_email "joe@example.com", the pattern will match: + - joe@example.com + - joe+anything@example.com + + Args: + base_email: The base email address + + Returns: + A compiled regex pattern, or None if base_email is invalid + """ + base = extract_base_email(base_email) + if not base: + return None + + try: + local_part, domain = base.rsplit('@', 1) + # Escape special regex characters in local part and domain + escaped_local = re.escape(local_part) + escaped_domain = re.escape(domain) + # Pattern: joe@example.com OR joe+anything@example.com + pattern = rf'^{escaped_local}(\+[^@\s]+)?@{escaped_domain}$' + return re.compile(pattern, re.IGNORECASE) + except (ValueError, AttributeError): + return None diff --git a/enterprise/server/auth/token_manager.py b/enterprise/server/auth/token_manager.py index 04bfae0767..6061518cb4 100644 --- a/enterprise/server/auth/token_manager.py +++ b/enterprise/server/auth/token_manager.py @@ -1,3 +1,4 @@ +import asyncio import base64 import hashlib import json @@ -25,6 +26,11 @@ from server.auth.constants import ( KEYCLOAK_SERVER_URL, KEYCLOAK_SERVER_URL_EXT, ) +from server.auth.email_validation import ( + extract_base_email, + get_base_email_regex_pattern, + matches_base_email, +) from server.auth.keycloak_manager import get_keycloak_admin, get_keycloak_openid from server.config import get_config from server.logger import logger @@ -509,6 +515,183 @@ class TokenManager: logger.info(f'Got user ID {keycloak_user_id} from email: {email}') return keycloak_user_id + async def _query_users_by_wildcard_pattern( + self, local_part: str, domain: str + ) -> dict[str, dict]: + """Query Keycloak for users matching a wildcard email pattern. + + Tries multiple query methods to find users with emails matching + the pattern {local_part}*@{domain}. This catches the base email + and all + modifier variants. + + Args: + local_part: The local part of the email (before @) + domain: The domain part of the email (after @) + + Returns: + Dictionary mapping user IDs to user objects + """ + keycloak_admin = get_keycloak_admin(self.external) + all_users = {} + + # Query for users with emails matching the base pattern using wildcard + # Pattern: {local_part}*@{domain} - catches base email and all + variants + # This may also catch unintended matches (e.g., joesmith@example.com), but + # they will be filtered out by the regex pattern check later + # Use 'search' parameter for Keycloak 26+ (better wildcard support) + wildcard_queries = [ + {'search': f'{local_part}*@{domain}'}, # Try 'search' parameter first + {'q': f'email:{local_part}*@{domain}'}, # Fallback to 'q' parameter + ] + + for query_params in wildcard_queries: + try: + users = await keycloak_admin.a_get_users(query_params) + for user in users: + all_users[user.get('id')] = user + break # Success, no need to try fallback + except Exception as e: + logger.debug( + f'Wildcard query failed with {list(query_params.keys())[0]}: {e}' + ) + continue # Try next query method + + return all_users + + def _find_duplicate_in_users( + self, users: dict[str, dict], base_email: str, current_user_id: str + ) -> bool: + """Check if any user in the provided list matches the base email pattern. + + Filters users to find duplicates that match the base email pattern, + excluding the current user. + + Args: + users: Dictionary mapping user IDs to user objects + base_email: The base email to match against + current_user_id: The user ID to exclude from the check + + Returns: + True if a duplicate is found, False otherwise + """ + regex_pattern = get_base_email_regex_pattern(base_email) + if not regex_pattern: + logger.warning( + f'Could not generate regex pattern for base email: {base_email}' + ) + # Fallback to simple matching + for user in users.values(): + user_email = user.get('email', '').lower() + if ( + user_email + and user.get('id') != current_user_id + and matches_base_email(user_email, base_email) + ): + logger.info( + f'Found duplicate email: {user_email} matches base {base_email}' + ) + return True + else: + for user in users.values(): + user_email = user.get('email', '') + if ( + user_email + and user.get('id') != current_user_id + and regex_pattern.match(user_email) + ): + logger.info( + f'Found duplicate email: {user_email} matches base {base_email}' + ) + return True + + return False + + @retry( + stop=stop_after_attempt(2), + retry=retry_if_exception_type(KeycloakConnectionError), + before_sleep=_before_sleep_callback, + ) + async def check_duplicate_base_email( + self, email: str, current_user_id: str + ) -> bool: + """Check if a user with the same base email already exists. + + This method checks for duplicate signups using email + modifier. + It checks if any user exists with the same base email, regardless of whether + the provided email has a + modifier or not. + + Examples: + - If email is "joe+test@example.com", it checks for existing users with + base email "joe@example.com" (e.g., "joe@example.com", "joe+1@example.com") + - If email is "joe@example.com", it checks for existing users with + base email "joe@example.com" (e.g., "joe+1@example.com", "joe+test@example.com") + + Args: + email: The email address to check (may or may not contain + modifier) + current_user_id: The user ID of the current user (to exclude from check) + + Returns: + True if a duplicate is found (excluding current user), False otherwise + """ + if not email: + return False + + base_email = extract_base_email(email) + if not base_email: + logger.warning(f'Could not extract base email from: {email}') + return False + + try: + local_part, domain = base_email.rsplit('@', 1) + users = await self._query_users_by_wildcard_pattern(local_part, domain) + return self._find_duplicate_in_users(users, base_email, current_user_id) + + except KeycloakConnectionError: + logger.exception('KeycloakConnectionError when checking duplicate email') + raise + except Exception as e: + logger.exception(f'Unexpected error checking duplicate email: {e}') + # On any error, allow signup to proceed (fail open) + return False + + @retry( + stop=stop_after_attempt(2), + retry=retry_if_exception_type(KeycloakConnectionError), + before_sleep=_before_sleep_callback, + ) + async def delete_keycloak_user(self, user_id: str) -> bool: + """Delete a user from Keycloak. + + This method is used to clean up user accounts that were created + but should not exist (e.g., duplicate email signups). + + Args: + user_id: The Keycloak user ID to delete + + Returns: + True if deletion was successful, False otherwise + """ + try: + keycloak_admin = get_keycloak_admin(self.external) + # Use the sync method (python-keycloak doesn't have async delete_user) + # Run it in a thread executor to avoid blocking the event loop + await asyncio.to_thread(keycloak_admin.delete_user, user_id) + logger.info(f'Successfully deleted Keycloak user {user_id}') + return True + except KeycloakConnectionError: + logger.exception(f'KeycloakConnectionError when deleting user {user_id}') + raise + except KeycloakError as e: + # User might not exist or already deleted + logger.warning( + f'KeycloakError when deleting user {user_id}: {e}', + extra={'user_id': user_id, 'error': str(e)}, + ) + return False + except Exception as e: + logger.exception(f'Unexpected error deleting Keycloak user {user_id}: {e}') + return False + async def get_user_info_from_user_id(self, user_id: str) -> dict | None: keycloak_admin = get_keycloak_admin(self.external) user = await keycloak_admin.a_get_user(user_id) diff --git a/enterprise/server/routes/auth.py b/enterprise/server/routes/auth.py index 2ee50bbd2d..3ea384b403 100644 --- a/enterprise/server/routes/auth.py +++ b/enterprise/server/routes/auth.py @@ -146,9 +146,11 @@ async def keycloak_callback( content={'error': 'Missing user ID or username in response'}, ) - # Check if email domain is blocked email = user_info.get('email') user_id = user_info['sub'] + + # Check if email domain is blocked + email = user_info.get('email') if email and domain_blocker.is_active() and domain_blocker.is_domain_blocked(email): logger.warning( f'Blocked authentication attempt for email: {email}, user_id: {user_id}' @@ -164,6 +166,42 @@ async def keycloak_callback( }, ) + # Check for duplicate email with + modifier + if email: + try: + has_duplicate = await token_manager.check_duplicate_base_email( + email, user_id + ) + if has_duplicate: + logger.warning( + f'Blocked signup attempt for email {email} - duplicate base email found', + extra={'user_id': user_id, 'email': email}, + ) + + # Delete the Keycloak user that was automatically created during OAuth + # This prevents orphaned accounts in Keycloak + # The delete_keycloak_user method already handles all errors internally + deletion_success = await token_manager.delete_keycloak_user(user_id) + if deletion_success: + logger.info( + f'Deleted Keycloak user {user_id} after detecting duplicate email {email}' + ) + else: + logger.warning( + f'Failed to delete Keycloak user {user_id} after detecting duplicate email {email}. ' + f'User may need to be manually cleaned up.' + ) + + # Redirect to home page with query parameter indicating the issue + home_url = f'{request.base_url}?duplicated_email=true' + return RedirectResponse(home_url, status_code=302) + except Exception as e: + # Log error but allow signup to proceed (fail open) + logger.error( + f'Error checking duplicate email for {email}: {e}', + extra={'user_id': user_id, 'email': email}, + ) + # default to github IDP for now. # TODO: remove default once Keycloak is updated universally with the new attribute. idp: str = user_info.get('identity_provider', ProviderType.GITHUB.value) diff --git a/enterprise/tests/unit/test_auth_routes.py b/enterprise/tests/unit/test_auth_routes.py index d3e8f47fbe..0eeca12dcf 100644 --- a/enterprise/tests/unit/test_auth_routes.py +++ b/enterprise/tests/unit/test_auth_routes.py @@ -635,3 +635,219 @@ async def test_keycloak_callback_missing_email(mock_request): assert isinstance(result, RedirectResponse) mock_domain_blocker.is_domain_blocked.assert_not_called() mock_token_manager.disable_keycloak_user.assert_not_called() + + +@pytest.mark.asyncio +async def test_keycloak_callback_duplicate_email_detected(mock_request): + """Test keycloak_callback when duplicate email is detected.""" + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + ): + # Arrange + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'joe+test@example.com', + 'identity_provider': 'github', + } + ) + mock_token_manager.check_duplicate_base_email = AsyncMock(return_value=True) + mock_token_manager.delete_keycloak_user = AsyncMock(return_value=True) + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + assert 'duplicated_email=true' in result.headers['location'] + mock_token_manager.check_duplicate_base_email.assert_called_once_with( + 'joe+test@example.com', 'test_user_id' + ) + mock_token_manager.delete_keycloak_user.assert_called_once_with('test_user_id') + + +@pytest.mark.asyncio +async def test_keycloak_callback_duplicate_email_deletion_fails(mock_request): + """Test keycloak_callback when duplicate is detected but deletion fails.""" + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + ): + # Arrange + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'joe+test@example.com', + 'identity_provider': 'github', + } + ) + mock_token_manager.check_duplicate_base_email = AsyncMock(return_value=True) + mock_token_manager.delete_keycloak_user = AsyncMock(return_value=False) + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + assert 'duplicated_email=true' in result.headers['location'] + mock_token_manager.delete_keycloak_user.assert_called_once_with('test_user_id') + + +@pytest.mark.asyncio +async def test_keycloak_callback_duplicate_check_exception(mock_request): + """Test keycloak_callback when duplicate check raises exception.""" + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + # Arrange + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'joe+test@example.com', + 'identity_provider': 'github', + } + ) + mock_token_manager.check_duplicate_base_email = AsyncMock( + side_effect=Exception('Check failed') + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + # Should proceed with normal flow despite exception (fail open) + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + + +@pytest.mark.asyncio +async def test_keycloak_callback_no_duplicate_email(mock_request): + """Test keycloak_callback when no duplicate email is found.""" + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + # Arrange + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'email': 'joe+test@example.com', + 'identity_provider': 'github', + } + ) + mock_token_manager.check_duplicate_base_email = AsyncMock(return_value=False) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + mock_token_manager.check_duplicate_base_email.assert_called_once_with( + 'joe+test@example.com', 'test_user_id' + ) + # Should not delete user when no duplicate found + mock_token_manager.delete_keycloak_user.assert_not_called() + + +@pytest.mark.asyncio +async def test_keycloak_callback_no_email_in_user_info(mock_request): + """Test keycloak_callback when email is not in user_info.""" + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.auth.session_maker') as mock_session_maker, + ): + # Arrange + mock_session = MagicMock() + mock_session_maker.return_value.__enter__.return_value = mock_session + mock_query = MagicMock() + mock_session.query.return_value = mock_query + mock_query.filter.return_value = mock_query + mock_user_settings = MagicMock() + mock_user_settings.accepted_tos = '2025-01-01' + mock_query.first.return_value = mock_user_settings + + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + # No email field + 'identity_provider': 'github', + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_token_manager.validate_offline_token = AsyncMock(return_value=True) + + mock_verifier.is_active.return_value = True + mock_verifier.is_user_allowed.return_value = True + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + # Should not check for duplicate when email is missing + mock_token_manager.check_duplicate_base_email.assert_not_called() diff --git a/enterprise/tests/unit/test_email_validation.py b/enterprise/tests/unit/test_email_validation.py new file mode 100644 index 0000000000..320c5d4699 --- /dev/null +++ b/enterprise/tests/unit/test_email_validation.py @@ -0,0 +1,294 @@ +"""Tests for email validation utilities.""" + +import re + +from server.auth.email_validation import ( + extract_base_email, + get_base_email_regex_pattern, + has_plus_modifier, + matches_base_email, +) + + +class TestExtractBaseEmail: + """Test cases for extract_base_email function.""" + + def test_extract_base_email_with_plus_modifier(self): + """Test extracting base email from email with + modifier.""" + # Arrange + email = 'joe+test@example.com' + + # Act + result = extract_base_email(email) + + # Assert + assert result == 'joe@example.com' + + def test_extract_base_email_without_plus_modifier(self): + """Test that email without + modifier is returned as-is.""" + # Arrange + email = 'joe@example.com' + + # Act + result = extract_base_email(email) + + # Assert + assert result == 'joe@example.com' + + def test_extract_base_email_multiple_plus_signs(self): + """Test extracting base email when multiple + signs exist.""" + # Arrange + email = 'joe+openhands+test@example.com' + + # Act + result = extract_base_email(email) + + # Assert + assert result == 'joe@example.com' + + def test_extract_base_email_invalid_no_at_symbol(self): + """Test that invalid email without @ returns None.""" + # Arrange + email = 'invalid-email' + + # Act + result = extract_base_email(email) + + # Assert + assert result is None + + def test_extract_base_email_empty_string(self): + """Test that empty string returns None.""" + # Arrange + email = '' + + # Act + result = extract_base_email(email) + + # Assert + assert result is None + + def test_extract_base_email_none(self): + """Test that None input returns None.""" + # Arrange + email = None + + # Act + result = extract_base_email(email) + + # Assert + assert result is None + + +class TestHasPlusModifier: + """Test cases for has_plus_modifier function.""" + + def test_has_plus_modifier_true(self): + """Test detecting + modifier in email.""" + # Arrange + email = 'joe+test@example.com' + + # Act + result = has_plus_modifier(email) + + # Assert + assert result is True + + def test_has_plus_modifier_false(self): + """Test that email without + modifier returns False.""" + # Arrange + email = 'joe@example.com' + + # Act + result = has_plus_modifier(email) + + # Assert + assert result is False + + def test_has_plus_modifier_invalid_no_at_symbol(self): + """Test that invalid email without @ returns False.""" + # Arrange + email = 'invalid-email' + + # Act + result = has_plus_modifier(email) + + # Assert + assert result is False + + def test_has_plus_modifier_empty_string(self): + """Test that empty string returns False.""" + # Arrange + email = '' + + # Act + result = has_plus_modifier(email) + + # Assert + assert result is False + + +class TestMatchesBaseEmail: + """Test cases for matches_base_email function.""" + + def test_matches_base_email_exact_match(self): + """Test that exact base email matches.""" + # Arrange + email = 'joe@example.com' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is True + + def test_matches_base_email_with_plus_variant(self): + """Test that email with + variant matches base email.""" + # Arrange + email = 'joe+test@example.com' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is True + + def test_matches_base_email_different_base(self): + """Test that different base emails do not match.""" + # Arrange + email = 'jane@example.com' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is False + + def test_matches_base_email_different_domain(self): + """Test that same local part but different domain does not match.""" + # Arrange + email = 'joe@other.com' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is False + + def test_matches_base_email_case_insensitive(self): + """Test that matching is case-insensitive.""" + # Arrange + email = 'JOE+TEST@EXAMPLE.COM' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is True + + def test_matches_base_email_empty_strings(self): + """Test that empty strings return False.""" + # Arrange + email = '' + base_email = 'joe@example.com' + + # Act + result = matches_base_email(email, base_email) + + # Assert + assert result is False + + +class TestGetBaseEmailRegexPattern: + """Test cases for get_base_email_regex_pattern function.""" + + def test_get_base_email_regex_pattern_valid(self): + """Test generating valid regex pattern for base email.""" + # Arrange + base_email = 'joe@example.com' + + # Act + pattern = get_base_email_regex_pattern(base_email) + + # Assert + assert pattern is not None + assert isinstance(pattern, re.Pattern) + assert pattern.match('joe@example.com') is not None + assert pattern.match('joe+test@example.com') is not None + assert pattern.match('joe+openhands@example.com') is not None + + def test_get_base_email_regex_pattern_matches_plus_variant(self): + """Test that regex pattern matches + variant.""" + # Arrange + base_email = 'joe@example.com' + pattern = get_base_email_regex_pattern(base_email) + + # Act + match = pattern.match('joe+test@example.com') + + # Assert + assert match is not None + + def test_get_base_email_regex_pattern_rejects_different_base(self): + """Test that regex pattern rejects different base email.""" + # Arrange + base_email = 'joe@example.com' + pattern = get_base_email_regex_pattern(base_email) + + # Act + match = pattern.match('jane@example.com') + + # Assert + assert match is None + + def test_get_base_email_regex_pattern_rejects_different_domain(self): + """Test that regex pattern rejects different domain.""" + # Arrange + base_email = 'joe@example.com' + pattern = get_base_email_regex_pattern(base_email) + + # Act + match = pattern.match('joe@other.com') + + # Assert + assert match is None + + def test_get_base_email_regex_pattern_case_insensitive(self): + """Test that regex pattern is case-insensitive.""" + # Arrange + base_email = 'joe@example.com' + pattern = get_base_email_regex_pattern(base_email) + + # Act + match = pattern.match('JOE+TEST@EXAMPLE.COM') + + # Assert + assert match is not None + + def test_get_base_email_regex_pattern_special_characters(self): + """Test that regex pattern handles special characters in email.""" + # Arrange + base_email = 'user.name+tag@example-site.com' + pattern = get_base_email_regex_pattern(base_email) + + # Act + match = pattern.match('user.name+test@example-site.com') + + # Assert + assert match is not None + + def test_get_base_email_regex_pattern_invalid_base_email(self): + """Test that invalid base email returns None.""" + # Arrange + base_email = 'invalid-email' + + # Act + pattern = get_base_email_regex_pattern(base_email) + + # Assert + assert pattern is None diff --git a/enterprise/tests/unit/test_token_manager.py b/enterprise/tests/unit/test_token_manager.py index 413962d60c..0498ff1cb5 100644 --- a/enterprise/tests/unit/test_token_manager.py +++ b/enterprise/tests/unit/test_token_manager.py @@ -1,6 +1,8 @@ -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock, patch import pytest +from keycloak.exceptions import KeycloakConnectionError, KeycloakError +from server.auth.token_manager import TokenManager from sqlalchemy.orm import Session from storage.offline_token_store import OfflineTokenStore from storage.stored_offline_token import StoredOfflineToken @@ -32,6 +34,14 @@ def token_store(mock_session_maker, mock_config): return OfflineTokenStore('test_user_id', mock_session_maker, mock_config) +@pytest.fixture +def token_manager(): + with patch('server.config.get_config') as mock_get_config: + mock_config = mock_get_config.return_value + mock_config.jwt_secret.get_secret_value.return_value = 'test_secret' + return TokenManager(external=False) + + @pytest.mark.asyncio async def test_store_token_new_record(token_store, mock_session): # Setup @@ -109,3 +119,419 @@ async def test_get_instance(mock_config): assert isinstance(result, OfflineTokenStore) assert result.user_id == test_user_id assert result.config == mock_config + + +class TestCheckDuplicateBaseEmail: + """Test cases for check_duplicate_base_email method.""" + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_no_plus_modifier(self, token_manager): + """Test that emails without + modifier are still checked for duplicates.""" + # Arrange + email = 'joe@example.com' + current_user_id = 'user123' + + with ( + patch.object( + token_manager, '_query_users_by_wildcard_pattern' + ) as mock_query, + patch.object(token_manager, '_find_duplicate_in_users') as mock_find, + ): + mock_find.return_value = False + mock_query.return_value = {} + + # Act + result = await token_manager.check_duplicate_base_email( + email, current_user_id + ) + + # Assert + assert result is False + mock_query.assert_called_once() + mock_find.assert_called_once() + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_empty_email(self, token_manager): + """Test that empty email returns False.""" + # Arrange + email = '' + current_user_id = 'user123' + + # Act + result = await token_manager.check_duplicate_base_email(email, current_user_id) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_invalid_email(self, token_manager): + """Test that invalid email returns False.""" + # Arrange + email = 'invalid-email' + current_user_id = 'user123' + + # Act + result = await token_manager.check_duplicate_base_email(email, current_user_id) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_duplicate_found(self, token_manager): + """Test that duplicate email is detected when found.""" + # Arrange + email = 'joe+test@example.com' + current_user_id = 'user123' + existing_user = { + 'id': 'existing_user_id', + 'email': 'joe@example.com', + } + + with ( + patch.object( + token_manager, '_query_users_by_wildcard_pattern' + ) as mock_query, + patch.object(token_manager, '_find_duplicate_in_users') as mock_find, + ): + mock_find.return_value = True + mock_query.return_value = {'existing_user_id': existing_user} + + # Act + result = await token_manager.check_duplicate_base_email( + email, current_user_id + ) + + # Assert + assert result is True + mock_query.assert_called_once() + mock_find.assert_called_once() + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_no_duplicate(self, token_manager): + """Test that no duplicate is found when none exists.""" + # Arrange + email = 'joe+test@example.com' + current_user_id = 'user123' + + with ( + patch.object( + token_manager, '_query_users_by_wildcard_pattern' + ) as mock_query, + patch.object(token_manager, '_find_duplicate_in_users') as mock_find, + ): + mock_find.return_value = False + mock_query.return_value = {} + + # Act + result = await token_manager.check_duplicate_base_email( + email, current_user_id + ) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_keycloak_connection_error( + self, token_manager + ): + """Test that KeycloakConnectionError triggers retry and raises RetryError.""" + # Arrange + email = 'joe+test@example.com' + current_user_id = 'user123' + + with patch.object( + token_manager, '_query_users_by_wildcard_pattern' + ) as mock_query: + mock_query.side_effect = KeycloakConnectionError('Connection failed') + + # Act & Assert + # KeycloakConnectionError is re-raised, which triggers retry decorator + # After retries exhaust (2 attempts), it raises RetryError + from tenacity import RetryError + + with pytest.raises(RetryError): + await token_manager.check_duplicate_base_email(email, current_user_id) + + @pytest.mark.asyncio + async def test_check_duplicate_base_email_general_exception(self, token_manager): + """Test that general exceptions are handled gracefully.""" + # Arrange + email = 'joe+test@example.com' + current_user_id = 'user123' + + with patch.object( + token_manager, '_query_users_by_wildcard_pattern' + ) as mock_query: + mock_query.side_effect = Exception('Unexpected error') + + # Act + result = await token_manager.check_duplicate_base_email( + email, current_user_id + ) + + # Assert + assert result is False + + +class TestQueryUsersByWildcardPattern: + """Test cases for _query_users_by_wildcard_pattern method.""" + + @pytest.mark.asyncio + async def test_query_users_by_wildcard_pattern_success_with_search( + self, token_manager + ): + """Test successful query using search parameter.""" + # Arrange + local_part = 'joe' + domain = 'example.com' + mock_users = [ + {'id': 'user1', 'email': 'joe@example.com'}, + {'id': 'user2', 'email': 'joe+test@example.com'}, + ] + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_users = AsyncMock(return_value=mock_users) + mock_get_admin.return_value = mock_admin + + # Act + result = await token_manager._query_users_by_wildcard_pattern( + local_part, domain + ) + + # Assert + assert len(result) == 2 + assert 'user1' in result + assert 'user2' in result + mock_admin.a_get_users.assert_called_once_with( + {'search': 'joe*@example.com'} + ) + + @pytest.mark.asyncio + async def test_query_users_by_wildcard_pattern_fallback_to_q(self, token_manager): + """Test fallback to q parameter when search fails.""" + # Arrange + local_part = 'joe' + domain = 'example.com' + mock_users = [{'id': 'user1', 'email': 'joe@example.com'}] + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + # First call fails, second succeeds + mock_admin.a_get_users = AsyncMock( + side_effect=[Exception('Search failed'), mock_users] + ) + mock_get_admin.return_value = mock_admin + + # Act + result = await token_manager._query_users_by_wildcard_pattern( + local_part, domain + ) + + # Assert + assert len(result) == 1 + assert 'user1' in result + assert mock_admin.a_get_users.call_count == 2 + + @pytest.mark.asyncio + async def test_query_users_by_wildcard_pattern_empty_result(self, token_manager): + """Test query returns empty dict when no users found.""" + # Arrange + local_part = 'joe' + domain = 'example.com' + + with patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin: + mock_admin = MagicMock() + mock_admin.a_get_users = AsyncMock(return_value=[]) + mock_get_admin.return_value = mock_admin + + # Act + result = await token_manager._query_users_by_wildcard_pattern( + local_part, domain + ) + + # Assert + assert result == {} + + +class TestFindDuplicateInUsers: + """Test cases for _find_duplicate_in_users method.""" + + def test_find_duplicate_in_users_with_regex_match(self, token_manager): + """Test finding duplicate using regex pattern.""" + # Arrange + users = { + 'user1': {'id': 'user1', 'email': 'joe@example.com'}, + 'user2': {'id': 'user2', 'email': 'joe+test@example.com'}, + } + base_email = 'joe@example.com' + current_user_id = 'user3' + + # Act + result = token_manager._find_duplicate_in_users( + users, base_email, current_user_id + ) + + # Assert + assert result is True + + def test_find_duplicate_in_users_fallback_to_simple_matching(self, token_manager): + """Test fallback to simple matching when regex pattern is None.""" + # Arrange + users = { + 'user1': {'id': 'user1', 'email': 'joe@example.com'}, + } + base_email = 'invalid-email' # Will cause regex pattern to be None + current_user_id = 'user2' + + with patch( + 'server.auth.token_manager.get_base_email_regex_pattern', return_value=None + ): + # Act + result = token_manager._find_duplicate_in_users( + users, base_email, current_user_id + ) + + # Assert + # Should use fallback matching, but invalid base_email won't match + assert result is False + + def test_find_duplicate_in_users_excludes_current_user(self, token_manager): + """Test that current user is excluded from duplicate check.""" + # Arrange + users = { + 'user1': {'id': 'user1', 'email': 'joe@example.com'}, + } + base_email = 'joe@example.com' + current_user_id = 'user1' # Same as user in users dict + + # Act + result = token_manager._find_duplicate_in_users( + users, base_email, current_user_id + ) + + # Assert + assert result is False + + def test_find_duplicate_in_users_no_match(self, token_manager): + """Test that no duplicate is found when emails don't match.""" + # Arrange + users = { + 'user1': {'id': 'user1', 'email': 'jane@example.com'}, + } + base_email = 'joe@example.com' + current_user_id = 'user2' + + # Act + result = token_manager._find_duplicate_in_users( + users, base_email, current_user_id + ) + + # Assert + assert result is False + + def test_find_duplicate_in_users_empty_dict(self, token_manager): + """Test that empty users dict returns False.""" + # Arrange + users: dict[str, dict] = {} + base_email = 'joe@example.com' + current_user_id = 'user1' + + # Act + result = token_manager._find_duplicate_in_users( + users, base_email, current_user_id + ) + + # Assert + assert result is False + + +class TestDeleteKeycloakUser: + """Test cases for delete_keycloak_user method.""" + + @pytest.mark.asyncio + async def test_delete_keycloak_user_success(self, token_manager): + """Test successful deletion of Keycloak user.""" + # Arrange + user_id = 'test_user_id' + + with ( + patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin, + patch('asyncio.to_thread') as mock_to_thread, + ): + mock_admin = MagicMock() + mock_admin.delete_user = MagicMock() + mock_get_admin.return_value = mock_admin + mock_to_thread.return_value = None + + # Act + result = await token_manager.delete_keycloak_user(user_id) + + # Assert + assert result is True + mock_to_thread.assert_called_once_with(mock_admin.delete_user, user_id) + + @pytest.mark.asyncio + async def test_delete_keycloak_user_connection_error(self, token_manager): + """Test handling of KeycloakConnectionError triggers retry and raises RetryError.""" + # Arrange + user_id = 'test_user_id' + + with ( + patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin, + patch('asyncio.to_thread') as mock_to_thread, + ): + mock_admin = MagicMock() + mock_admin.delete_user = MagicMock() + mock_get_admin.return_value = mock_admin + mock_to_thread.side_effect = KeycloakConnectionError('Connection failed') + + # Act & Assert + # KeycloakConnectionError triggers retry decorator + # After retries exhaust (2 attempts), it raises RetryError + from tenacity import RetryError + + with pytest.raises(RetryError): + await token_manager.delete_keycloak_user(user_id) + + @pytest.mark.asyncio + async def test_delete_keycloak_user_keycloak_error(self, token_manager): + """Test handling of KeycloakError (e.g., user not found).""" + # Arrange + user_id = 'test_user_id' + + with ( + patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin, + patch('asyncio.to_thread') as mock_to_thread, + ): + mock_admin = MagicMock() + mock_admin.delete_user = MagicMock() + mock_get_admin.return_value = mock_admin + mock_to_thread.side_effect = KeycloakError('User not found') + + # Act + result = await token_manager.delete_keycloak_user(user_id) + + # Assert + assert result is False + + @pytest.mark.asyncio + async def test_delete_keycloak_user_general_exception(self, token_manager): + """Test handling of general exceptions.""" + # Arrange + user_id = 'test_user_id' + + with ( + patch('server.auth.token_manager.get_keycloak_admin') as mock_get_admin, + patch('asyncio.to_thread') as mock_to_thread, + ): + mock_admin = MagicMock() + mock_admin.delete_user = MagicMock() + mock_get_admin.return_value = mock_admin + mock_to_thread.side_effect = Exception('Unexpected error') + + # Act + result = await token_manager.delete_keycloak_user(user_id) + + # Assert + assert result is False diff --git a/frontend/__tests__/components/features/auth-modal.test.tsx b/frontend/__tests__/components/features/auth-modal.test.tsx index 32b682d506..4f32841b12 100644 --- a/frontend/__tests__/components/features/auth-modal.test.tsx +++ b/frontend/__tests__/components/features/auth-modal.test.tsx @@ -1,6 +1,7 @@ import { render, screen } from "@testing-library/react"; import { it, describe, expect, vi, beforeEach, afterEach } from "vitest"; import userEvent from "@testing-library/user-event"; +import { MemoryRouter } from "react-router"; import { AuthModal } from "#/components/features/waitlist/auth-modal"; // Mock the useAuthUrl hook @@ -27,11 +28,13 @@ describe("AuthModal", () => { it("should render the GitHub and GitLab buttons", () => { render( - , + + + , ); const githubButton = screen.getByRole("button", { @@ -49,11 +52,13 @@ describe("AuthModal", () => { const user = userEvent.setup(); const mockUrl = "https://github.com/login/oauth/authorize"; render( - , + + + , ); const githubButton = screen.getByRole("button", { @@ -65,7 +70,11 @@ describe("AuthModal", () => { }); it("should render Terms of Service and Privacy Policy text with correct links", () => { - render(); + render( + + + , + ); // Find the terms of service section using data-testid const termsSection = screen.getByTestId("auth-modal-terms-of-service"); @@ -106,7 +115,11 @@ describe("AuthModal", () => { }); it("should open Terms of Service link in new tab", () => { - render(); + render( + + + , + ); const tosLink = screen.getByRole("link", { name: "COMMON$TERMS_OF_SERVICE", @@ -115,11 +128,53 @@ describe("AuthModal", () => { }); it("should open Privacy Policy link in new tab", () => { - render(); + render( + + + , + ); const privacyLink = screen.getByRole("link", { name: "COMMON$PRIVACY_POLICY", }); expect(privacyLink).toHaveAttribute("target", "_blank"); }); + + describe("Duplicate email error message", () => { + const renderAuthModalWithRouter = (initialEntries: string[]) => { + return render( + + + , + ); + }; + + it("should display error message when duplicated_email query parameter is true", () => { + // Arrange + const initialEntries = ["/?duplicated_email=true"]; + + // Act + renderAuthModalWithRouter(initialEntries); + + // Assert + const errorMessage = screen.getByText("AUTH$DUPLICATE_EMAIL_ERROR"); + expect(errorMessage).toBeInTheDocument(); + }); + + it("should not display error message when duplicated_email query parameter is missing", () => { + // Arrange + const initialEntries = ["/"]; + + // Act + renderAuthModalWithRouter(initialEntries); + + // Assert + const errorMessage = screen.queryByText("AUTH$DUPLICATE_EMAIL_ERROR"); + expect(errorMessage).not.toBeInTheDocument(); + }); + }); }); diff --git a/frontend/src/components/features/waitlist/auth-modal.tsx b/frontend/src/components/features/waitlist/auth-modal.tsx index 2c431fbd95..e1d52a7965 100644 --- a/frontend/src/components/features/waitlist/auth-modal.tsx +++ b/frontend/src/components/features/waitlist/auth-modal.tsx @@ -1,5 +1,6 @@ import React from "react"; import { useTranslation } from "react-i18next"; +import { useSearchParams } from "react-router"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; @@ -29,6 +30,8 @@ export function AuthModal({ }: AuthModalProps) { const { t } = useTranslation(); const { trackLoginButtonClick } = useTracking(); + const [searchParams] = useSearchParams(); + const hasDuplicatedEmail = searchParams.get("duplicated_email") === "true"; const gitlabAuthUrl = useAuthUrl({ appMode: appMode || null, @@ -123,6 +126,11 @@ export function AuthModal({ + {hasDuplicatedEmail && ( +
+ {t(I18nKey.AUTH$DUPLICATE_EMAIL_ERROR)} +
+ )}

{t(I18nKey.AUTH$SIGN_IN_WITH_IDENTITY_PROVIDER)} diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts index 1b330730d9..0dd668cacc 100644 --- a/frontend/src/i18n/declaration.ts +++ b/frontend/src/i18n/declaration.ts @@ -730,6 +730,7 @@ export enum I18nKey { MICROAGENT_MANAGEMENT$USE_MICROAGENTS = "MICROAGENT_MANAGEMENT$USE_MICROAGENTS", AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR = "AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR", AUTH$NO_PROVIDERS_CONFIGURED = "AUTH$NO_PROVIDERS_CONFIGURED", + AUTH$DUPLICATE_EMAIL_ERROR = "AUTH$DUPLICATE_EMAIL_ERROR", COMMON$TERMS_OF_SERVICE = "COMMON$TERMS_OF_SERVICE", COMMON$AND = "COMMON$AND", COMMON$PRIVACY_POLICY = "COMMON$PRIVACY_POLICY", diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index a421de5ddf..2950b3ab72 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -11679,6 +11679,22 @@ "de": "Mindestens ein Identitätsanbieter muss konfiguriert werden (z.B. GitHub)", "uk": "Принаймні один постачальник ідентифікації має бути налаштований (наприклад, GitHub)" }, + "AUTH$DUPLICATE_EMAIL_ERROR": { + "en": "Your account is unable to be created. Please use a different login or try again.", + "ja": "アカウントを作成できません。別のログインを使用するか、もう一度お試しください。", + "zh-CN": "无法创建您的账户。请使用其他登录方式或重试。", + "zh-TW": "無法建立您的帳戶。請使用其他登入方式或重試。", + "ko-KR": "계정을 생성할 수 없습니다. 다른 로그인을 사용하거나 다시 시도해 주세요.", + "no": "Kontoen din kan ikke opprettes. Vennligst bruk en annen innlogging eller prøv igjen.", + "it": "Impossibile creare il tuo account. Utilizza un altro accesso o riprova.", + "pt": "Não foi possível criar sua conta. Use um login diferente ou tente novamente.", + "es": "No se puede crear su cuenta. Utilice un inicio de sesión diferente o inténtelo de nuevo.", + "ar": "لا يمكن إنشاء حسابك. يرجى استخدام تسجيل دخول مختلف أو المحاولة مرة أخرى.", + "fr": "Votre compte ne peut pas être créé. Veuillez utiliser une autre connexion ou réessayer.", + "tr": "Hesabınız oluşturulamadı. Lütfen farklı bir giriş kullanın veya tekrar deneyin.", + "de": "Ihr Konto kann nicht erstellt werden. Bitte verwenden Sie eine andere Anmeldung oder versuchen Sie es erneut.", + "uk": "Ваш обліковий запис не може бути створений. Будь ласка, використовуйте інший спосіб входу або спробуйте ще раз." + }, "COMMON$TERMS_OF_SERVICE": { "en": "Terms of Service", "ja": "利用規約", From 8d0e7a92b8ae61b83902324671e70d7520ee52cc Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 23 Dec 2025 12:02:56 -0700 Subject: [PATCH 52/80] ALL-4636 Resolution for connection leaks (#12144) Co-authored-by: openhands --- enterprise/server/auth/saas_user_auth.py | 6 +++-- .../server/routes/manage_conversations.py | 9 +++---- .../server/data_models/test_conversation.py | 24 +++++++++++++++++++ 3 files changed, 33 insertions(+), 6 deletions(-) diff --git a/enterprise/server/auth/saas_user_auth.py b/enterprise/server/auth/saas_user_auth.py index b51d336997..73a7217fd2 100644 --- a/enterprise/server/auth/saas_user_auth.py +++ b/enterprise/server/auth/saas_user_auth.py @@ -154,8 +154,10 @@ class SaasUserAuth(UserAuth): try: # TODO: I think we can do this in a single request if we refactor with session_maker() as session: - tokens = session.query(AuthTokens).where( - AuthTokens.keycloak_user_id == self.user_id + tokens = ( + session.query(AuthTokens) + .where(AuthTokens.keycloak_user_id == self.user_id) + .all() ) for token in tokens: diff --git a/openhands/server/routes/manage_conversations.py b/openhands/server/routes/manage_conversations.py index babbc48654..1793b07e7d 100644 --- a/openhands/server/routes/manage_conversations.py +++ b/openhands/server/routes/manage_conversations.py @@ -510,6 +510,10 @@ async def delete_conversation( if v1_result is not None: return v1_result + # Close connections + await db_session.close() + await httpx_client.aclose() + # V0 conversation logic return await _delete_v0_conversation(conversation_id, user_id) @@ -551,11 +555,8 @@ async def _try_delete_v1_conversation( httpx_client, ) ) - except (ValueError, TypeError): - # Not a valid UUID, continue with V0 logic - pass except Exception: - # Some other error, continue with V0 logic + # Continue with V0 logic pass return result diff --git a/tests/unit/server/data_models/test_conversation.py b/tests/unit/server/data_models/test_conversation.py index 79ff91fa7f..d5e289ecfa 100644 --- a/tests/unit/server/data_models/test_conversation.py +++ b/tests/unit/server/data_models/test_conversation.py @@ -946,6 +946,10 @@ async def test_delete_conversation(): # Create a mock sandbox service mock_sandbox_service = MagicMock() + # Create mock db_session and httpx_client + mock_db_session = AsyncMock() + mock_httpx_client = AsyncMock() + # Mock the conversation manager with patch( 'openhands.server.routes.manage_conversations.conversation_manager' @@ -969,6 +973,8 @@ async def test_delete_conversation(): app_conversation_service=mock_app_conversation_service, app_conversation_info_service=mock_app_conversation_info_service, sandbox_service=mock_sandbox_service, + db_session=mock_db_session, + httpx_client=mock_httpx_client, ) # Verify the result @@ -1090,6 +1096,10 @@ async def test_delete_v1_conversation_not_found(): ) mock_service.delete_app_conversation = AsyncMock(return_value=False) + # Create mock db_session and httpx_client + mock_db_session = AsyncMock() + mock_httpx_client = AsyncMock() + # Call delete_conversation with V1 conversation ID result = await delete_conversation( request=MagicMock(), @@ -1098,6 +1108,8 @@ async def test_delete_v1_conversation_not_found(): app_conversation_service=mock_service, app_conversation_info_service=mock_info_service, sandbox_service=mock_sandbox_service, + db_session=mock_db_session, + httpx_client=mock_httpx_client, ) # Verify the result @@ -1171,6 +1183,10 @@ async def test_delete_v1_conversation_invalid_uuid(): mock_sandbox_service = MagicMock() mock_sandbox_service_dep.return_value = mock_sandbox_service + # Create mock db_session and httpx_client + mock_db_session = AsyncMock() + mock_httpx_client = AsyncMock() + # Call delete_conversation result = await delete_conversation( request=MagicMock(), @@ -1179,6 +1195,8 @@ async def test_delete_v1_conversation_invalid_uuid(): app_conversation_service=mock_service, app_conversation_info_service=mock_info_service, sandbox_service=mock_sandbox_service, + db_session=mock_db_session, + httpx_client=mock_httpx_client, ) # Verify the result @@ -1264,6 +1282,10 @@ async def test_delete_v1_conversation_service_error(): mock_runtime_cls.delete = AsyncMock() mock_get_runtime_cls.return_value = mock_runtime_cls + # Create mock db_session and httpx_client + mock_db_session = AsyncMock() + mock_httpx_client = AsyncMock() + # Call delete_conversation result = await delete_conversation( request=MagicMock(), @@ -1272,6 +1294,8 @@ async def test_delete_v1_conversation_service_error(): app_conversation_service=mock_service, app_conversation_info_service=mock_info_service, sandbox_service=mock_sandbox_service, + db_session=mock_db_session, + httpx_client=mock_httpx_client, ) # Verify the result (should fallback to V0) From 8bc1a47a78f52868e6ea97b89a7a749440b6c284 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 23 Dec 2025 15:18:36 -0700 Subject: [PATCH 53/80] Fix for error in get_sandbox_by_session_api_key (#12147) --- openhands/app_server/sandbox/remote_sandbox_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index 076c478478..ef60724d9a 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -187,7 +187,7 @@ class RemoteSandboxService(SandboxService): return SandboxStatus.MISSING status = None - pod_status = runtime['pod_status'].lower() + pod_status = (runtime.get('pod_status') or '').lower() if pod_status: status = POD_STATUS_MAPPING.get(pod_status, None) From dc99c7b62e50877e11e6850fcdb2e585d7210ba3 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Tue, 23 Dec 2025 17:11:16 -0700 Subject: [PATCH 54/80] Fix SQLAlchemy result handling in get_sandbox_by_session_api_key (#12148) Co-authored-by: openhands --- openhands/app_server/sandbox/remote_sandbox_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/app_server/sandbox/remote_sandbox_service.py b/openhands/app_server/sandbox/remote_sandbox_service.py index ef60724d9a..1606fc81ae 100644 --- a/openhands/app_server/sandbox/remote_sandbox_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_service.py @@ -356,7 +356,7 @@ class RemoteSandboxService(SandboxService): StoredRemoteSandbox.id == runtime.get('session_id') ) result = await self.db_session.execute(query) - sandbox = result.first() + sandbox = result.scalar_one_or_none() if sandbox is None: raise ValueError('sandbox_not_found') return self._to_sandbox_info(sandbox, runtime) From e2b2aa52cdc091426a30fe62b44e2ff9a4a2cb16 Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Wed, 24 Dec 2025 14:56:02 +0700 Subject: [PATCH 55/80] feat: require email verification for new signups (#12123) --- enterprise/server/routes/auth.py | 12 + enterprise/server/routes/email.py | 16 +- .../unit/server/routes/test_email_routes.py | 151 +++++++++++ enterprise/tests/unit/test_auth_routes.py | 85 ++++++ .../components/features/auth-modal.test.tsx | 39 ++- .../email-verification-modal.test.tsx | 28 ++ .../shared/terms-and-privacy-notice.test.tsx | 48 ++++ .../__tests__/routes/root-layout.test.tsx | 242 ++++++++++++++++++ .../features/waitlist/auth-modal.tsx | 40 +-- .../waitlist/email-verification-modal.tsx | 31 +++ .../shared/terms-and-privacy-notice.tsx | 37 +++ frontend/src/hooks/use-email-verification.ts | 63 +++++ frontend/src/i18n/declaration.ts | 2 + frontend/src/i18n/translation.json | 32 +++ frontend/src/routes/root-layout.tsx | 17 ++ 15 files changed, 810 insertions(+), 33 deletions(-) create mode 100644 enterprise/tests/unit/server/routes/test_email_routes.py create mode 100644 frontend/__tests__/components/features/waitlist/email-verification-modal.test.tsx create mode 100644 frontend/__tests__/components/shared/terms-and-privacy-notice.test.tsx create mode 100644 frontend/__tests__/routes/root-layout.test.tsx create mode 100644 frontend/src/components/features/waitlist/email-verification-modal.tsx create mode 100644 frontend/src/components/shared/terms-and-privacy-notice.tsx create mode 100644 frontend/src/hooks/use-email-verification.ts diff --git a/enterprise/server/routes/auth.py b/enterprise/server/routes/auth.py index 3ea384b403..e911538da6 100644 --- a/enterprise/server/routes/auth.py +++ b/enterprise/server/routes/auth.py @@ -202,6 +202,18 @@ async def keycloak_callback( extra={'user_id': user_id, 'email': email}, ) + # Check email verification status + email_verified = user_info.get('email_verified', False) + if not email_verified: + # Send verification email + # Import locally to avoid circular import with email.py + from server.routes.email import verify_email + + await verify_email(request=request, user_id=user_id, is_auth_flow=True) + redirect_url = f'{request.base_url}?email_verification_required=true' + response = RedirectResponse(redirect_url, status_code=302) + return response + # default to github IDP for now. # TODO: remove default once Keycloak is updated universally with the new attribute. idp: str = user_info.get('identity_provider', ProviderType.GITHUB.value) diff --git a/enterprise/server/routes/email.py b/enterprise/server/routes/email.py index b0d88afaa0..b58adf9a4f 100644 --- a/enterprise/server/routes/email.py +++ b/enterprise/server/routes/email.py @@ -74,7 +74,7 @@ async def update_email( accepted_tos=user_auth.accepted_tos, ) - await _verify_email(request=request, user_id=user_id) + await verify_email(request=request, user_id=user_id) logger.info(f'Updating email address for {user_id} to {email}') return response @@ -91,8 +91,10 @@ async def update_email( @api_router.put('/verify') -async def verify_email(request: Request, user_id: str = Depends(get_user_id)): - await _verify_email(request=request, user_id=user_id) +async def resend_email_verification( + request: Request, user_id: str = Depends(get_user_id) +): + await verify_email(request=request, user_id=user_id) logger.info(f'Resending verification email for {user_id}') return JSONResponse( @@ -124,10 +126,14 @@ async def verified_email(request: Request): return response -async def _verify_email(request: Request, user_id: str): +async def verify_email(request: Request, user_id: str, is_auth_flow: bool = False): keycloak_admin = get_keycloak_admin() scheme = 'http' if request.url.hostname == 'localhost' else 'https' - redirect_uri = f'{scheme}://{request.url.netloc}/api/email/verified' + redirect_uri = ( + f'{scheme}://{request.url.netloc}?email_verified=true' + if is_auth_flow + else f'{scheme}://{request.url.netloc}/api/email/verified' + ) logger.info(f'Redirect URI: {redirect_uri}') await keycloak_admin.a_send_verify_email( user_id=user_id, diff --git a/enterprise/tests/unit/server/routes/test_email_routes.py b/enterprise/tests/unit/server/routes/test_email_routes.py new file mode 100644 index 0000000000..8f5ba12e87 --- /dev/null +++ b/enterprise/tests/unit/server/routes/test_email_routes.py @@ -0,0 +1,151 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from fastapi import Request +from fastapi.responses import RedirectResponse +from pydantic import SecretStr +from server.auth.saas_user_auth import SaasUserAuth +from server.routes.email import verified_email, verify_email + + +@pytest.fixture +def mock_request(): + """Create a mock request object.""" + request = MagicMock(spec=Request) + request.url = MagicMock() + request.url.hostname = 'localhost' + request.url.netloc = 'localhost:8000' + request.url.path = '/api/email/verified' + request.base_url = 'http://localhost:8000/' + request.headers = {} + request.cookies = {} + request.query_params = MagicMock() + return request + + +@pytest.fixture +def mock_user_auth(): + """Create a mock SaasUserAuth object.""" + auth = MagicMock(spec=SaasUserAuth) + auth.access_token = SecretStr('test_access_token') + auth.refresh_token = SecretStr('test_refresh_token') + auth.email = 'test@example.com' + auth.email_verified = False + auth.accepted_tos = True + auth.refresh = AsyncMock() + return auth + + +@pytest.mark.asyncio +async def test_verify_email_default_behavior(mock_request): + """Test verify_email with default is_auth_flow=False.""" + # Arrange + user_id = 'test_user_id' + mock_keycloak_admin = AsyncMock() + mock_keycloak_admin.a_send_verify_email = AsyncMock() + + # Act + with patch( + 'server.routes.email.get_keycloak_admin', return_value=mock_keycloak_admin + ): + await verify_email(request=mock_request, user_id=user_id) + + # Assert + mock_keycloak_admin.a_send_verify_email.assert_called_once() + call_args = mock_keycloak_admin.a_send_verify_email.call_args + assert call_args.kwargs['user_id'] == user_id + assert ( + call_args.kwargs['redirect_uri'] == 'http://localhost:8000/api/email/verified' + ) + assert 'client_id' in call_args.kwargs + + +@pytest.mark.asyncio +async def test_verify_email_with_auth_flow(mock_request): + """Test verify_email with is_auth_flow=True.""" + # Arrange + user_id = 'test_user_id' + mock_keycloak_admin = AsyncMock() + mock_keycloak_admin.a_send_verify_email = AsyncMock() + + # Act + with patch( + 'server.routes.email.get_keycloak_admin', return_value=mock_keycloak_admin + ): + await verify_email(request=mock_request, user_id=user_id, is_auth_flow=True) + + # Assert + mock_keycloak_admin.a_send_verify_email.assert_called_once() + call_args = mock_keycloak_admin.a_send_verify_email.call_args + assert call_args.kwargs['user_id'] == user_id + assert ( + call_args.kwargs['redirect_uri'] == 'http://localhost:8000?email_verified=true' + ) + assert 'client_id' in call_args.kwargs + + +@pytest.mark.asyncio +async def test_verify_email_https_scheme(mock_request): + """Test verify_email uses https scheme for non-localhost hosts.""" + # Arrange + user_id = 'test_user_id' + mock_request.url.hostname = 'example.com' + mock_request.url.netloc = 'example.com' + mock_keycloak_admin = AsyncMock() + mock_keycloak_admin.a_send_verify_email = AsyncMock() + + # Act + with patch( + 'server.routes.email.get_keycloak_admin', return_value=mock_keycloak_admin + ): + await verify_email(request=mock_request, user_id=user_id, is_auth_flow=True) + + # Assert + call_args = mock_keycloak_admin.a_send_verify_email.call_args + assert call_args.kwargs['redirect_uri'].startswith('https://') + + +@pytest.mark.asyncio +async def test_verified_email_default_redirect(mock_request, mock_user_auth): + """Test verified_email redirects to /settings/user by default.""" + # Arrange + mock_request.query_params.get.return_value = None + + # Act + with ( + patch('server.routes.email.get_user_auth', return_value=mock_user_auth), + patch('server.routes.email.set_response_cookie') as mock_set_cookie, + ): + result = await verified_email(mock_request) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + assert result.headers['location'] == 'http://localhost:8000/settings/user' + mock_user_auth.refresh.assert_called_once() + mock_set_cookie.assert_called_once() + assert mock_user_auth.email_verified is True + + +@pytest.mark.asyncio +async def test_verified_email_https_scheme(mock_request, mock_user_auth): + """Test verified_email uses https scheme for non-localhost hosts.""" + # Arrange + mock_request.url.hostname = 'example.com' + mock_request.url.netloc = 'example.com' + mock_request.query_params.get.return_value = None + + # Act + with ( + patch('server.routes.email.get_user_auth', return_value=mock_user_auth), + patch('server.routes.email.set_response_cookie') as mock_set_cookie, + ): + result = await verified_email(mock_request) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.headers['location'].startswith('https://') + mock_set_cookie.assert_called_once() + # Verify secure flag is True for https + call_kwargs = mock_set_cookie.call_args.kwargs + assert call_kwargs['secure'] is True diff --git a/enterprise/tests/unit/test_auth_routes.py b/enterprise/tests/unit/test_auth_routes.py index 0eeca12dcf..8490d92760 100644 --- a/enterprise/tests/unit/test_auth_routes.py +++ b/enterprise/tests/unit/test_auth_routes.py @@ -136,6 +136,7 @@ async def test_keycloak_callback_user_not_allowed(mock_request): 'sub': 'test_user_id', 'preferred_username': 'test_user', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() @@ -184,6 +185,7 @@ async def test_keycloak_callback_success_with_valid_offline_token(mock_request): 'sub': 'test_user_id', 'preferred_username': 'test_user', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() @@ -214,6 +216,82 @@ async def test_keycloak_callback_success_with_valid_offline_token(mock_request): mock_posthog.set.assert_called_once() +@pytest.mark.asyncio +async def test_keycloak_callback_email_not_verified(mock_request): + """Test keycloak_callback when email is not verified.""" + # Arrange + mock_verify_email = AsyncMock() + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.email.verify_email', mock_verify_email), + ): + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'identity_provider': 'github', + 'email_verified': False, + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_verifier.is_active.return_value = False + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + assert 'email_verification_required=true' in result.headers['location'] + mock_verify_email.assert_called_once_with( + request=mock_request, user_id='test_user_id', is_auth_flow=True + ) + + +@pytest.mark.asyncio +async def test_keycloak_callback_email_not_verified_missing_field(mock_request): + """Test keycloak_callback when email_verified field is missing (defaults to False).""" + # Arrange + mock_verify_email = AsyncMock() + with ( + patch('server.routes.auth.token_manager') as mock_token_manager, + patch('server.routes.auth.user_verifier') as mock_verifier, + patch('server.routes.email.verify_email', mock_verify_email), + ): + mock_token_manager.get_keycloak_tokens = AsyncMock( + return_value=('test_access_token', 'test_refresh_token') + ) + mock_token_manager.get_user_info = AsyncMock( + return_value={ + 'sub': 'test_user_id', + 'preferred_username': 'test_user', + 'identity_provider': 'github', + # email_verified field is missing + } + ) + mock_token_manager.store_idp_tokens = AsyncMock() + mock_verifier.is_active.return_value = False + + # Act + result = await keycloak_callback( + code='test_code', state='test_state', request=mock_request + ) + + # Assert + assert isinstance(result, RedirectResponse) + assert result.status_code == 302 + assert 'email_verification_required=true' in result.headers['location'] + mock_verify_email.assert_called_once_with( + request=mock_request, user_id='test_user_id', is_auth_flow=True + ) + + @pytest.mark.asyncio async def test_keycloak_callback_success_without_offline_token(mock_request): """Test successful keycloak_callback without valid offline token.""" @@ -248,6 +326,7 @@ async def test_keycloak_callback_success_without_offline_token(mock_request): 'sub': 'test_user_id', 'preferred_username': 'test_user', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() @@ -513,6 +592,7 @@ async def test_keycloak_callback_allowed_email_domain(mock_request): 'preferred_username': 'test_user', 'email': 'user@example.com', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() @@ -566,6 +646,7 @@ async def test_keycloak_callback_domain_blocking_inactive(mock_request): 'preferred_username': 'test_user', 'email': 'user@colsch.us', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() @@ -615,6 +696,7 @@ async def test_keycloak_callback_missing_email(mock_request): 'sub': 'test_user_id', 'preferred_username': 'test_user', 'identity_provider': 'github', + 'email_verified': True, # No email field } ) @@ -733,6 +815,7 @@ async def test_keycloak_callback_duplicate_check_exception(mock_request): 'preferred_username': 'test_user', 'email': 'joe+test@example.com', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.check_duplicate_base_email = AsyncMock( @@ -782,6 +865,7 @@ async def test_keycloak_callback_no_duplicate_email(mock_request): 'preferred_username': 'test_user', 'email': 'joe+test@example.com', 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.check_duplicate_base_email = AsyncMock(return_value=False) @@ -833,6 +917,7 @@ async def test_keycloak_callback_no_email_in_user_info(mock_request): 'preferred_username': 'test_user', # No email field 'identity_provider': 'github', + 'email_verified': True, } ) mock_token_manager.store_idp_tokens = AsyncMock() diff --git a/frontend/__tests__/components/features/auth-modal.test.tsx b/frontend/__tests__/components/features/auth-modal.test.tsx index 4f32841b12..30550f7106 100644 --- a/frontend/__tests__/components/features/auth-modal.test.tsx +++ b/frontend/__tests__/components/features/auth-modal.test.tsx @@ -77,7 +77,7 @@ describe("AuthModal", () => { ); // Find the terms of service section using data-testid - const termsSection = screen.getByTestId("auth-modal-terms-of-service"); + const termsSection = screen.getByTestId("terms-and-privacy-notice"); expect(termsSection).toBeInTheDocument(); // Check that all text content is present in the paragraph @@ -114,6 +114,38 @@ describe("AuthModal", () => { expect(termsSection).toContainElement(privacyLink); }); + it("should display email verified message when emailVerified prop is true", () => { + render( + + + , + ); + + expect( + screen.getByText("AUTH$EMAIL_VERIFIED_PLEASE_LOGIN"), + ).toBeInTheDocument(); + }); + + it("should not display email verified message when emailVerified prop is false", () => { + render( + + + , + ); + + expect( + screen.queryByText("AUTH$EMAIL_VERIFIED_PLEASE_LOGIN"), + ).not.toBeInTheDocument(); + }); + it("should open Terms of Service link in new tab", () => { render( @@ -142,12 +174,17 @@ describe("AuthModal", () => { describe("Duplicate email error message", () => { const renderAuthModalWithRouter = (initialEntries: string[]) => { + const hasDuplicatedEmail = initialEntries.includes( + "/?duplicated_email=true", + ); + return render( , ); diff --git a/frontend/__tests__/components/features/waitlist/email-verification-modal.test.tsx b/frontend/__tests__/components/features/waitlist/email-verification-modal.test.tsx new file mode 100644 index 0000000000..e773461d84 --- /dev/null +++ b/frontend/__tests__/components/features/waitlist/email-verification-modal.test.tsx @@ -0,0 +1,28 @@ +import { render, screen } from "@testing-library/react"; +import { it, describe, expect, vi, beforeEach } from "vitest"; +import { EmailVerificationModal } from "#/components/features/waitlist/email-verification-modal"; + +describe("EmailVerificationModal", () => { + beforeEach(() => { + vi.clearAllMocks(); + }); + + it("should render the email verification message", () => { + // Arrange & Act + render(); + + // Assert + expect( + screen.getByText("AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY"), + ).toBeInTheDocument(); + }); + + it("should render the TermsAndPrivacyNotice component", () => { + // Arrange & Act + render(); + + // Assert + const termsSection = screen.getByTestId("terms-and-privacy-notice"); + expect(termsSection).toBeInTheDocument(); + }); +}); diff --git a/frontend/__tests__/components/shared/terms-and-privacy-notice.test.tsx b/frontend/__tests__/components/shared/terms-and-privacy-notice.test.tsx new file mode 100644 index 0000000000..559a7f0df6 --- /dev/null +++ b/frontend/__tests__/components/shared/terms-and-privacy-notice.test.tsx @@ -0,0 +1,48 @@ +import { render, screen } from "@testing-library/react"; +import { it, describe, expect } from "vitest"; +import { TermsAndPrivacyNotice } from "#/components/shared/terms-and-privacy-notice"; + +describe("TermsAndPrivacyNotice", () => { + it("should render Terms of Service and Privacy Policy links", () => { + // Arrange & Act + render(); + + // Assert + const termsSection = screen.getByTestId("terms-and-privacy-notice"); + expect(termsSection).toBeInTheDocument(); + + const tosLink = screen.getByRole("link", { + name: "COMMON$TERMS_OF_SERVICE", + }); + const privacyLink = screen.getByRole("link", { + name: "COMMON$PRIVACY_POLICY", + }); + + expect(tosLink).toBeInTheDocument(); + expect(tosLink).toHaveAttribute("href", "https://www.all-hands.dev/tos"); + expect(tosLink).toHaveAttribute("target", "_blank"); + expect(tosLink).toHaveAttribute("rel", "noopener noreferrer"); + + expect(privacyLink).toBeInTheDocument(); + expect(privacyLink).toHaveAttribute( + "href", + "https://www.all-hands.dev/privacy", + ); + expect(privacyLink).toHaveAttribute("target", "_blank"); + expect(privacyLink).toHaveAttribute("rel", "noopener noreferrer"); + }); + + it("should render all required text content", () => { + // Arrange & Act + render(); + + // Assert + const termsSection = screen.getByTestId("terms-and-privacy-notice"); + expect(termsSection).toHaveTextContent( + "AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR", + ); + expect(termsSection).toHaveTextContent("COMMON$TERMS_OF_SERVICE"); + expect(termsSection).toHaveTextContent("COMMON$AND"); + expect(termsSection).toHaveTextContent("COMMON$PRIVACY_POLICY"); + }); +}); diff --git a/frontend/__tests__/routes/root-layout.test.tsx b/frontend/__tests__/routes/root-layout.test.tsx new file mode 100644 index 0000000000..22de4ae616 --- /dev/null +++ b/frontend/__tests__/routes/root-layout.test.tsx @@ -0,0 +1,242 @@ +import { render, screen, waitFor } from "@testing-library/react"; +import { it, describe, expect, vi, beforeEach, afterEach } from "vitest"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; +import { createRoutesStub } from "react-router"; +import MainApp from "#/routes/root-layout"; +import OptionService from "#/api/option-service/option-service.api"; +import AuthService from "#/api/auth-service/auth-service.api"; +import SettingsService from "#/api/settings-service/settings-service.api"; + +// Mock other hooks that are not the focus of these tests +vi.mock("#/hooks/use-github-auth-url", () => ({ + useGitHubAuthUrl: () => "https://github.com/oauth/authorize", +})); + +vi.mock("#/hooks/use-is-on-tos-page", () => ({ + useIsOnTosPage: () => false, +})); + +vi.mock("#/hooks/use-auto-login", () => ({ + useAutoLogin: () => {}, +})); + +vi.mock("#/hooks/use-auth-callback", () => ({ + useAuthCallback: () => {}, +})); + +vi.mock("#/hooks/use-migrate-user-consent", () => ({ + useMigrateUserConsent: () => ({ + migrateUserConsent: vi.fn(), + }), +})); + +vi.mock("#/hooks/use-reo-tracking", () => ({ + useReoTracking: () => {}, +})); + +vi.mock("#/hooks/use-sync-posthog-consent", () => ({ + useSyncPostHogConsent: () => {}, +})); + +vi.mock("#/utils/custom-toast-handlers", () => ({ + displaySuccessToast: vi.fn(), +})); + +const RouterStub = createRoutesStub([ + { + Component: MainApp, + path: "/", + children: [ + { + Component: () =>
Content
, + path: "/", + }, + ], + }, +]); + +const createWrapper = () => { + const queryClient = new QueryClient({ + defaultOptions: { + queries: { + retry: false, + }, + }, + }); + + return ({ children }: { children: React.ReactNode }) => ( + {children} + ); +}; + +describe("MainApp - Email Verification Flow", () => { + beforeEach(() => { + vi.clearAllMocks(); + + // Default mocks for services + vi.spyOn(OptionService, "getConfig").mockResolvedValue({ + APP_MODE: "saas", + GITHUB_CLIENT_ID: "test-client-id", + POSTHOG_CLIENT_KEY: "test-posthog-key", + PROVIDERS_CONFIGURED: ["github"], + AUTH_URL: "https://auth.example.com", + FEATURE_FLAGS: { + ENABLE_BILLING: false, + HIDE_LLM_SETTINGS: false, + ENABLE_JIRA: false, + ENABLE_JIRA_DC: false, + ENABLE_LINEAR: false, + }, + }); + + vi.spyOn(AuthService, "authenticate").mockResolvedValue(true); + + vi.spyOn(SettingsService, "getSettings").mockResolvedValue({ + language: "en", + user_consents_to_analytics: true, + llm_model: "", + llm_base_url: "", + agent: "", + llm_api_key: null, + llm_api_key_set: false, + search_api_key_set: false, + confirmation_mode: false, + security_analyzer: null, + remote_runtime_resource_factor: null, + provider_tokens_set: {}, + enable_default_condenser: false, + condenser_max_size: null, + enable_sound_notifications: false, + enable_proactive_conversation_starters: false, + enable_solvability_analysis: false, + max_budget_per_task: null, + }); + + // Mock localStorage + vi.stubGlobal("localStorage", { + getItem: vi.fn(() => null), + setItem: vi.fn(), + removeItem: vi.fn(), + clear: vi.fn(), + }); + }); + + afterEach(() => { + vi.restoreAllMocks(); + vi.unstubAllGlobals(); + }); + + it("should display EmailVerificationModal when email_verification_required=true is in query params", async () => { + // Arrange & Act + render( + , + { wrapper: createWrapper() }, + ); + + // Assert + await waitFor(() => { + expect( + screen.getByText("AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY"), + ).toBeInTheDocument(); + }); + }); + + it("should set emailVerified state and pass to AuthModal when email_verified=true is in query params", async () => { + // Arrange + // Mock a 401 error to simulate unauthenticated user + const axiosError = { + response: { status: 401 }, + isAxiosError: true, + }; + vi.spyOn(AuthService, "authenticate").mockRejectedValue(axiosError); + + // Act + render(, { + wrapper: createWrapper(), + }); + + // Assert - Wait for AuthModal to render (since user is not authenticated) + await waitFor(() => { + expect( + screen.getByText("AUTH$EMAIL_VERIFIED_PLEASE_LOGIN"), + ).toBeInTheDocument(); + }); + }); + + it("should handle both email_verification_required and email_verified params together", async () => { + // Arrange & Act + render( + , + { wrapper: createWrapper() }, + ); + + // Assert - EmailVerificationModal should take precedence + await waitFor(() => { + expect( + screen.getByText("AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY"), + ).toBeInTheDocument(); + }); + }); + + it("should remove query parameters from URL after processing", async () => { + // Arrange & Act + const { container } = render( + , + { wrapper: createWrapper() }, + ); + + // Assert - Wait for the modal to appear (which indicates processing happened) + await waitFor(() => { + expect( + screen.getByText("AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY"), + ).toBeInTheDocument(); + }); + + // Verify that the query parameter was processed by checking the modal appeared + // The hook removes the parameter from the URL, so we verify the behavior indirectly + expect(container).toBeInTheDocument(); + }); + + it("should not display EmailVerificationModal when email_verification_required is not in query params", async () => { + // Arrange - No query params set + + // Act + render(, { wrapper: createWrapper() }); + + // Assert + await waitFor(() => { + expect( + screen.queryByText("AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY"), + ).not.toBeInTheDocument(); + }); + }); + + it("should not display email verified message when email_verified is not in query params", async () => { + // Arrange + // Mock a 401 error to simulate unauthenticated user + const axiosError = { + response: { status: 401 }, + isAxiosError: true, + }; + vi.spyOn(AuthService, "authenticate").mockRejectedValue(axiosError); + + // Act + render(, { wrapper: createWrapper() }); + + // Assert - AuthModal should render but without email verified message + await waitFor(() => { + const authModal = screen.queryByText( + "AUTH$SIGN_IN_WITH_IDENTITY_PROVIDER", + ); + if (authModal) { + expect( + screen.queryByText("AUTH$EMAIL_VERIFIED_PLEASE_LOGIN"), + ).not.toBeInTheDocument(); + } + }); + }); +}); diff --git a/frontend/src/components/features/waitlist/auth-modal.tsx b/frontend/src/components/features/waitlist/auth-modal.tsx index e1d52a7965..6d92cb4dfc 100644 --- a/frontend/src/components/features/waitlist/auth-modal.tsx +++ b/frontend/src/components/features/waitlist/auth-modal.tsx @@ -1,6 +1,5 @@ import React from "react"; import { useTranslation } from "react-i18next"; -import { useSearchParams } from "react-router"; import { I18nKey } from "#/i18n/declaration"; import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; @@ -14,12 +13,15 @@ import { useAuthUrl } from "#/hooks/use-auth-url"; import { GetConfigResponse } from "#/api/option-service/option.types"; import { Provider } from "#/types/settings"; import { useTracking } from "#/hooks/use-tracking"; +import { TermsAndPrivacyNotice } from "#/components/shared/terms-and-privacy-notice"; interface AuthModalProps { githubAuthUrl: string | null; appMode?: GetConfigResponse["APP_MODE"] | null; authUrl?: GetConfigResponse["AUTH_URL"]; providersConfigured?: Provider[]; + emailVerified?: boolean; + hasDuplicatedEmail?: boolean; } export function AuthModal({ @@ -27,11 +29,11 @@ export function AuthModal({ appMode, authUrl, providersConfigured, + emailVerified = false, + hasDuplicatedEmail = false, }: AuthModalProps) { const { t } = useTranslation(); const { trackLoginButtonClick } = useTracking(); - const [searchParams] = useSearchParams(); - const hasDuplicatedEmail = searchParams.get("duplicated_email") === "true"; const gitlabAuthUrl = useAuthUrl({ appMode: appMode || null, @@ -126,6 +128,13 @@ export function AuthModal({ + {emailVerified && ( +
+

+ {t(I18nKey.AUTH$EMAIL_VERIFIED_PLEASE_LOGIN)} +

+
+ )} {hasDuplicatedEmail && (
{t(I18nKey.AUTH$DUPLICATE_EMAIL_ERROR)} @@ -206,30 +215,7 @@ export function AuthModal({ )}
-

- {t(I18nKey.AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR)}{" "} - - {t(I18nKey.COMMON$TERMS_OF_SERVICE)} - {" "} - {t(I18nKey.COMMON$AND)}{" "} - - {t(I18nKey.COMMON$PRIVACY_POLICY)} - - . -

+
); diff --git a/frontend/src/components/features/waitlist/email-verification-modal.tsx b/frontend/src/components/features/waitlist/email-verification-modal.tsx new file mode 100644 index 0000000000..820dce3258 --- /dev/null +++ b/frontend/src/components/features/waitlist/email-verification-modal.tsx @@ -0,0 +1,31 @@ +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; +import OpenHandsLogo from "#/assets/branding/openhands-logo.svg?react"; +import { ModalBackdrop } from "#/components/shared/modals/modal-backdrop"; +import { ModalBody } from "#/components/shared/modals/modal-body"; +import { TermsAndPrivacyNotice } from "#/components/shared/terms-and-privacy-notice"; + +interface EmailVerificationModalProps { + onClose: () => void; +} + +export function EmailVerificationModal({ + onClose, +}: EmailVerificationModalProps) { + const { t } = useTranslation(); + + return ( + + + +
+

+ {t(I18nKey.AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY)} +

+
+ + +
+
+ ); +} diff --git a/frontend/src/components/shared/terms-and-privacy-notice.tsx b/frontend/src/components/shared/terms-and-privacy-notice.tsx new file mode 100644 index 0000000000..8293d734da --- /dev/null +++ b/frontend/src/components/shared/terms-and-privacy-notice.tsx @@ -0,0 +1,37 @@ +import React from "react"; +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; + +interface TermsAndPrivacyNoticeProps { + className?: string; +} + +export function TermsAndPrivacyNotice({ + className = "mt-4 text-xs text-center text-muted-foreground", +}: TermsAndPrivacyNoticeProps) { + const { t } = useTranslation(); + + return ( +

+ {t(I18nKey.AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR)}{" "} + + {t(I18nKey.COMMON$TERMS_OF_SERVICE)} + {" "} + {t(I18nKey.COMMON$AND)}{" "} + + {t(I18nKey.COMMON$PRIVACY_POLICY)} + + . +

+ ); +} diff --git a/frontend/src/hooks/use-email-verification.ts b/frontend/src/hooks/use-email-verification.ts new file mode 100644 index 0000000000..c0068395b5 --- /dev/null +++ b/frontend/src/hooks/use-email-verification.ts @@ -0,0 +1,63 @@ +import React from "react"; +import { useSearchParams } from "react-router"; + +/** + * Hook to handle email verification logic from URL query parameters. + * Manages the email verification modal state and email verified state + * based on query parameters in the URL. + * + * @returns An object containing: + * - emailVerificationModalOpen: boolean state for modal visibility + * - setEmailVerificationModalOpen: function to control modal visibility + * - emailVerified: boolean state for email verification status + * - setEmailVerified: function to control email verification status + * - hasDuplicatedEmail: boolean state for duplicate email error status + */ +export function useEmailVerification() { + const [searchParams, setSearchParams] = useSearchParams(); + const [emailVerificationModalOpen, setEmailVerificationModalOpen] = + React.useState(false); + const [emailVerified, setEmailVerified] = React.useState(false); + const [hasDuplicatedEmail, setHasDuplicatedEmail] = React.useState(false); + + // Check for email verification query parameters + React.useEffect(() => { + const emailVerificationRequired = searchParams.get( + "email_verification_required", + ); + const emailVerifiedParam = searchParams.get("email_verified"); + const duplicatedEmailParam = searchParams.get("duplicated_email"); + let shouldUpdate = false; + + if (emailVerificationRequired === "true") { + setEmailVerificationModalOpen(true); + searchParams.delete("email_verification_required"); + shouldUpdate = true; + } + + if (emailVerifiedParam === "true") { + setEmailVerified(true); + searchParams.delete("email_verified"); + shouldUpdate = true; + } + + if (duplicatedEmailParam === "true") { + setHasDuplicatedEmail(true); + searchParams.delete("duplicated_email"); + shouldUpdate = true; + } + + // Clean up the URL by removing parameters if any were found + if (shouldUpdate) { + setSearchParams(searchParams, { replace: true }); + } + }, [searchParams, setSearchParams]); + + return { + emailVerificationModalOpen, + setEmailVerificationModalOpen, + emailVerified, + setEmailVerified, + hasDuplicatedEmail, + }; +} diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts index 0dd668cacc..e3ed93db2f 100644 --- a/frontend/src/i18n/declaration.ts +++ b/frontend/src/i18n/declaration.ts @@ -730,6 +730,8 @@ export enum I18nKey { MICROAGENT_MANAGEMENT$USE_MICROAGENTS = "MICROAGENT_MANAGEMENT$USE_MICROAGENTS", AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR = "AUTH$BY_SIGNING_UP_YOU_AGREE_TO_OUR", AUTH$NO_PROVIDERS_CONFIGURED = "AUTH$NO_PROVIDERS_CONFIGURED", + AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY = "AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY", + AUTH$EMAIL_VERIFIED_PLEASE_LOGIN = "AUTH$EMAIL_VERIFIED_PLEASE_LOGIN", AUTH$DUPLICATE_EMAIL_ERROR = "AUTH$DUPLICATE_EMAIL_ERROR", COMMON$TERMS_OF_SERVICE = "COMMON$TERMS_OF_SERVICE", COMMON$AND = "COMMON$AND", diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index 2950b3ab72..81df3b6f7d 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -11679,6 +11679,38 @@ "de": "Mindestens ein Identitätsanbieter muss konfiguriert werden (z.B. GitHub)", "uk": "Принаймні один постачальник ідентифікації має бути налаштований (наприклад, GitHub)" }, + "AUTH$PLEASE_CHECK_EMAIL_TO_VERIFY": { + "en": "Please check your email to verify your account.", + "ja": "アカウントを確認するためにメールを確認してください。", + "zh-CN": "请检查您的电子邮件以验证您的账户。", + "zh-TW": "請檢查您的電子郵件以驗證您的帳戶。", + "ko-KR": "계정을 확인하려면 이메일을 확인하세요.", + "no": "Vennligst sjekk e-posten din for å bekrefte kontoen din.", + "it": "Controlla la tua email per verificare il tuo account.", + "pt": "Por favor, verifique seu e-mail para verificar sua conta.", + "es": "Por favor, verifica tu correo electrónico para verificar tu cuenta.", + "ar": "يرجى التحقق من بريدك الإلكتروني للتحقق من حسابك.", + "fr": "Veuillez vérifier votre e-mail pour vérifier votre compte.", + "tr": "Hesabınızı doğrulamak için lütfen e-postanızı kontrol edin.", + "de": "Bitte überprüfen Sie Ihre E-Mail, um Ihr Konto zu verifizieren.", + "uk": "Будь ласка, перевірте свою електронну пошту, щоб підтвердити свій обліковий запис." + }, + "AUTH$EMAIL_VERIFIED_PLEASE_LOGIN": { + "en": "Your email has been verified. Please login below.", + "ja": "メールアドレスが確認されました。下記からログインしてください。", + "zh-CN": "您的电子邮件已验证。请在下方登录。", + "zh-TW": "您的電子郵件已驗證。請在下方登錄。", + "ko-KR": "이메일이 확인되었습니다. 아래에서 로그인하세요.", + "no": "E-posten din er bekreftet. Vennligst logg inn nedenfor.", + "it": "La tua email è stata verificata. Effettua il login qui sotto.", + "pt": "Seu e-mail foi verificado. Por favor, faça login abaixo.", + "es": "Tu correo electrónico ha sido verificado. Por favor, inicia sesión a continuación.", + "ar": "تم التحقق من بريدك الإلكتروني. يرجى تسجيل الدخول أدناه.", + "fr": "Votre e-mail a été vérifié. Veuillez vous connecter ci-dessous.", + "tr": "E-postanız doğrulandı. Lütfen aşağıdan giriş yapın.", + "de": "Ihre E-Mail wurde verifiziert. Bitte melden Sie sich unten an.", + "uk": "Вашу електронну пошту підтверджено. Будь ласка, увійдіть нижче." + }, "AUTH$DUPLICATE_EMAIL_ERROR": { "en": "Your account is unable to be created. Please use a different login or try again.", "ja": "アカウントを作成できません。別のログインを使用するか、もう一度お試しください。", diff --git a/frontend/src/routes/root-layout.tsx b/frontend/src/routes/root-layout.tsx index 876c4d8c11..73da04ea8f 100644 --- a/frontend/src/routes/root-layout.tsx +++ b/frontend/src/routes/root-layout.tsx @@ -15,6 +15,7 @@ import { useConfig } from "#/hooks/query/use-config"; import { Sidebar } from "#/components/features/sidebar/sidebar"; import { AuthModal } from "#/components/features/waitlist/auth-modal"; import { ReauthModal } from "#/components/features/waitlist/reauth-modal"; +import { EmailVerificationModal } from "#/components/features/waitlist/email-verification-modal"; import { AnalyticsConsentFormModal } from "#/components/features/analytics/analytics-consent-form-modal"; import { useSettings } from "#/hooks/query/use-settings"; import { useMigrateUserConsent } from "#/hooks/use-migrate-user-consent"; @@ -26,6 +27,7 @@ import { useAutoLogin } from "#/hooks/use-auto-login"; import { useAuthCallback } from "#/hooks/use-auth-callback"; import { useReoTracking } from "#/hooks/use-reo-tracking"; import { useSyncPostHogConsent } from "#/hooks/use-sync-posthog-consent"; +import { useEmailVerification } from "#/hooks/use-email-verification"; import { LOCAL_STORAGE_KEYS } from "#/utils/local-storage"; import { EmailVerificationGuard } from "#/components/features/guards/email-verification-guard"; import { MaintenanceBanner } from "#/components/features/maintenance/maintenance-banner"; @@ -91,6 +93,12 @@ export default function MainApp() { const effectiveGitHubAuthUrl = isOnTosPage ? null : gitHubAuthUrl; const [consentFormIsOpen, setConsentFormIsOpen] = React.useState(false); + const { + emailVerificationModalOpen, + setEmailVerificationModalOpen, + emailVerified, + hasDuplicatedEmail, + } = useEmailVerification(); // Auto-login if login method is stored in local storage useAutoLogin(); @@ -236,9 +244,18 @@ export default function MainApp() { appMode={config.data?.APP_MODE} providersConfigured={config.data?.PROVIDERS_CONFIGURED} authUrl={config.data?.AUTH_URL} + emailVerified={emailVerified} + hasDuplicatedEmail={hasDuplicatedEmail} /> )} {renderReAuthModal && } + {emailVerificationModalOpen && ( + { + setEmailVerificationModalOpen(false); + }} + /> + )} {config.data?.APP_MODE === "oss" && consentFormIsOpen && ( { From 9049b957925d367ec5bff54a38c1a5453e936ee3 Mon Sep 17 00:00:00 2001 From: "sp.wack" <83104063+amanape@users.noreply.github.com> Date: Wed, 24 Dec 2025 14:21:55 +0400 Subject: [PATCH 56/80] docs(frontend): React Router testing guide (#12145) --- frontend/__tests__/router.md | 227 +++++++++++++++++++++++++++++++++++ 1 file changed, 227 insertions(+) create mode 100644 frontend/__tests__/router.md diff --git a/frontend/__tests__/router.md b/frontend/__tests__/router.md new file mode 100644 index 0000000000..b23b4364e7 --- /dev/null +++ b/frontend/__tests__/router.md @@ -0,0 +1,227 @@ +# Testing with React Router + +## Overview + +React Router components and hooks require a routing context to function. In tests, we need to provide this context while maintaining control over the routing state. + +This guide covers the two main approaches used in the OpenHands frontend: + +1. **`createRoutesStub`** - Creates a complete route structure for testing components with their actual route configuration, loaders, and nested routes. +2. **`MemoryRouter`** - Provides a minimal routing context for components that just need router hooks to work. + +Choose your approach based on what your component actually needs from the router. + +## When to Use Each Approach + +### `createRoutesStub` (Recommended) + +Use `createRoutesStub` when your component: +- Relies on route parameters (`useParams`) +- Uses loader data (`useLoaderData`) or `clientLoader` +- Has nested routes or uses `` +- Needs to test navigation between routes + +> [!NOTE] +> `createRoutesStub` is intended for unit testing **reusable components** that depend on router context. For testing full route/page components, consider E2E tests (Playwright, Cypress) instead. + +```typescript +import { createRoutesStub } from "react-router"; +import { render } from "@testing-library/react"; + +const RouterStub = createRoutesStub([ + { + Component: MyRouteComponent, + path: "/conversations/:conversationId", + }, +]); + +render(); +``` + +**With nested routes and loaders:** + +```typescript +const RouterStub = createRoutesStub([ + { + Component: SettingsScreen, + clientLoader, + path: "/settings", + children: [ + { + Component: () =>
, + path: "/settings", + }, + { + Component: () =>
, + path: "/settings/integrations", + }, + ], + }, +]); + +render(); +``` + +> [!TIP] +> When using `clientLoader` from a Route module, you may encounter type mismatches. Use `@ts-expect-error` as a workaround: + +```typescript +import { clientLoader } from "@/routes/settings"; + +const RouterStub = createRoutesStub([ + { + path: "/settings", + Component: SettingsScreen, + // @ts-expect-error: loader types won't align between test and app code + loader: clientLoader, + }, +]); +``` + +### `MemoryRouter` + +Use `MemoryRouter` when your component: +- Only needs basic routing context to render +- Uses `` components but you don't need to test navigation +- Doesn't depend on specific route parameters or loaders + +```typescript +import { MemoryRouter } from "react-router"; +import { render } from "@testing-library/react"; + +render( + + + +); +``` + +**With initial route:** + +```typescript +render( + + + +); +``` + +## Anti-patterns to Avoid + +### Using `BrowserRouter` in tests + +`BrowserRouter` interacts with the actual browser history API, which can cause issues in test environments: + +```typescript +// ❌ Avoid +render( + + + +); + +// ✅ Use MemoryRouter instead +render( + + + +); +``` + +### Mocking router hooks when `createRoutesStub` would work + +Mocking hooks like `useParams` directly can be brittle and doesn't test the actual routing behavior: + +```typescript +// ❌ Avoid when possible +vi.mock("react-router", async () => { + const actual = await vi.importActual("react-router"); + return { + ...actual, + useParams: () => ({ conversationId: "123" }), + }; +}); + +// ✅ Prefer createRoutesStub - tests real routing behavior +const RouterStub = createRoutesStub([ + { + Component: MyComponent, + path: "/conversations/:conversationId", + }, +]); + +render(); +``` + +## Common Patterns + +### Combining with `QueryClientProvider` + +Many components need both routing and TanStack Query context: + +```typescript +import { createRoutesStub } from "react-router"; +import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; + +const queryClient = new QueryClient({ + defaultOptions: { + queries: { retry: false }, + }, +}); + +const RouterStub = createRoutesStub([ + { + Component: MyComponent, + path: "/", + }, +]); + +render(, { + wrapper: ({ children }) => ( + + {children} + + ), +}); +``` + +### Testing navigation behavior + +Verify that user interactions trigger the expected navigation: + +```typescript +import { createRoutesStub } from "react-router"; +import { screen } from "@testing-library/react"; +import userEvent from "@testing-library/user-event"; + +const RouterStub = createRoutesStub([ + { + Component: HomeScreen, + path: "/", + }, + { + Component: () =>
, + path: "/settings", + }, +]); + +render(); + +const user = userEvent.setup(); +await user.click(screen.getByRole("link", { name: /settings/i })); + +expect(screen.getByTestId("settings-screen")).toBeInTheDocument(); +``` + +## See Also + +### Codebase Examples + +- [settings.test.tsx](__tests__/routes/settings.test.tsx) - `createRoutesStub` with nested routes and loaders +- [home-screen.test.tsx](__tests__/routes/home-screen.test.tsx) - `createRoutesStub` with navigation testing +- [chat-interface.test.tsx](__tests__/components/chat/chat-interface.test.tsx) - `MemoryRouter` usage + +### Official Documentation + +- [React Router Testing Guide](https://reactrouter.com/start/framework/testing) - Official guide on testing with `createRoutesStub` +- [MemoryRouter API](https://reactrouter.com/api/declarative-routers/MemoryRouter) - API reference for `MemoryRouter` From 36fe23aea3245b9cc1ef946867b318dcdb3bd770 Mon Sep 17 00:00:00 2001 From: lif <1835304752@qq.com> Date: Wed, 24 Dec 2025 19:37:12 +0800 Subject: [PATCH 57/80] fix(llm): retry LiteLLM bad gateway errors (#12117) --- openhands/llm/llm.py | 2 ++ .../llm/test_api_connection_error_retry.py | 24 ++++++++++++++++++- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/openhands/llm/llm.py b/openhands/llm/llm.py index 150fa54925..c786cfd6a3 100644 --- a/openhands/llm/llm.py +++ b/openhands/llm/llm.py @@ -21,6 +21,7 @@ from litellm import completion as litellm_completion from litellm import completion_cost as litellm_completion_cost from litellm.exceptions import ( APIConnectionError, + BadGatewayError, RateLimitError, ServiceUnavailableError, ) @@ -45,6 +46,7 @@ LLM_RETRY_EXCEPTIONS: tuple[type[Exception], ...] = ( APIConnectionError, RateLimitError, ServiceUnavailableError, + BadGatewayError, litellm.Timeout, litellm.InternalServerError, LLMNoResponseError, diff --git a/tests/unit/llm/test_api_connection_error_retry.py b/tests/unit/llm/test_api_connection_error_retry.py index 8bcf15f986..b88c170079 100644 --- a/tests/unit/llm/test_api_connection_error_retry.py +++ b/tests/unit/llm/test_api_connection_error_retry.py @@ -1,7 +1,7 @@ from unittest.mock import patch import pytest -from litellm.exceptions import APIConnectionError +from litellm.exceptions import APIConnectionError, BadGatewayError from openhands.core.config import LLMConfig from openhands.llm.llm import LLM @@ -86,3 +86,25 @@ def test_completion_max_retries_api_connection_error( # The exception doesn't contain retry information in the current implementation # Just verify that we got an APIConnectionError assert 'API connection error' in str(excinfo.value) + + +@patch('openhands.llm.llm.litellm_completion') +def test_completion_retries_bad_gateway_error(mock_litellm_completion, default_config): + """Test that BadGatewayError is properly retried.""" + mock_litellm_completion.side_effect = [ + BadGatewayError( + message='Bad gateway', + llm_provider='test_provider', + model='test_model', + ), + {'choices': [{'message': {'content': 'Retry successful'}}]}, + ] + + llm = LLM(config=default_config, service_id='test-service') + response = llm.completion( + messages=[{'role': 'user', 'content': 'Hello!'}], + stream=False, + ) + + assert response['choices'][0]['message']['content'] == 'Retry successful' + assert mock_litellm_completion.call_count == 2 From 6d14ce420edebb39d330c8d805c88d46946eb621 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 24 Dec 2025 10:50:57 -0700 Subject: [PATCH 58/80] Implement Export feature for V1 conversations with comprehensive unit tests (#12030) Co-authored-by: openhands Co-authored-by: hieptl --- .../v1-conversation-service.api.ts | 15 ++ .../conversation-card-actions.tsx | 5 + .../conversation-card-context-menu.tsx | 14 + .../conversation-card/conversation-card.tsx | 19 ++ .../conversation-name-context-menu.tsx | 24 +- .../conversation/conversation-name.tsx | 7 + .../use-conversation-name-context-menu.ts | 21 ++ .../src/hooks/use-download-conversation.ts | 25 ++ frontend/src/i18n/translation.json | 2 +- frontend/src/utils/utils.ts | 14 + .../app_conversation_router.py | 41 ++- .../app_conversation_service.py | 17 ++ .../live_status_app_conversation_service.py | 65 +++++ openhands/utils/search_utils.py | 5 +- ...st_live_status_app_conversation_service.py | 246 +++++++++++++++++- .../experiments/test_experiment_manager.py | 3 + .../server/data_models/test_conversation.py | 3 + 17 files changed, 516 insertions(+), 10 deletions(-) create mode 100644 frontend/src/hooks/use-download-conversation.ts diff --git a/frontend/src/api/conversation-service/v1-conversation-service.api.ts b/frontend/src/api/conversation-service/v1-conversation-service.api.ts index d2f8f51ff5..25aa4a1130 100644 --- a/frontend/src/api/conversation-service/v1-conversation-service.api.ts +++ b/frontend/src/api/conversation-service/v1-conversation-service.api.ts @@ -317,6 +317,21 @@ class V1ConversationService { return data; } + /** + * Download a conversation trajectory as a zip file + * @param conversationId The conversation ID + * @returns A blob containing the zip file + */ + static async downloadConversation(conversationId: string): Promise { + const response = await openHands.get( + `/api/v1/app-conversations/${conversationId}/download`, + { + responseType: "blob", + }, + ); + return response.data; + } + /** * Get all skills associated with a V1 conversation * @param conversationId The conversation ID diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-actions.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-actions.tsx index 43b7bc1987..7afa2fed14 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-actions.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-actions.tsx @@ -11,6 +11,7 @@ interface ConversationCardActionsProps { onStop?: (event: React.MouseEvent) => void; onEdit?: (event: React.MouseEvent) => void; onDownloadViaVSCode?: (event: React.MouseEvent) => void; + onDownloadConversation?: (event: React.MouseEvent) => void; conversationStatus?: ConversationStatus; conversationId?: string; showOptions?: boolean; @@ -23,6 +24,7 @@ export function ConversationCardActions({ onStop, onEdit, onDownloadViaVSCode, + onDownloadConversation, conversationStatus, conversationId, showOptions, @@ -62,6 +64,9 @@ export function ConversationCardActions({ onDownloadViaVSCode={ conversationId && showOptions ? onDownloadViaVSCode : undefined } + onDownloadConversation={ + conversationId ? onDownloadConversation : undefined + } position="bottom" />
diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx index 30a7ec42cb..06f5021002 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx @@ -24,6 +24,7 @@ interface ConversationCardContextMenuProps { onShowAgentTools?: (event: React.MouseEvent) => void; onShowSkills?: (event: React.MouseEvent) => void; onDownloadViaVSCode?: (event: React.MouseEvent) => void; + onDownloadConversation?: (event: React.MouseEvent) => void; position?: "top" | "bottom"; } @@ -39,6 +40,7 @@ export function ConversationCardContextMenu({ onShowAgentTools, onShowSkills, onDownloadViaVSCode, + onDownloadConversation, position = "bottom", }: ConversationCardContextMenuProps) { const { t } = useTranslation(); @@ -134,6 +136,18 @@ export function ConversationCardContextMenu({ /> ), + onDownloadConversation && ( + + } + text={t(I18nKey.BUTTON$EXPORT_CONVERSATION)} + /> + + ), ])} {generateSection( [ diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card.tsx index fff0a0888d..22fa91f3ac 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card.tsx @@ -8,6 +8,7 @@ import { RepositorySelection } from "#/api/open-hands.types"; import { ConversationCardHeader } from "./conversation-card-header"; import { ConversationCardActions } from "./conversation-card-actions"; import { ConversationCardFooter } from "./conversation-card-footer"; +import { useDownloadConversation } from "#/hooks/use-download-conversation"; interface ConversationCardProps { onClick?: () => void; @@ -46,6 +47,7 @@ export function ConversationCard({ }: ConversationCardProps) { const posthog = usePostHog(); const [titleMode, setTitleMode] = React.useState<"view" | "edit">("view"); + const { mutateAsync: downloadConversation } = useDownloadConversation(); const onTitleSave = (newTitle: string) => { if (newTitle !== "" && newTitle !== title) { @@ -101,6 +103,18 @@ export function ConversationCard({ onContextMenuToggle?.(false); }; + const handleDownloadConversation = async ( + event: React.MouseEvent, + ) => { + event.preventDefault(); + event.stopPropagation(); + + if (conversationId && conversationVersion === "V1") { + await downloadConversation(conversationId); + } + onContextMenuToggle?.(false); + }; + const hasContextMenu = !!(onDelete || onChangeTitle || showOptions); return ( @@ -130,6 +144,11 @@ export function ConversationCard({ onStop={onStop && handleStop} onEdit={onChangeTitle && handleEdit} onDownloadViaVSCode={handleDownloadViaVSCode} + onDownloadConversation={ + conversationVersion === "V1" + ? handleDownloadConversation + : undefined + } conversationStatus={conversationStatus} conversationId={conversationId} showOptions={showOptions} diff --git a/frontend/src/components/features/conversation/conversation-name-context-menu.tsx b/frontend/src/components/features/conversation/conversation-name-context-menu.tsx index 95de15b37e..672c10cb6b 100644 --- a/frontend/src/components/features/conversation/conversation-name-context-menu.tsx +++ b/frontend/src/components/features/conversation/conversation-name-context-menu.tsx @@ -34,6 +34,7 @@ interface ConversationNameContextMenuProps { onShowSkills?: (event: React.MouseEvent) => void; onExportConversation?: (event: React.MouseEvent) => void; onDownloadViaVSCode?: (event: React.MouseEvent) => void; + onDownloadConversation?: (event: React.MouseEvent) => void; position?: "top" | "bottom"; } @@ -47,6 +48,7 @@ export function ConversationNameContextMenu({ onShowSkills, onExportConversation, onDownloadViaVSCode, + onDownloadConversation, position = "bottom", }: ConversationNameContextMenuProps) { const { width } = useWindowSize(); @@ -58,7 +60,7 @@ export function ConversationNameContextMenu({ // This is a temporary measure and may be re-enabled in the future const isV1Conversation = conversation?.conversation_version === "V1"; - const hasDownload = Boolean(onDownloadViaVSCode); + const hasDownload = Boolean(onDownloadViaVSCode || onDownloadConversation); const hasExport = Boolean(onExportConversation); const hasTools = Boolean(onShowAgentTools || onShowSkills); const hasInfo = Boolean(onDisplayCost); @@ -118,9 +120,9 @@ export function ConversationNameContextMenu({ )} - {(hasExport || hasDownload) && !isV1Conversation && ( + {(hasExport || hasDownload) && !isV1Conversation ? ( - )} + ) : null} {onExportConversation && !isV1Conversation && ( )} - {(hasInfo || hasControl) && !isV1Conversation && ( - + {onDownloadConversation && isV1Conversation && ( + + } + text={t(I18nKey.BUTTON$EXPORT_CONVERSATION)} + className={CONTEXT_MENU_ICON_TEXT_CLASSNAME} + /> + )} + {(hasInfo || hasControl) && } + {onDisplayCost && ( )} diff --git a/frontend/src/hooks/use-conversation-name-context-menu.ts b/frontend/src/hooks/use-conversation-name-context-menu.ts index 6072d5331e..0bc43bd4b6 100644 --- a/frontend/src/hooks/use-conversation-name-context-menu.ts +++ b/frontend/src/hooks/use-conversation-name-context-menu.ts @@ -15,6 +15,8 @@ import { displayErrorToast } from "#/utils/custom-toast-handlers"; import { I18nKey } from "#/i18n/declaration"; import { useEventStore } from "#/stores/use-event-store"; import { isV0Event } from "#/types/v1/type-guards"; +import { useActiveConversation } from "./query/use-active-conversation"; +import { useDownloadConversation } from "./use-download-conversation"; interface UseConversationNameContextMenuProps { conversationId?: string; @@ -34,6 +36,7 @@ export function useConversationNameContextMenu({ const { conversationId: currentConversationId } = useParams(); const navigate = useNavigate(); const events = useEventStore((state) => state.events); + const { data: conversation } = useActiveConversation(); const { mutate: deleteConversation } = useDeleteConversation(); const { mutate: stopConversation } = useUnifiedPauseConversationSandbox(); const { mutate: getTrajectory } = useGetTrajectory(); @@ -46,6 +49,7 @@ export function useConversationNameContextMenu({ React.useState(false); const [confirmStopModalVisible, setConfirmStopModalVisible] = React.useState(false); + const { mutateAsync: downloadConversation } = useDownloadConversation(); const systemMessage = events .filter(isV0Event) @@ -148,6 +152,17 @@ export function useConversationNameContextMenu({ onContextMenuToggle?.(false); }; + const handleDownloadConversation = async ( + event: React.MouseEvent, + ) => { + event.preventDefault(); + event.stopPropagation(); + if (conversationId && conversation?.conversation_version === "V1") { + await downloadConversation(conversationId); + } + onContextMenuToggle?.(false); + }; + const handleDisplayCost = (event: React.MouseEvent) => { event.stopPropagation(); setMetricsModalVisible(true); @@ -173,6 +188,7 @@ export function useConversationNameContextMenu({ handleEdit, handleExportConversation, handleDownloadViaVSCode, + handleDownloadConversation, handleDisplayCost, handleShowAgentTools, handleShowSkills, @@ -199,6 +215,11 @@ export function useConversationNameContextMenu({ shouldShowStop: conversationStatus !== "STOPPED", shouldShowDownload: Boolean(conversationId && showOptions), shouldShowExport: Boolean(conversationId && showOptions), + shouldShowDownloadConversation: Boolean( + conversationId && + showOptions && + conversation?.conversation_version === "V1", + ), shouldShowDisplayCost: showOptions, shouldShowAgentTools: Boolean(showOptions && systemMessage), shouldShowSkills: Boolean(showOptions && conversationId), diff --git a/frontend/src/hooks/use-download-conversation.ts b/frontend/src/hooks/use-download-conversation.ts new file mode 100644 index 0000000000..ae2f4c507c --- /dev/null +++ b/frontend/src/hooks/use-download-conversation.ts @@ -0,0 +1,25 @@ +import { useMutation } from "@tanstack/react-query"; +import { usePostHog } from "posthog-js/react"; +import { useTranslation } from "react-i18next"; +import V1ConversationService from "#/api/conversation-service/v1-conversation-service.api"; +import { downloadBlob } from "#/utils/utils"; +import { displayErrorToast } from "#/utils/custom-toast-handlers"; +import { I18nKey } from "#/i18n/declaration"; + +export const useDownloadConversation = () => { + const posthog = usePostHog(); + const { t } = useTranslation(); + + return useMutation({ + mutationKey: ["conversations", "download"], + mutationFn: async (conversationId: string) => { + posthog.capture("download_trajectory_button_clicked"); + const blob = + await V1ConversationService.downloadConversation(conversationId); + downloadBlob(blob, `conversation_${conversationId}.zip`); + }, + onError: () => { + displayErrorToast(t(I18nKey.CONVERSATION$DOWNLOAD_ERROR)); + }, + }); +}; diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index 81df3b6f7d..717e515107 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -8768,7 +8768,7 @@ "uk": "Позначити це рішення як некорисне" }, "BUTTON$EXPORT_CONVERSATION": { - "en": "Export conversation", + "en": "Export Conversation", "zh-CN": "导出对话", "zh-TW": "導出對話", "de": "Konversation exportieren", diff --git a/frontend/src/utils/utils.ts b/frontend/src/utils/utils.ts index 69ff7aae5f..a7408a7177 100644 --- a/frontend/src/utils/utils.ts +++ b/frontend/src/utils/utils.ts @@ -12,6 +12,20 @@ export function cn(...inputs: ClassValue[]) { return twMerge(clsx(inputs)); } +/** + * Trigger a download for a provided Blob with the given filename + */ +export const downloadBlob = (blob: Blob, filename: string): void => { + const url = window.URL.createObjectURL(blob); + const link = document.createElement("a"); + link.href = url; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + window.URL.revokeObjectURL(url); +}; + /** * Get the numeric height value from an element's style property * @param el The HTML element to get the height from diff --git a/openhands/app_server/app_conversation/app_conversation_router.py b/openhands/app_server/app_conversation/app_conversation_router.py index a7a0414e31..532602dbca 100644 --- a/openhands/app_server/app_conversation/app_conversation_router.py +++ b/openhands/app_server/app_conversation/app_conversation_router.py @@ -29,7 +29,7 @@ else: return await async_iterator.__anext__() -from fastapi import APIRouter, Query, Request, status +from fastapi import APIRouter, HTTPException, Query, Request, Response, status from fastapi.responses import JSONResponse, StreamingResponse from sqlalchemy.ext.asyncio import AsyncSession @@ -546,6 +546,45 @@ async def get_conversation_skills( ) +@router.get('/{conversation_id}/download') +async def export_conversation( + conversation_id: UUID, + app_conversation_service: AppConversationService = ( + app_conversation_service_dependency + ), +): + """Download a conversation trajectory as a zip file. + + Returns a zip file containing all events and metadata for the conversation. + + Args: + conversation_id: The UUID of the conversation to download + + Returns: + A zip file containing the conversation trajectory + """ + try: + # Get the zip file content + zip_content = await app_conversation_service.export_conversation( + conversation_id + ) + + # Return as a downloadable zip file + return Response( + content=zip_content, + media_type='application/zip', + headers={ + 'Content-Disposition': f'attachment; filename="conversation_{conversation_id}.zip"' + }, + ) + except ValueError as e: + raise HTTPException(status_code=404, detail=str(e)) + except Exception as e: + raise HTTPException( + status_code=500, detail=f'Failed to download trajectory: {str(e)}' + ) + + async def _consume_remaining( async_iter, db_session: AsyncSession, httpx_client: httpx.AsyncClient ): diff --git a/openhands/app_server/app_conversation/app_conversation_service.py b/openhands/app_server/app_conversation/app_conversation_service.py index 8d6c6775a8..b1b10c39ba 100644 --- a/openhands/app_server/app_conversation/app_conversation_service.py +++ b/openhands/app_server/app_conversation/app_conversation_service.py @@ -113,6 +113,23 @@ class AppConversationService(ABC): Returns True if the conversation was deleted successfully, False otherwise. """ + @abstractmethod + async def export_conversation(self, conversation_id: UUID) -> bytes: + """Download a conversation trajectory as a zip file. + + Args: + conversation_id: The UUID of the conversation to download. + + This method should: + 1. Get all events for the conversation + 2. Create a temporary directory + 3. Save each event as a JSON file + 4. Save conversation metadata as meta.json + 5. Create and return a zip file containing all the data + + Returns the zip file as bytes. + """ + class AppConversationServiceInjector( DiscriminatedUnionMixin, Injector[AppConversationService], ABC diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index db30710f76..84f20de07a 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -1,5 +1,9 @@ import asyncio +import json import logging +import os +import tempfile +import zipfile from collections import defaultdict from dataclasses import dataclass from datetime import datetime, timedelta @@ -44,6 +48,7 @@ from openhands.app_server.app_conversation.sql_app_conversation_info_service imp ) from openhands.app_server.config import get_event_callback_service from openhands.app_server.errors import SandboxError +from openhands.app_server.event.event_service import EventService from openhands.app_server.event_callback.event_callback_models import EventCallback from openhands.app_server.event_callback.event_callback_service import ( EventCallbackService, @@ -71,6 +76,7 @@ from openhands.integrations.provider import ProviderType from openhands.sdk import Agent, AgentContext, LocalWorkspace from openhands.sdk.llm import LLM from openhands.sdk.secret import LookupSecret, StaticSecret +from openhands.sdk.utils.paging import page_iterator from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace from openhands.server.types import AppMode from openhands.tools.preset.default import ( @@ -93,6 +99,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase): app_conversation_info_service: AppConversationInfoService app_conversation_start_task_service: AppConversationStartTaskService event_callback_service: EventCallbackService + event_service: EventService sandbox_service: SandboxService sandbox_spec_service: SandboxSpecService jwt_service: JwtService @@ -1178,6 +1185,61 @@ class LiveStatusAppConversationService(AppConversationServiceBase): return deleted_info or deleted_tasks + async def export_conversation(self, conversation_id: UUID) -> bytes: + """Download a conversation trajectory as a zip file. + + Args: + conversation_id: The UUID of the conversation to download. + + Returns the zip file as bytes. + """ + # Get the conversation info to verify it exists and user has access + conversation_info = ( + await self.app_conversation_info_service.get_app_conversation_info( + conversation_id + ) + ) + if not conversation_info: + raise ValueError(f'Conversation not found: {conversation_id}') + + # Create a temporary directory to store files + with tempfile.TemporaryDirectory() as temp_dir: + # Get all events for this conversation + i = 0 + async for event in page_iterator( + self.event_service.search_events, conversation_id__eq=conversation_id + ): + event_filename = f'event_{i:06d}_{event.id}.json' + event_path = os.path.join(temp_dir, event_filename) + + with open(event_path, 'w') as f: + # Use model_dump with mode='json' to handle UUID serialization + event_data = event.model_dump(mode='json') + json.dump(event_data, f, indent=2) + i += 1 + + # Create meta.json with conversation info + meta_path = os.path.join(temp_dir, 'meta.json') + with open(meta_path, 'w') as f: + f.write(conversation_info.model_dump_json(indent=2)) + + # Create zip file in memory + zip_buffer = tempfile.NamedTemporaryFile() + with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zipf: + # Add all files from temp directory to zip + for root, dirs, files in os.walk(temp_dir): + for file in files: + file_path = os.path.join(root, file) + arcname = os.path.relpath(file_path, temp_dir) + zipf.write(file_path, arcname) + + # Read the zip file content + zip_buffer.seek(0) + zip_content = zip_buffer.read() + zip_buffer.close() + + return zip_content + class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector): sandbox_startup_timeout: int = Field( @@ -1208,6 +1270,7 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector): from openhands.app_server.config import ( get_app_conversation_info_service, get_app_conversation_start_task_service, + get_event_service, get_global_config, get_httpx_client, get_jwt_service, @@ -1227,6 +1290,7 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector): state, request ) as app_conversation_start_task_service, get_event_callback_service(state, request) as event_callback_service, + get_event_service(state, request) as event_service, get_jwt_service(state, request) as jwt_service, get_httpx_client(state, request) as httpx_client, ): @@ -1274,6 +1338,7 @@ class LiveStatusAppConversationServiceInjector(AppConversationServiceInjector): app_conversation_info_service=app_conversation_info_service, app_conversation_start_task_service=app_conversation_start_task_service, event_callback_service=event_callback_service, + event_service=event_service, jwt_service=jwt_service, sandbox_startup_timeout=self.sandbox_startup_timeout, sandbox_startup_poll_frequency=self.sandbox_startup_poll_frequency, diff --git a/openhands/utils/search_utils.py b/openhands/utils/search_utils.py index b7714249f8..b5af01f2ec 100644 --- a/openhands/utils/search_utils.py +++ b/openhands/utils/search_utils.py @@ -22,7 +22,10 @@ async def iterate(fn: Callable, **kwargs) -> AsyncIterator: kwargs['page_id'] = None while True: result_set = await fn(**kwargs) - for result in result_set.results: + items = getattr(result_set, 'items', None) + if items is None: + items = getattr(result_set, 'results') + for result in items: yield result if result_set.next_page_id is None: return diff --git a/tests/unit/app_server/test_live_status_app_conversation_service.py b/tests/unit/app_server/test_live_status_app_conversation_service.py index f662f33146..f05cb0581a 100644 --- a/tests/unit/app_server/test_live_status_app_conversation_service.py +++ b/tests/unit/app_server/test_live_status_app_conversation_service.py @@ -1,13 +1,21 @@ """Unit tests for the methods in LiveStatusAppConversationService.""" +import io +import json +import zipfile +from datetime import datetime from unittest.mock import AsyncMock, Mock, patch from uuid import UUID, uuid4 import pytest -from openhands.agent_server.models import SendMessageRequest, StartConversationRequest +from openhands.agent_server.models import ( + SendMessageRequest, + StartConversationRequest, +) from openhands.app_server.app_conversation.app_conversation_models import ( AgentType, + AppConversationInfo, AppConversationStartRequest, ) from openhands.app_server.app_conversation.live_status_app_conversation_service import ( @@ -22,7 +30,7 @@ from openhands.app_server.sandbox.sandbox_models import ( from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo from openhands.app_server.user.user_context import UserContext from openhands.integrations.provider import ProviderType -from openhands.sdk import Agent +from openhands.sdk import Agent, Event from openhands.sdk.llm import LLM from openhands.sdk.secret import LookupSecret, StaticSecret from openhands.sdk.workspace import LocalWorkspace @@ -45,6 +53,7 @@ class TestLiveStatusAppConversationService: self.mock_app_conversation_info_service = Mock() self.mock_app_conversation_start_task_service = Mock() self.mock_event_callback_service = Mock() + self.mock_event_service = Mock() self.mock_httpx_client = Mock() # Create service instance @@ -54,6 +63,7 @@ class TestLiveStatusAppConversationService: app_conversation_info_service=self.mock_app_conversation_info_service, app_conversation_start_task_service=self.mock_app_conversation_start_task_service, event_callback_service=self.mock_event_callback_service, + event_service=self.mock_event_service, sandbox_service=self.mock_sandbox_service, sandbox_spec_service=self.mock_sandbox_spec_service, jwt_service=self.mock_jwt_service, @@ -852,6 +862,238 @@ class TestLiveStatusAppConversationService: self.service._finalize_conversation_request.assert_called_once() @pytest.mark.asyncio + async def test_export_conversation_success(self): + """Test successful download of conversation trajectory.""" + # Arrange + conversation_id = uuid4() + + # Mock conversation info + mock_conversation_info = Mock(spec=AppConversationInfo) + mock_conversation_info.id = conversation_id + mock_conversation_info.title = 'Test Conversation' + mock_conversation_info.created_at = datetime(2024, 1, 1, 12, 0, 0) + mock_conversation_info.updated_at = datetime(2024, 1, 1, 13, 0, 0) + mock_conversation_info.selected_repository = 'test/repo' + mock_conversation_info.git_provider = 'github' + mock_conversation_info.selected_branch = 'main' + mock_conversation_info.model_dump_json = Mock( + return_value='{"id": "test", "title": "Test Conversation"}' + ) + + self.mock_app_conversation_info_service.get_app_conversation_info = AsyncMock( + return_value=mock_conversation_info + ) + + # Mock events + mock_event1 = Mock(spec=Event) + mock_event1.id = uuid4() + mock_event1.model_dump = Mock( + return_value={'id': str(mock_event1.id), 'type': 'action'} + ) + + mock_event2 = Mock(spec=Event) + mock_event2.id = uuid4() + mock_event2.model_dump = Mock( + return_value={'id': str(mock_event2.id), 'type': 'observation'} + ) + + # Mock event service search_events to return paginated results + mock_event_page1 = Mock() + mock_event_page1.items = [mock_event1] + mock_event_page1.next_page_id = 'page2' + + mock_event_page2 = Mock() + mock_event_page2.items = [mock_event2] + mock_event_page2.next_page_id = None + + self.mock_event_service.search_events = AsyncMock( + side_effect=[mock_event_page1, mock_event_page2] + ) + + # Act + result = await self.service.export_conversation(conversation_id) + + # Assert + assert result is not None + assert isinstance(result, bytes) # Should be bytes + + # Verify the zip file contents + with zipfile.ZipFile(io.BytesIO(result), 'r') as zipf: + file_list = zipf.namelist() + + # Should contain meta.json and event files + assert 'meta.json' in file_list + assert any( + f.startswith('event_') and f.endswith('.json') for f in file_list + ) + + # Check meta.json content + with zipf.open('meta.json') as meta_file: + meta_content = meta_file.read().decode('utf-8') + assert '"id": "test"' in meta_content + assert '"title": "Test Conversation"' in meta_content + + # Check event files + event_files = [f for f in file_list if f.startswith('event_')] + assert len(event_files) == 2 # Should have 2 event files + + # Verify event file content + with zipf.open(event_files[0]) as event_file: + event_content = json.loads(event_file.read().decode('utf-8')) + assert 'id' in event_content + assert 'type' in event_content + + # Verify service calls + self.mock_app_conversation_info_service.get_app_conversation_info.assert_called_once_with( + conversation_id + ) + assert self.mock_event_service.search_events.call_count == 2 + mock_conversation_info.model_dump_json.assert_called_once_with(indent=2) + + @pytest.mark.asyncio + async def test_export_conversation_conversation_not_found(self): + """Test download when conversation is not found.""" + # Arrange + conversation_id = uuid4() + self.mock_app_conversation_info_service.get_app_conversation_info = AsyncMock( + return_value=None + ) + + # Act & Assert + with pytest.raises( + ValueError, match=f'Conversation not found: {conversation_id}' + ): + await self.service.export_conversation(conversation_id) + + # Verify service calls + self.mock_app_conversation_info_service.get_app_conversation_info.assert_called_once_with( + conversation_id + ) + self.mock_event_service.search_events.assert_not_called() + + @pytest.mark.asyncio + async def test_export_conversation_empty_events(self): + """Test download with conversation that has no events.""" + # Arrange + conversation_id = uuid4() + + # Mock conversation info + mock_conversation_info = Mock(spec=AppConversationInfo) + mock_conversation_info.id = conversation_id + mock_conversation_info.title = 'Empty Conversation' + mock_conversation_info.model_dump_json = Mock( + return_value='{"id": "test", "title": "Empty Conversation"}' + ) + + self.mock_app_conversation_info_service.get_app_conversation_info = AsyncMock( + return_value=mock_conversation_info + ) + + # Mock empty event page + mock_event_page = Mock() + mock_event_page.items = [] + mock_event_page.next_page_id = None + + self.mock_event_service.search_events = AsyncMock(return_value=mock_event_page) + + # Act + result = await self.service.export_conversation(conversation_id) + + # Assert + assert result is not None + assert isinstance(result, bytes) # Should be bytes + + # Verify the zip file contents + with zipfile.ZipFile(io.BytesIO(result), 'r') as zipf: + file_list = zipf.namelist() + + # Should only contain meta.json (no event files) + assert 'meta.json' in file_list + assert len([f for f in file_list if f.startswith('event_')]) == 0 + + # Verify service calls + self.mock_app_conversation_info_service.get_app_conversation_info.assert_called_once_with( + conversation_id + ) + self.mock_event_service.search_events.assert_called_once() + + @pytest.mark.asyncio + async def test_export_conversation_large_pagination(self): + """Test download with multiple pages of events.""" + # Arrange + conversation_id = uuid4() + + # Mock conversation info + mock_conversation_info = Mock(spec=AppConversationInfo) + mock_conversation_info.id = conversation_id + mock_conversation_info.title = 'Large Conversation' + mock_conversation_info.model_dump_json = Mock( + return_value='{"id": "test", "title": "Large Conversation"}' + ) + + self.mock_app_conversation_info_service.get_app_conversation_info = AsyncMock( + return_value=mock_conversation_info + ) + + # Create multiple pages of events + events_per_page = 3 + total_pages = 4 + all_events = [] + + for page_num in range(total_pages): + page_events = [] + for i in range(events_per_page): + mock_event = Mock(spec=Event) + mock_event.id = uuid4() + mock_event.model_dump = Mock( + return_value={ + 'id': str(mock_event.id), + 'type': f'event_page_{page_num}_item_{i}', + } + ) + page_events.append(mock_event) + all_events.append(mock_event) + + mock_event_page = Mock() + mock_event_page.items = page_events + mock_event_page.next_page_id = ( + f'page{page_num + 1}' if page_num < total_pages - 1 else None + ) + + if page_num == 0: + first_page = mock_event_page + elif page_num == 1: + second_page = mock_event_page + elif page_num == 2: + third_page = mock_event_page + else: + fourth_page = mock_event_page + + self.mock_event_service.search_events = AsyncMock( + side_effect=[first_page, second_page, third_page, fourth_page] + ) + + # Act + result = await self.service.export_conversation(conversation_id) + + # Assert + assert result is not None + assert isinstance(result, bytes) # Should be bytes + + # Verify the zip file contents + with zipfile.ZipFile(io.BytesIO(result), 'r') as zipf: + file_list = zipf.namelist() + + # Should contain meta.json and all event files + assert 'meta.json' in file_list + event_files = [f for f in file_list if f.startswith('event_')] + assert ( + len(event_files) == total_pages * events_per_page + ) # Should have all events + + # Verify service calls - should call search_events for each page + assert self.mock_event_service.search_events.call_count == total_pages + @patch( 'openhands.app_server.app_conversation.live_status_app_conversation_service.AsyncRemoteWorkspace' ) diff --git a/tests/unit/experiments/test_experiment_manager.py b/tests/unit/experiments/test_experiment_manager.py index c389423cf5..70cd6c5d07 100644 --- a/tests/unit/experiments/test_experiment_manager.py +++ b/tests/unit/experiments/test_experiment_manager.py @@ -176,12 +176,15 @@ class TestExperimentManagerIntegration: jwt_service = Mock() httpx_client = Mock() + event_service = Mock() + service = LiveStatusAppConversationService( init_git_in_empty_workspace=False, user_context=user_context, app_conversation_info_service=app_conversation_info_service, app_conversation_start_task_service=app_conversation_start_task_service, event_callback_service=event_callback_service, + event_service=event_service, sandbox_service=sandbox_service, sandbox_spec_service=sandbox_spec_service, jwt_service=jwt_service, diff --git a/tests/unit/server/data_models/test_conversation.py b/tests/unit/server/data_models/test_conversation.py index d5e289ecfa..fc305d170e 100644 --- a/tests/unit/server/data_models/test_conversation.py +++ b/tests/unit/server/data_models/test_conversation.py @@ -2183,6 +2183,7 @@ async def test_delete_v1_conversation_with_sub_conversations(): app_conversation_info_service=mock_info_service, app_conversation_start_task_service=mock_start_task_service, event_callback_service=MagicMock(), + event_service=MagicMock(), sandbox_service=mock_sandbox_service, sandbox_spec_service=MagicMock(), jwt_service=MagicMock(), @@ -2305,6 +2306,7 @@ async def test_delete_v1_conversation_with_no_sub_conversations(): app_conversation_info_service=mock_info_service, app_conversation_start_task_service=mock_start_task_service, event_callback_service=MagicMock(), + event_service=MagicMock(), sandbox_service=mock_sandbox_service, sandbox_spec_service=MagicMock(), jwt_service=MagicMock(), @@ -2457,6 +2459,7 @@ async def test_delete_v1_conversation_sub_conversation_deletion_error(): app_conversation_info_service=mock_info_service, app_conversation_start_task_service=mock_start_task_service, event_callback_service=MagicMock(), + event_service=MagicMock(), sandbox_service=mock_sandbox_service, sandbox_spec_service=MagicMock(), jwt_service=MagicMock(), From fe1026ee8a421926d9b037486442b49fbcb43585 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 24 Dec 2025 12:13:29 -0700 Subject: [PATCH 59/80] Fix for re-creating deleted conversation (#12152) --- openhands/app_server/event_callback/webhook_router.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/openhands/app_server/event_callback/webhook_router.py b/openhands/app_server/event_callback/webhook_router.py index 28236b7325..37ae9d89b2 100644 --- a/openhands/app_server/event_callback/webhook_router.py +++ b/openhands/app_server/event_callback/webhook_router.py @@ -42,7 +42,7 @@ from openhands.app_server.user.specifiy_user_context import ( ) from openhands.app_server.user.user_context import UserContext from openhands.integrations.provider import ProviderType -from openhands.sdk import Event +from openhands.sdk import ConversationExecutionStatus, Event from openhands.sdk.event import ConversationStateUpdateEvent from openhands.server.user_auth.default_user_auth import DefaultUserAuth from openhands.server.user_auth.user_auth import ( @@ -111,6 +111,11 @@ async def on_conversation_update( conversation_info.id, sandbox_info, app_conversation_info_service ) + # If the conversation is being deleted, no action is required... + # Later we may consider deleting the conversation if it exists... + if conversation_info.execution_status == ConversationExecutionStatus.DELETING: + return Success() + app_conversation_info = AppConversationInfo( id=conversation_info.id, title=existing.title or f'Conversation {conversation_info.id.hex}', From 5407ea55aa94fca5d1a2c92390a3a3d812433d29 Mon Sep 17 00:00:00 2001 From: shanemort1982 <156683457+shanemort1982@users.noreply.github.com> Date: Wed, 24 Dec 2025 21:26:45 +0000 Subject: [PATCH 60/80] Fix WebSocket localhost bug by passing DOCKER_HOST_ADDR to runtime containers (#12113) Co-authored-by: openhands --- openhands/runtime/impl/docker/docker_runtime.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/openhands/runtime/impl/docker/docker_runtime.py b/openhands/runtime/impl/docker/docker_runtime.py index b5eb4c5735..effac9d7a2 100644 --- a/openhands/runtime/impl/docker/docker_runtime.py +++ b/openhands/runtime/impl/docker/docker_runtime.py @@ -472,6 +472,9 @@ class DockerRuntime(ActionExecutionClient): ) if self.config.debug or DEBUG: environment['DEBUG'] = 'true' + # Pass DOCKER_HOST_ADDR to spawned containers if it exists + if os.environ.get('DOCKER_HOST_ADDR'): + environment['DOCKER_HOST_ADDR'] = os.environ['DOCKER_HOST_ADDR'] # also update with runtime_startup_env_vars environment.update(self.config.sandbox.runtime_startup_env_vars) From 09af93a02a9f097e8dd1ee7bc0b13ba1e44ff356 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Wed, 24 Dec 2025 20:55:06 -0700 Subject: [PATCH 61/80] Agent server env override (#12068) Co-authored-by: openhands Co-authored-by: Engel Nyst --- .../sandbox/docker_sandbox_spec_service.py | 6 +- .../sandbox/process_sandbox_spec_service.py | 6 +- .../sandbox/remote_sandbox_spec_service.py | 6 +- .../sandbox/sandbox_spec_service.py | 29 +- .../test_agent_server_env_override.py | 478 ++++++++++++++++++ 5 files changed, 518 insertions(+), 7 deletions(-) create mode 100644 tests/unit/app_server/test_agent_server_env_override.py diff --git a/openhands/app_server/sandbox/docker_sandbox_spec_service.py b/openhands/app_server/sandbox/docker_sandbox_spec_service.py index 063b4e8a96..669d0a9fda 100644 --- a/openhands/app_server/sandbox/docker_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/docker_sandbox_spec_service.py @@ -16,7 +16,8 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( from openhands.app_server.sandbox.sandbox_spec_service import ( SandboxSpecService, SandboxSpecServiceInjector, - get_default_agent_server_image, + get_agent_server_env, + get_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -34,7 +35,7 @@ def get_docker_client() -> docker.DockerClient: def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=get_default_agent_server_image(), + id=get_agent_server_image(), command=['--port', '8000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', @@ -44,6 +45,7 @@ def get_default_sandbox_specs(): 'OH_BASH_EVENTS_DIR': '/workspace/bash_events', 'PYTHONUNBUFFERED': '1', 'ENV_LOG_LEVEL': '20', + **get_agent_server_env(), }, working_dir='/workspace/project', ) diff --git a/openhands/app_server/sandbox/process_sandbox_spec_service.py b/openhands/app_server/sandbox/process_sandbox_spec_service.py index 4e2e88a2f9..fed35399c9 100644 --- a/openhands/app_server/sandbox/process_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/process_sandbox_spec_service.py @@ -12,7 +12,8 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( from openhands.app_server.sandbox.sandbox_spec_service import ( SandboxSpecService, SandboxSpecServiceInjector, - get_default_agent_server_image, + get_agent_server_env, + get_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,11 +21,12 @@ from openhands.app_server.services.injector import InjectorState def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=get_default_agent_server_image(), + id=get_agent_server_image(), command=['python', '-m', 'openhands.agent_server'], initial_env={ # VSCode disabled for now 'OH_ENABLE_VS_CODE': '0', + **get_agent_server_env(), }, working_dir='', ) diff --git a/openhands/app_server/sandbox/remote_sandbox_spec_service.py b/openhands/app_server/sandbox/remote_sandbox_spec_service.py index 6228338d72..302d26b23b 100644 --- a/openhands/app_server/sandbox/remote_sandbox_spec_service.py +++ b/openhands/app_server/sandbox/remote_sandbox_spec_service.py @@ -12,7 +12,8 @@ from openhands.app_server.sandbox.sandbox_spec_models import ( from openhands.app_server.sandbox.sandbox_spec_service import ( SandboxSpecService, SandboxSpecServiceInjector, - get_default_agent_server_image, + get_agent_server_env, + get_agent_server_image, ) from openhands.app_server.services.injector import InjectorState @@ -20,7 +21,7 @@ from openhands.app_server.services.injector import InjectorState def get_default_sandbox_specs(): return [ SandboxSpecInfo( - id=get_default_agent_server_image(), + id=get_agent_server_image(), command=['/usr/local/bin/openhands-agent-server', '--port', '60000'], initial_env={ 'OPENVSCODE_SERVER_ROOT': '/openhands/.openvscode-server', @@ -29,6 +30,7 @@ def get_default_sandbox_specs(): 'OH_CONVERSATIONS_PATH': '/workspace/conversations', 'OH_BASH_EVENTS_DIR': '/workspace/bash_events', 'OH_VSCODE_PORT': '60001', + **get_agent_server_env(), }, working_dir='/workspace/project', ) diff --git a/openhands/app_server/sandbox/sandbox_spec_service.py b/openhands/app_server/sandbox/sandbox_spec_service.py index fe9d1653a9..77f4f4a6ab 100644 --- a/openhands/app_server/sandbox/sandbox_spec_service.py +++ b/openhands/app_server/sandbox/sandbox_spec_service.py @@ -2,6 +2,7 @@ import asyncio import os from abc import ABC, abstractmethod +from openhands.agent_server import env_parser from openhands.app_server.errors import SandboxError from openhands.app_server.sandbox.sandbox_spec_models import ( SandboxSpecInfo, @@ -60,9 +61,35 @@ class SandboxSpecServiceInjector( pass -def get_default_agent_server_image(): +def get_agent_server_image() -> str: agent_server_image_repository = os.getenv('AGENT_SERVER_IMAGE_REPOSITORY') agent_server_image_tag = os.getenv('AGENT_SERVER_IMAGE_TAG') if agent_server_image_repository and agent_server_image_tag: return f'{agent_server_image_repository}:{agent_server_image_tag}' return AGENT_SERVER_IMAGE + + +def get_agent_server_env() -> dict[str, str]: + """Get environment variables to be injected into agent server sandbox environments. + + This function reads environment variable overrides from the OH_AGENT_SERVER_ENV + environment variable, which should contain a JSON string mapping variable names + to their values. + + Usage: + Set OH_AGENT_SERVER_ENV to a JSON string: + OH_AGENT_SERVER_ENV='{"DEBUG": "true", "LOG_LEVEL": "info", "CUSTOM_VAR": "value"}' + + This will inject the following environment variables into all sandbox environments: + - DEBUG=true + - LOG_LEVEL=info + - CUSTOM_VAR=value + + Returns: + dict[str, str]: Dictionary of environment variable names to values. + Returns empty dict if OH_AGENT_SERVER_ENV is not set or invalid. + + Raises: + JSONDecodeError: If OH_AGENT_SERVER_ENV contains invalid JSON. + """ + return env_parser.from_env(dict[str, str], 'OH_AGENT_SERVER_ENV') diff --git a/tests/unit/app_server/test_agent_server_env_override.py b/tests/unit/app_server/test_agent_server_env_override.py new file mode 100644 index 0000000000..61d851590e --- /dev/null +++ b/tests/unit/app_server/test_agent_server_env_override.py @@ -0,0 +1,478 @@ +"""Tests for agent server environment variable override functionality. + +This module tests the environment variable override functionality that allows +users to inject custom environment variables into sandbox environments via +OH_AGENT_SERVER_ENV_* environment variables. + +The functionality includes: +- Parsing OH_AGENT_SERVER_ENV_* environment variables +- Merging them into sandbox specifications +- Integration across different sandbox types (Docker, Process, Remote) +""" + +import os +from unittest.mock import patch + +import pytest + +from openhands.app_server.sandbox.docker_sandbox_spec_service import ( + get_default_sandbox_specs as get_default_docker_sandbox_specs, +) +from openhands.app_server.sandbox.process_sandbox_spec_service import ( + get_default_sandbox_specs as get_default_process_sandbox_specs, +) +from openhands.app_server.sandbox.remote_sandbox_spec_service import ( + get_default_sandbox_specs as get_default_remote_sandbox_specs, +) +from openhands.app_server.sandbox.sandbox_spec_service import ( + get_agent_server_env, +) + + +class TestGetAgentServerEnv: + """Test cases for get_agent_server_env function.""" + + def test_no_environment_variables(self): + """Test when no OH_AGENT_SERVER_ENV variable is set.""" + with patch.dict(os.environ, {}, clear=True): + result = get_agent_server_env() + assert result == {} + + def test_empty_json_environment_variable(self): + """Test with empty JSON in OH_AGENT_SERVER_ENV.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + assert result == {} + + def test_single_environment_variable(self): + """Test with a single variable in JSON format.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"CUSTOM_VAR": "custom_value"}', + 'OTHER_VAR': 'should_not_be_included', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + assert result == {'CUSTOM_VAR': 'custom_value'} + + def test_multiple_environment_variables(self): + """Test with multiple variables in JSON format.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"VAR1": "value1", "VAR2": "value2", "DEBUG": "true", "PORT": "8080"}', + 'UNRELATED_VAR': 'should_not_be_included', + 'OH_OTHER_PREFIX': 'also_not_included', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'VAR1': 'value1', + 'VAR2': 'value2', + 'DEBUG': 'true', + 'PORT': '8080', + } + assert result == expected + + def test_empty_variable_value(self): + """Test with empty environment variable values in JSON.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"EMPTY": "", "NORMAL": "value"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'EMPTY': '', + 'NORMAL': 'value', + } + assert result == expected + + def test_special_characters_in_values(self): + """Test with special characters in environment variable values.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"JSON": "{\\"key\\": \\"value\\", \\"number\\": 123}", "PATH": "/usr/local/bin:/usr/bin", "SPACES": "value with spaces", "SYMBOLS": "value!@#$%^&*()"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'JSON': '{"key": "value", "number": 123}', + 'PATH': '/usr/local/bin:/usr/bin', + 'SPACES': 'value with spaces', + 'SYMBOLS': 'value!@#$%^&*()', + } + assert result == expected + + def test_case_sensitivity(self): + """Test that environment variable names are case-sensitive.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"lowercase": "lower", "UPPERCASE": "upper", "MixedCase": "mixed"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'lowercase': 'lower', + 'UPPERCASE': 'upper', + 'MixedCase': 'mixed', + } + assert result == expected + + def test_numeric_and_underscore_in_names(self): + """Test with numbers and underscores in variable names.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"VAR_1": "value1", "VAR_2_TEST": "value2", "123": "numeric", "TEST_123_ABC": "complex"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'VAR_1': 'value1', + 'VAR_2_TEST': 'value2', + '123': 'numeric', + 'TEST_123_ABC': 'complex', + } + assert result == expected + + def test_invalid_json_format(self): + """Test that invalid JSON raises an appropriate error.""" + import json + + env_vars = { + 'OH_AGENT_SERVER_ENV': 'invalid_json_string', + } + + with patch.dict(os.environ, env_vars, clear=True): + with pytest.raises(json.JSONDecodeError): # Should raise JSON decode error + get_agent_server_env() + + def test_non_string_values_in_json(self): + """Test that non-string values in JSON are converted to strings.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"NUMBER": 123, "BOOLEAN": true, "NULL": null}', + } + + with patch.dict(os.environ, env_vars, clear=True): + # This might fail if the parser is strict about string values + # The behavior depends on the implementation + try: + result = get_agent_server_env() + # If it succeeds, values should be converted to strings + assert isinstance(result.get('NUMBER'), str) + assert isinstance(result.get('BOOLEAN'), str) + assert isinstance(result.get('NULL'), str) + except Exception: + # If it fails, that's also acceptable behavior for type safety + pass + + def test_documentation_example(self): + """Test the example from the function documentation.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"DEBUG": "true", "LOG_LEVEL": "info", "CUSTOM_VAR": "value"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + result = get_agent_server_env() + expected = { + 'DEBUG': 'true', + 'LOG_LEVEL': 'info', + 'CUSTOM_VAR': 'value', + } + assert result == expected + + +class TestDockerSandboxSpecEnvironmentOverride: + """Test environment variable override integration in Docker sandbox specs.""" + + def test_docker_specs_include_agent_server_env(self): + """Test that Docker sandbox specs include agent server environment variables.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"CUSTOM_VAR": "custom_value", "DEBUG": "true"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_docker_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Check that custom environment variables are included + assert 'CUSTOM_VAR' in spec.initial_env + assert spec.initial_env['CUSTOM_VAR'] == 'custom_value' + assert 'DEBUG' in spec.initial_env + assert spec.initial_env['DEBUG'] == 'true' + + # Check that default environment variables are still present + assert 'OPENVSCODE_SERVER_ROOT' in spec.initial_env + assert 'OH_ENABLE_VNC' in spec.initial_env + assert 'LOG_JSON' in spec.initial_env + + def test_docker_specs_override_existing_variables(self): + """Test that agent server env variables can override existing ones.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"LOG_JSON": "false", "PYTHONUNBUFFERED": "0"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_docker_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Agent server env should override the defaults + assert spec.initial_env['LOG_JSON'] == 'false' + assert spec.initial_env['PYTHONUNBUFFERED'] == '0' + + def test_docker_specs_empty_agent_server_env(self): + """Test Docker specs when no agent server env variables are set.""" + with patch.dict(os.environ, {}, clear=True): + specs = get_default_docker_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Should only have the default environment variables + expected_defaults = { + 'OPENVSCODE_SERVER_ROOT', + 'OH_ENABLE_VNC', + 'LOG_JSON', + 'OH_CONVERSATIONS_PATH', + 'OH_BASH_EVENTS_DIR', + 'PYTHONUNBUFFERED', + 'ENV_LOG_LEVEL', + } + + # All defaults should be present + for var in expected_defaults: + assert var in spec.initial_env + + # No additional variables should be present + assert set(spec.initial_env.keys()) == expected_defaults + + +class TestProcessSandboxSpecEnvironmentOverride: + """Test environment variable override integration in Process sandbox specs.""" + + def test_process_specs_include_agent_server_env(self): + """Test that Process sandbox specs include agent server environment variables.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"PROCESS_VAR": "process_value", "WORKER_COUNT": "4"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_process_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Check that custom environment variables are included + assert 'PROCESS_VAR' in spec.initial_env + assert spec.initial_env['PROCESS_VAR'] == 'process_value' + assert 'WORKER_COUNT' in spec.initial_env + assert spec.initial_env['WORKER_COUNT'] == '4' + + # Check that default environment variables are still present + assert 'OH_ENABLE_VS_CODE' in spec.initial_env + + def test_process_specs_override_existing_variables(self): + """Test that agent server env variables can override existing ones in process specs.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"OH_ENABLE_VS_CODE": "1"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_process_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Agent server env should override the default + assert spec.initial_env['OH_ENABLE_VS_CODE'] == '1' + + def test_process_specs_empty_agent_server_env(self): + """Test Process specs when no agent server env variables are set.""" + with patch.dict(os.environ, {}, clear=True): + specs = get_default_process_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Should only have the default environment variables + expected_defaults = { + 'OH_ENABLE_VS_CODE', + } + + # All defaults should be present + for var in expected_defaults: + assert var in spec.initial_env + + # Should have exactly the expected variables + assert set(spec.initial_env.keys()) == expected_defaults + + +class TestRemoteSandboxSpecEnvironmentOverride: + """Test environment variable override integration in Remote sandbox specs.""" + + def test_remote_specs_include_agent_server_env(self): + """Test that Remote sandbox specs include agent server environment variables.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"REMOTE_VAR": "remote_value", "API_KEY": "secret123"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_remote_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Check that custom environment variables are included + assert 'REMOTE_VAR' in spec.initial_env + assert spec.initial_env['REMOTE_VAR'] == 'remote_value' + assert 'API_KEY' in spec.initial_env + assert spec.initial_env['API_KEY'] == 'secret123' + + # Check that default environment variables are still present + assert 'OH_CONVERSATIONS_PATH' in spec.initial_env + assert 'OH_BASH_EVENTS_DIR' in spec.initial_env + assert 'OH_VSCODE_PORT' in spec.initial_env + + def test_remote_specs_override_existing_variables(self): + """Test that agent server env variables can override existing ones in remote specs.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"OH_VSCODE_PORT": "60002", "OH_CONVERSATIONS_PATH": "/custom/conversations"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + specs = get_default_remote_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Agent server env should override the defaults + assert spec.initial_env['OH_VSCODE_PORT'] == '60002' + assert spec.initial_env['OH_CONVERSATIONS_PATH'] == '/custom/conversations' + + def test_remote_specs_empty_agent_server_env(self): + """Test Remote specs when no agent server env variables are set.""" + with patch.dict(os.environ, {}, clear=True): + specs = get_default_remote_sandbox_specs() + + assert len(specs) == 1 + spec = specs[0] + + # Should have the default environment variables + expected_defaults = { + 'OH_CONVERSATIONS_PATH', + 'OH_BASH_EVENTS_DIR', + 'OH_VSCODE_PORT', + 'LOG_JSON', + 'OH_ENABLE_VNC', + 'OPENVSCODE_SERVER_ROOT', + } + + # All defaults should be present + for var in expected_defaults: + assert var in spec.initial_env + + # Should have exactly the expected variables + assert set(spec.initial_env.keys()) == expected_defaults + + +class TestEnvironmentOverrideIntegration: + """Integration tests for the complete environment override functionality.""" + + def test_consistent_behavior_across_sandbox_types(self): + """Test that environment override behavior is consistent across all sandbox types.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"SHARED_VAR": "shared_value", "INTEGRATION_TEST": "true"}', + } + + with patch.dict(os.environ, env_vars, clear=True): + docker_specs = get_default_docker_sandbox_specs() + process_specs = get_default_process_sandbox_specs() + remote_specs = get_default_remote_sandbox_specs() + + # All sandbox types should include the same custom environment variables + for specs in [docker_specs, process_specs, remote_specs]: + assert len(specs) == 1 + spec = specs[0] + + assert 'SHARED_VAR' in spec.initial_env + assert spec.initial_env['SHARED_VAR'] == 'shared_value' + assert 'INTEGRATION_TEST' in spec.initial_env + assert spec.initial_env['INTEGRATION_TEST'] == 'true' + + def test_complex_environment_scenario(self): + """Test a complex scenario with many environment variables.""" + env_vars = { + 'OH_AGENT_SERVER_ENV': '{"APP_NAME": "MyApp", "APP_VERSION": "1.2.3", "APP_ENV": "production", "DB_HOST": "localhost", "DB_PORT": "5432", "DB_NAME": "myapp_db", "FEATURE_X": "enabled", "FEATURE_Y": "disabled", "LOG_JSON": "false", "PYTHONUNBUFFERED": "0"}', + # Non-matching variables (should be ignored) + 'OTHER_VAR': 'ignored', + 'OH_OTHER_PREFIX_VAR': 'also_ignored', + } + + with patch.dict(os.environ, env_vars, clear=True): + # Test with Docker specs as representative + specs = get_default_docker_sandbox_specs() + spec = specs[0] + + # Custom variables should be present + assert spec.initial_env['APP_NAME'] == 'MyApp' + assert spec.initial_env['APP_VERSION'] == '1.2.3' + assert spec.initial_env['APP_ENV'] == 'production' + assert spec.initial_env['DB_HOST'] == 'localhost' + assert spec.initial_env['DB_PORT'] == '5432' + assert spec.initial_env['DB_NAME'] == 'myapp_db' + assert spec.initial_env['FEATURE_X'] == 'enabled' + assert spec.initial_env['FEATURE_Y'] == 'disabled' + + # Overridden defaults should have new values + assert spec.initial_env['LOG_JSON'] == 'false' + assert spec.initial_env['PYTHONUNBUFFERED'] == '0' + + # Non-matching variables should not be present + assert 'OTHER_VAR' not in spec.initial_env + assert 'OH_OTHER_PREFIX_VAR' not in spec.initial_env + + # Original defaults that weren't overridden should still be present + assert 'OPENVSCODE_SERVER_ROOT' in spec.initial_env + assert 'OH_ENABLE_VNC' in spec.initial_env + + def test_environment_isolation(self): + """Test that environment changes don't affect subsequent calls.""" + # First call with some environment variables + env_vars_1 = { + 'OH_AGENT_SERVER_ENV': '{"VAR1": "value1", "VAR2": "value2"}', + } + + with patch.dict(os.environ, env_vars_1, clear=True): + specs_1 = get_default_docker_sandbox_specs() + spec_1 = specs_1[0] + + assert 'VAR1' in spec_1.initial_env + assert 'VAR2' in spec_1.initial_env + assert spec_1.initial_env['VAR1'] == 'value1' + assert spec_1.initial_env['VAR2'] == 'value2' + + # Second call with different environment variables + env_vars_2 = { + 'OH_AGENT_SERVER_ENV': '{"VAR3": "value3", "VAR4": "value4"}', + } + + with patch.dict(os.environ, env_vars_2, clear=True): + specs_2 = get_default_docker_sandbox_specs() + spec_2 = specs_2[0] + + # Should only have the new variables + assert 'VAR3' in spec_2.initial_env + assert 'VAR4' in spec_2.initial_env + assert spec_2.initial_env['VAR3'] == 'value3' + assert spec_2.initial_env['VAR4'] == 'value4' + + # Should not have the old variables + assert 'VAR1' not in spec_2.initial_env + assert 'VAR2' not in spec_2.initial_env From 94e6490a79a55dc3d4bfddd9e3cc6f04c83e26d8 Mon Sep 17 00:00:00 2001 From: Guy Elsmore-Paddock Date: Thu, 25 Dec 2025 01:16:52 -0500 Subject: [PATCH 62/80] Use `tini` as Docker Runtime Init to Ensure Zombie Processes Get Reaped (#12133) Co-authored-by: Tim O'Farrell --- openhands/app_server/sandbox/docker_sandbox_service.py | 3 +++ openhands/runtime/impl/docker/docker_runtime.py | 3 +++ 2 files changed, 6 insertions(+) diff --git a/openhands/app_server/sandbox/docker_sandbox_service.py b/openhands/app_server/sandbox/docker_sandbox_service.py index a0aeddc0e6..e93b73b031 100644 --- a/openhands/app_server/sandbox/docker_sandbox_service.py +++ b/openhands/app_server/sandbox/docker_sandbox_service.py @@ -346,6 +346,9 @@ class DockerSandboxService(SandboxService): working_dir=sandbox_spec.working_dir, labels=labels, detach=True, + # Use Docker's tini init process to ensure proper signal handling and reaping of + # zombie child processes. + init=True, ) sandbox_info = await self._container_to_sandbox_info(container) diff --git a/openhands/runtime/impl/docker/docker_runtime.py b/openhands/runtime/impl/docker/docker_runtime.py index effac9d7a2..665edbea75 100644 --- a/openhands/runtime/impl/docker/docker_runtime.py +++ b/openhands/runtime/impl/docker/docker_runtime.py @@ -520,6 +520,9 @@ class DockerRuntime(ActionExecutionClient): self.container = self.docker_client.containers.run( self.runtime_container_image, + # Use Docker's tini init process to ensure proper signal handling and reaping of + # zombie child processes. + init=True, command=command, # Override the default 'bash' entrypoint because the command is a binary. entrypoint=[], From c80f70392faa901e601c73d2636d1875b52564da Mon Sep 17 00:00:00 2001 From: lif <1835304752@qq.com> Date: Fri, 26 Dec 2025 02:26:12 +0800 Subject: [PATCH 63/80] fix(frontend): clean up console warnings in test suite (#12004) Co-authored-by: Claude Opus 4.5 Co-authored-by: amanape <83104063+amanape@users.noreply.github.com> --- .../conversation-panel.test.tsx | 5 + .../maintenance/maintenance-banner.test.tsx | 10 +- .../conversation-websocket-handler.test.tsx | 9 + .../__tests__/hooks/use-websocket.test.ts | 6 +- .../__tests__/use-suggested-tasks.test.ts | 2 +- frontend/package-lock.json | 44 +--- .../conversation-card-context-menu.tsx | 194 ++++++++++-------- .../branch-dropdown-menu.tsx | 1 + .../git-provider-dropdown.tsx | 1 + .../git-repo-dropdown/git-repo-dropdown.tsx | 1 + .../conversation-status-indicator.tsx | 1 + .../recent-conversation.tsx | 106 +++++----- .../home/shared/generic-dropdown-menu.tsx | 52 +++-- .../microagent-management-accordion-title.tsx | 3 +- ...agent-management-add-microagent-button.tsx | 26 ++- .../shared/buttons/tooltip-button.tsx | 79 +++++-- 16 files changed, 311 insertions(+), 229 deletions(-) diff --git a/frontend/__tests__/components/features/conversation-panel/conversation-panel.test.tsx b/frontend/__tests__/components/features/conversation-panel/conversation-panel.test.tsx index 9faef96ac5..95c4ca9521 100644 --- a/frontend/__tests__/components/features/conversation-panel/conversation-panel.test.tsx +++ b/frontend/__tests__/components/features/conversation-panel/conversation-panel.test.tsx @@ -23,6 +23,11 @@ describe("ConversationPanel", () => { Component: () => , path: "/", }, + { + // Add route to prevent "No routes matched location" warning + Component: () => null, + path: "/conversations/:conversationId", + }, ]); const renderConversationPanel = () => renderWithProviders(); diff --git a/frontend/__tests__/components/features/maintenance/maintenance-banner.test.tsx b/frontend/__tests__/components/features/maintenance/maintenance-banner.test.tsx index db5bdf3e39..aae067b6b0 100644 --- a/frontend/__tests__/components/features/maintenance/maintenance-banner.test.tsx +++ b/frontend/__tests__/components/features/maintenance/maintenance-banner.test.tsx @@ -48,9 +48,12 @@ describe("MaintenanceBanner", () => { expect(button).toBeInTheDocument(); }); - // maintenance-banner - it("handles invalid date gracefully", () => { + // Suppress expected console.warn for invalid date parsing + const consoleWarnSpy = vi + .spyOn(console, "warn") + .mockImplementation(() => {}); + const invalidTime = "invalid-date"; render( @@ -62,6 +65,9 @@ describe("MaintenanceBanner", () => { // Check if the banner is rendered const banner = screen.queryByTestId("maintenance-banner"); expect(banner).not.toBeInTheDocument(); + + // Restore console.warn + consoleWarnSpy.mockRestore(); }); it("click on dismiss button removes banner", () => { diff --git a/frontend/__tests__/conversation-websocket-handler.test.tsx b/frontend/__tests__/conversation-websocket-handler.test.tsx index d3df1676fa..eb9c8976ff 100644 --- a/frontend/__tests__/conversation-websocket-handler.test.tsx +++ b/frontend/__tests__/conversation-websocket-handler.test.tsx @@ -6,6 +6,7 @@ import { beforeEach, afterAll, afterEach, + vi, } from "vitest"; import { screen, waitFor, render, cleanup } from "@testing-library/react"; import { QueryClient, QueryClientProvider } from "@tanstack/react-query"; @@ -141,6 +142,11 @@ describe("Conversation WebSocket Handler", () => { }); it("should handle malformed/invalid event data gracefully", async () => { + // Suppress expected console.warn for invalid JSON parsing + const consoleWarnSpy = vi + .spyOn(console, "warn") + .mockImplementation(() => {}); + // Set up MSW to send various invalid events when connection is established mswServer.use( wsLink.addEventListener("connection", ({ client, server }) => { @@ -203,6 +209,9 @@ describe("Conversation WebSocket Handler", () => { "valid-event-123", ); expect(screen.getByTestId("ui-events-count")).toHaveTextContent("1"); + + // Restore console.warn + consoleWarnSpy.mockRestore(); }); }); diff --git a/frontend/__tests__/hooks/use-websocket.test.ts b/frontend/__tests__/hooks/use-websocket.test.ts index 50e8e70571..7d42507a87 100644 --- a/frontend/__tests__/hooks/use-websocket.test.ts +++ b/frontend/__tests__/hooks/use-websocket.test.ts @@ -34,7 +34,11 @@ describe("useWebSocket", () => { }), ); - beforeAll(() => mswServer.listen()); + beforeAll(() => + mswServer.listen({ + onUnhandledRequest: "warn", + }), + ); afterEach(() => mswServer.resetHandlers()); afterAll(() => mswServer.close()); diff --git a/frontend/__tests__/use-suggested-tasks.test.ts b/frontend/__tests__/use-suggested-tasks.test.ts index 91e77db191..868ece2136 100644 --- a/frontend/__tests__/use-suggested-tasks.test.ts +++ b/frontend/__tests__/use-suggested-tasks.test.ts @@ -9,7 +9,7 @@ import { useShouldShowUserFeatures } from "../src/hooks/use-should-show-user-fea vi.mock("../src/hooks/use-should-show-user-features"); vi.mock("#/api/suggestions-service/suggestions-service.api", () => ({ SuggestionsService: { - getSuggestedTasks: vi.fn(), + getSuggestedTasks: vi.fn().mockResolvedValue([]), }, })); diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 33717ced21..961ac595bf 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -192,7 +192,6 @@ "integrity": "sha512-e7jT4DxYvIDLk1ZHmU/m/mB19rex9sv0c2ftBtjSBv+kVM/902eh0fINUzD7UwLLNR+jU585GxUJ8/EBfAM5fw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@babel/code-frame": "^7.27.1", "@babel/generator": "^7.28.5", @@ -732,7 +731,6 @@ } ], "license": "MIT", - "peer": true, "engines": { "node": ">=18" }, @@ -779,7 +777,6 @@ } ], "license": "MIT", - "peer": true, "engines": { "node": ">=18" } @@ -2331,7 +2328,6 @@ "version": "2.4.24", "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.24.tgz", "integrity": "sha512-9GKQgUc91otQfwmq6TLE72QKxtB341aK5NpBHS3gRoWYEuNN714Zl3OXwIZNvdXPJpsTaUo1ID1ibJU9tfgwdg==", - "peer": true, "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/system-rsc": "2.3.21", @@ -2411,7 +2407,6 @@ "version": "2.4.24", "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.24.tgz", "integrity": "sha512-lL+anmY4GGWwKyTbJ2PEBZE4talIZ3hu4yGpku9TktCVG2nC2YTwiWQFJ+Jcbf8Cf9vuLzI1sla5bz2jUqiBRA==", - "peer": true, "dependencies": { "@heroui/shared-utils": "2.1.12", "color": "^4.2.3", @@ -5127,7 +5122,6 @@ "integrity": "sha512-8QqtOQT5ACVlmsvKOJNEaWmRPmcojMOzCz4Hs2BGG/toAp/K38LcsMRyLp349glq5AzJbCEeimEoxaX6v/fLrA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@babel/core": "^7.21.3", "@svgr/babel-preset": "8.1.0", @@ -5588,7 +5582,6 @@ "integrity": "sha512-o4PXJQidqJl82ckFaXUeoAW+XysPLauYI43Abki5hABd853iMhitooc6znOnczgbTYmEP6U6/y1ZyKAIsvMKGg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@babel/code-frame": "^7.10.4", "@babel/runtime": "^7.12.5", @@ -5766,7 +5759,6 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-25.0.3.tgz", "integrity": "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA==", "devOptional": true, - "peer": true, "dependencies": { "undici-types": "~7.16.0" } @@ -5782,7 +5774,6 @@ "resolved": "https://registry.npmjs.org/@types/react/-/react-19.2.7.tgz", "integrity": "sha512-MWtvHrGZLFttgeEj28VXHxpmwYbor/ATPYbBfSFZEIRK0ecCFLl2Qo55z52Hss+UV9CRN7trSeq1zbgx7YDWWg==", "license": "MIT", - "peer": true, "dependencies": { "csstype": "^3.2.2" } @@ -5793,7 +5784,6 @@ "integrity": "sha512-jp2L/eY6fn+KgVVQAOqYItbF0VY/YApe5Mz2F0aykSO8gx31bYCZyvSeYxCHKvzHG5eZjc+zyaS5BrBWya2+kQ==", "dev": true, "license": "MIT", - "peer": true, "peerDependencies": { "@types/react": "^19.2.0" } @@ -5834,7 +5824,6 @@ "integrity": "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.18.0", @@ -5892,7 +5881,6 @@ "integrity": "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg==", "dev": true, "license": "BSD-2-Clause", - "peer": true, "dependencies": { "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", @@ -6406,8 +6394,7 @@ "version": "5.5.0", "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-5.5.0.tgz", "integrity": "sha512-hqJHYaQb5OptNunnyAnkHyM8aCjZ1MEIDTQu1iIbbTD/xops91NB5yq1ZK/dC2JDbVWtF23zUtl9JE2NqwT87A==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/accepts": { "version": "1.3.8", @@ -6435,7 +6422,6 @@ "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==", "dev": true, "license": "MIT", - "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -6972,7 +6958,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "baseline-browser-mapping": "^2.9.0", "caniuse-lite": "^1.0.30001759", @@ -7661,8 +7646,7 @@ "version": "3.2.3", "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/damerau-levenshtein": { "version": "1.0.8", @@ -8380,7 +8364,6 @@ "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -8504,7 +8487,6 @@ "integrity": "sha512-82GZUjRS0p/jganf6q1rEO25VSoHH0hKPCTrgillPjdI/3bgBhAE1QzHrHTizjpRvy6pGAvKjDJtk2pF9NDq8w==", "dev": true, "license": "MIT", - "peer": true, "bin": { "eslint-config-prettier": "bin/cli.js" }, @@ -8585,7 +8567,6 @@ "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@rtsao/scc": "^1.1.0", "array-includes": "^3.1.9", @@ -8677,7 +8658,6 @@ "integrity": "sha512-scB3nz4WmG75pV8+3eRUQOHZlNSUhFNq37xnpgRkCCELU3XMvXAxLk1eqWWyE22Ki4Q01Fnsw9BA3cJHDPgn2Q==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "aria-query": "^5.3.2", "array-includes": "^3.1.8", @@ -8773,7 +8753,6 @@ "integrity": "sha512-Qteup0SqU15kdocexFNAJMvCJEfa2xUKNV4CC1xsVMrIIqEy3SQ/rqyxCWNzfrd3/ldy6HMlD2e0JDVpDg2qIA==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "array-includes": "^3.1.8", "array.prototype.findlast": "^1.2.5", @@ -8807,7 +8786,6 @@ "integrity": "sha512-QzliNJq4GinDBcD8gPB5v0wh6g8q3SUi6EFF0x8N/BL9PoVs0atuGc47ozMRyOWAKdwaZ5OnbOEa3WR+dSGKuQ==", "dev": true, "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -9076,7 +9054,6 @@ "version": "4.22.1", "resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz", "integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==", - "peer": true, "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", @@ -9407,7 +9384,6 @@ "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-12.23.26.tgz", "integrity": "sha512-cPcIhgR42xBn1Uj+PzOyheMtZ73H927+uWPDVhUMqxy8UHt6Okavb6xIz9J/phFUHUj0OncR6UvMfJTXoc/LKA==", "license": "MIT", - "peer": true, "dependencies": { "motion-dom": "^12.23.23", "motion-utils": "^12.23.6", @@ -10075,7 +10051,6 @@ "url": "https://www.i18next.com/how-to/faq#i18next-is-awesome.-how-can-i-support-the-project" } ], - "peer": true, "dependencies": { "@babel/runtime": "^7.28.4" }, @@ -10853,7 +10828,6 @@ "integrity": "sha512-GtldT42B8+jefDUC4yUKAvsaOrH7PDHmZxZXNgF2xMmymjUbRYJvpAybZAKEmXDGTM0mCsz8duOa4vTm5AY2Kg==", "dev": true, "license": "MIT", - "peer": true, "dependencies": { "@acemir/cssom": "^0.9.28", "@asamuzakjp/dom-selector": "^6.7.6", @@ -12555,7 +12529,6 @@ "resolved": "https://registry.npmjs.org/monaco-editor/-/monaco-editor-0.55.1.tgz", "integrity": "sha512-jz4x+TJNFHwHtwuV9vA9rMujcZRb0CEilTEwG2rRSpe/A7Jdkuj8xPKttCgOh+v/lkHy7HsZ64oj+q3xoAFl9A==", "license": "MIT", - "peer": true, "dependencies": { "dompurify": "3.2.7", "marked": "14.0.0" @@ -12650,7 +12623,6 @@ "dev": true, "hasInstallScript": true, "license": "MIT", - "peer": true, "dependencies": { "@inquirer/confirm": "^5.0.0", "@mswjs/interceptors": "^0.40.0", @@ -13375,7 +13347,6 @@ } ], "license": "MIT", - "peer": true, "dependencies": { "nanoid": "^3.3.11", "picocolors": "^1.1.1", @@ -13437,7 +13408,6 @@ "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", "dev": true, "license": "MIT", - "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -13634,7 +13604,6 @@ "version": "19.2.3", "resolved": "https://registry.npmjs.org/react/-/react-19.2.3.tgz", "integrity": "sha512-Ku/hhYbVjOQnXDZFv2+RibmLFGwFdeeKHFcOTlrt7xplBnya5OGn/hIRDsqDiSUcfORsDC7MPxwork8jBwsIWA==", - "peer": true, "engines": { "node": ">=0.10.0" } @@ -13683,7 +13652,6 @@ "version": "19.2.3", "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-19.2.3.tgz", "integrity": "sha512-yELu4WmLPw5Mr/lmeEpox5rw3RETacE++JgHqQzd2dg+YbJuat3jH4ingc+WPZhxaoFzdv9y33G+F7Nl5O0GBg==", - "peer": true, "dependencies": { "scheduler": "^0.27.0" }, @@ -13796,7 +13764,6 @@ "version": "7.11.0", "resolved": "https://registry.npmjs.org/react-router/-/react-router-7.11.0.tgz", "integrity": "sha512-uI4JkMmjbWCZc01WVP2cH7ZfSzH91JAZUDd7/nIprDgWxBV1TkkmLToFh7EbMTcMak8URFRa2YoBL/W8GWnCTQ==", - "peer": true, "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" @@ -14158,7 +14125,6 @@ "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.53.3.tgz", "integrity": "sha512-w8GmOxZfBmKknvdXU1sdM9NHcoQejwF/4mNgj2JuEEdRaHwwF12K7e9eXn1nLZ07ad+du76mkVsyeb2rKGllsA==", "license": "MIT", - "peer": true, "dependencies": { "@types/estree": "1.0.8" }, @@ -15154,7 +15120,6 @@ "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-3.4.0.tgz", "integrity": "sha512-uSaO4gnW+b3Y2aWoWfFpX62vn2sR3skfhbjsEnaBI81WD1wBLlHZe5sWf0AqjksNdYTbGBEd0UasQMT3SNV15g==", "license": "MIT", - "peer": true, "funding": { "type": "github", "url": "https://github.com/sponsors/dcastil" @@ -15281,7 +15246,6 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -15583,7 +15547,6 @@ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", "devOptional": true, "license": "Apache-2.0", - "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -15889,7 +15852,6 @@ "version": "7.3.0", "resolved": "https://registry.npmjs.org/vite/-/vite-7.3.0.tgz", "integrity": "sha512-dZwN5L1VlUBewiP6H9s2+B3e3Jg96D0vzN+Ry73sOefebhYr9f94wwkMNN/9ouoU8pV1BqA1d1zGk8928cx0rg==", - "peer": true, "dependencies": { "esbuild": "^0.27.0", "fdir": "^6.5.0", @@ -16059,7 +16021,6 @@ "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", "license": "MIT", - "peer": true, "engines": { "node": ">=12" }, @@ -16072,7 +16033,6 @@ "resolved": "https://registry.npmjs.org/vitest/-/vitest-4.0.16.tgz", "integrity": "sha512-E4t7DJ9pESL6E3I8nFjPa4xGUd3PmiWDLsDztS2qXSJWfHtbQnwAWylaBvSNY48I3vr8PTqIZlyK8TE3V3CA4Q==", "dev": true, - "peer": true, "dependencies": { "@vitest/expect": "4.0.16", "@vitest/mocker": "4.0.16", diff --git a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx index 06f5021002..34ce98b490 100644 --- a/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx +++ b/frontend/src/components/features/conversation-panel/conversation-card/conversation-card-context-menu.tsx @@ -47,18 +47,20 @@ export function ConversationCardContextMenu({ const ref = useClickOutsideElement(onClose); const generateSection = useCallback( - (items: React.ReactNode[], isLast?: boolean) => { + (items: React.ReactNode[], sectionKey: string, isLast?: boolean) => { const filteredItems = items.filter((i) => i != null); if (filteredItems.length > 0) { - return !isLast - ? [ - ...filteredItems, - , - ] - : filteredItems; + return !isLast ? ( + + {filteredItems} + + + ) : ( + {filteredItems} + ); } - return []; + return null; }, [], ); @@ -71,88 +73,104 @@ export function ConversationCardContextMenu({ alignment="right" className="mt-0" > - {generateSection([ - onEdit && ( - - } - text={t(I18nKey.BUTTON$RENAME)} - /> - - ), - ])} - {generateSection([ - onShowAgentTools && ( - - } - text={t(I18nKey.BUTTON$SHOW_AGENT_TOOLS_AND_METADATA)} - /> - - ), - onShowSkills && ( - - } - text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} - /> - - ), - ])} - {generateSection([ - onStop && ( - - } - text={t(I18nKey.COMMON$CLOSE_CONVERSATION_STOP_RUNTIME)} - /> - - ), - onDownloadViaVSCode && ( - - } - text={t(I18nKey.BUTTON$DOWNLOAD_VIA_VSCODE)} - /> - - ), - onDownloadConversation && ( - - } - text={t(I18nKey.BUTTON$EXPORT_CONVERSATION)} - /> - - ), - ])} + {generateSection( + [ + onEdit && ( + + } + text={t(I18nKey.BUTTON$RENAME)} + /> + + ), + ], + "edit-section", + )} + {generateSection( + [ + onShowAgentTools && ( + + } + text={t(I18nKey.BUTTON$SHOW_AGENT_TOOLS_AND_METADATA)} + /> + + ), + onShowSkills && ( + + } + text={t(I18nKey.CONVERSATION$SHOW_SKILLS)} + /> + + ), + ], + "tools-section", + )} + {generateSection( + [ + onStop && ( + + } + text={t(I18nKey.COMMON$CLOSE_CONVERSATION_STOP_RUNTIME)} + /> + + ), + onDownloadViaVSCode && ( + + } + text={t(I18nKey.BUTTON$DOWNLOAD_VIA_VSCODE)} + /> + + ), + onDownloadConversation && ( + + } + text={t(I18nKey.BUTTON$EXPORT_CONVERSATION)} + /> + + ), + ], + "control-section", + )} {generateSection( [ onDisplayCost && ( } text={t(I18nKey.COMMON$DELETE_CONVERSATION)} - />{" "} + /> ), ], + "info-section", true, )} diff --git a/frontend/src/components/features/home/git-branch-dropdown/branch-dropdown-menu.tsx b/frontend/src/components/features/home/git-branch-dropdown/branch-dropdown-menu.tsx index f1f024d2ba..b66d338e64 100644 --- a/frontend/src/components/features/home/git-branch-dropdown/branch-dropdown-menu.tsx +++ b/frontend/src/components/features/home/git-branch-dropdown/branch-dropdown-menu.tsx @@ -79,6 +79,7 @@ export function BranchDropdownMenu({ menuRef={menuRef} renderItem={renderItem} renderEmptyState={renderEmptyState} + itemKey={(branch) => branch.name} />
); diff --git a/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx b/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx index c5ab171ca8..53696b1ecb 100644 --- a/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx +++ b/frontend/src/components/features/home/git-provider-dropdown/git-provider-dropdown.tsx @@ -211,6 +211,7 @@ export function GitProviderDropdown({ getItemProps={getItemProps} renderItem={renderItem} renderEmptyState={renderEmptyState} + itemKey={(provider) => provider} /> diff --git a/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx b/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx index 45b75bbd9f..442a65a318 100644 --- a/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx +++ b/frontend/src/components/features/home/git-repo-dropdown/git-repo-dropdown.tsx @@ -369,6 +369,7 @@ export function GitRepoDropdown({ stickyFooterItem={stickyFooterItem} testId="git-repo-dropdown-menu" numberOfRecentItems={recentRepositories.length} + itemKey={(repo) => repo.id} /> diff --git a/frontend/src/components/features/home/recent-conversations/conversation-status-indicator.tsx b/frontend/src/components/features/home/recent-conversations/conversation-status-indicator.tsx index aa8dca6c4d..933e2a07b5 100644 --- a/frontend/src/components/features/home/recent-conversations/conversation-status-indicator.tsx +++ b/frontend/src/components/features/home/recent-conversations/conversation-status-indicator.tsx @@ -39,6 +39,7 @@ export function ConversationStatusIndicator({ ariaLabel={statusLabel} placement="right" showArrow + asSpan className="p-0 border-0 bg-transparent hover:opacity-100" tooltipClassName="bg-[#1a1a1a] text-white text-xs shadow-lg" > diff --git a/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx b/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx index d86bac55bf..4e0068aac4 100644 --- a/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx +++ b/frontend/src/components/features/home/recent-conversations/recent-conversation.tsx @@ -20,63 +20,59 @@ export function RecentConversation({ conversation }: RecentConversationProps) { conversation.selected_repository && conversation.selected_branch; return ( - - + {(conversation.created_at || conversation.last_updated_at) && ( + + {formatTimeDelta( + conversation.created_at || conversation.last_updated_at, + )}{" "} + {t(I18nKey.CONVERSATION$AGO)} + + )} +
); } diff --git a/frontend/src/components/features/home/shared/generic-dropdown-menu.tsx b/frontend/src/components/features/home/shared/generic-dropdown-menu.tsx index eabae9be32..2bd0b4dc0a 100644 --- a/frontend/src/components/features/home/shared/generic-dropdown-menu.tsx +++ b/frontend/src/components/features/home/shared/generic-dropdown-menu.tsx @@ -33,6 +33,7 @@ export interface GenericDropdownMenuProps { stickyFooterItem?: React.ReactNode; testId?: string; numberOfRecentItems?: number; + itemKey: (item: T) => string | number; } export function GenericDropdownMenu({ @@ -51,12 +52,28 @@ export function GenericDropdownMenu({ stickyFooterItem, testId, numberOfRecentItems = 0, + itemKey, }: GenericDropdownMenuProps) { - if (!isOpen) return null; - const hasItems = filteredItems.length > 0; const showEmptyState = !hasItems && !stickyTopItem && !stickyFooterItem; + // Always render the menu container (even when closed) so getMenuProps is always called + // This prevents the downshift warning about forgetting to call getMenuProps + if (!isOpen) { + return ( +
+
    +
+ ); + } + return (
({ ) : ( <> {stickyTopItem} - {filteredItems.map((item, index) => ( - <> - {renderItem( - item, - index, - highlightedIndex, - selectedItem, - getItemProps, - )} - {numberOfRecentItems > 0 && - index === numberOfRecentItems - 1 && ( -
+ {filteredItems.map((item, index) => { + const key = itemKey(item); + return ( + + {renderItem( + item, + index, + highlightedIndex, + selectedItem, + getItemProps, )} - - ))} + {numberOfRecentItems > 0 && + index === numberOfRecentItems - 1 && ( +
+ )} + + ); + })} )} diff --git a/frontend/src/components/features/microagent-management/microagent-management-accordion-title.tsx b/frontend/src/components/features/microagent-management/microagent-management-accordion-title.tsx index 9858cfa8f9..d454cdc0e9 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-accordion-title.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-accordion-title.tsx @@ -17,9 +17,10 @@ export function MicroagentManagementAccordionTitle({ {repository.full_name} diff --git a/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx b/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx index 9bcb282ce8..fb70f664a5 100644 --- a/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx +++ b/frontend/src/components/features/microagent-management/microagent-management-add-microagent-button.tsx @@ -18,22 +18,32 @@ export function MicroagentManagementAddMicroagentButton({ setSelectedRepository, } = useMicroagentManagementStore(); - const handleClick = (e: React.MouseEvent) => { + const handleClick = (e: React.MouseEvent) => { e.stopPropagation(); + e.preventDefault(); setAddMicroagentModalVisible(!addMicroagentModalVisible); setSelectedRepository(repository); }; + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === "Enter" || e.key === " ") { + e.stopPropagation(); + e.preventDefault(); + setAddMicroagentModalVisible(!addMicroagentModalVisible); + setSelectedRepository(repository); + } + }; + return ( - + {t(I18nKey.COMMON$ADD_MICROAGENT)} + ); } diff --git a/frontend/src/components/shared/buttons/tooltip-button.tsx b/frontend/src/components/shared/buttons/tooltip-button.tsx index 7fea86f9a8..56b23e615c 100644 --- a/frontend/src/components/shared/buttons/tooltip-button.tsx +++ b/frontend/src/components/shared/buttons/tooltip-button.tsx @@ -16,6 +16,7 @@ export interface TooltipButtonProps { disabled?: boolean; placement?: TooltipProps["placement"]; showArrow?: boolean; + asSpan?: boolean; } export function TooltipButton({ @@ -31,6 +32,7 @@ export function TooltipButton({ disabled = false, placement = "right", showArrow = false, + asSpan = false, }: TooltipButtonProps) { const handleClick = (e: React.MouseEvent) => { if (onClick && !disabled) { @@ -39,22 +41,67 @@ export function TooltipButton({ } }; - const buttonContent = ( - - ); + const isClickable = !!onClick && !disabled; + let buttonContent: React.ReactNode; + if (asSpan) { + if (isClickable) { + buttonContent = ( + { + if (e.key === "Enter" || e.key === " ") { + onClick(); + e.preventDefault(); + } + }} + className={cn( + "hover:opacity-80", + disabled && "opacity-50 cursor-not-allowed", + className, + )} + aria-disabled={disabled} + > + {children} + + ); + } else { + buttonContent = ( + + {children} + + ); + } + } else { + buttonContent = ( + + ); + } let content; From cb8c1fa263852b40ff16eb5466b79cee28f6a8b0 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 26 Dec 2025 09:19:51 -0700 Subject: [PATCH 64/80] ALL-4627 Database Fixes (#12156) Co-authored-by: openhands --- enterprise/storage/database.py | 12 +++- .../app_conversation_router.py | 15 +++-- .../event_callback/webhook_router.py | 4 -- .../services/db_session_injector.py | 61 +++++++++++-------- .../app_server/test_db_session_injector.py | 9 +-- 5 files changed, 58 insertions(+), 43 deletions(-) diff --git a/enterprise/storage/database.py b/enterprise/storage/database.py index f0d8e9d62c..ec06550e03 100644 --- a/enterprise/storage/database.py +++ b/enterprise/storage/database.py @@ -19,17 +19,23 @@ GCP_REGION = os.environ.get('GCP_REGION') POOL_SIZE = int(os.environ.get('DB_POOL_SIZE', '25')) MAX_OVERFLOW = int(os.environ.get('DB_MAX_OVERFLOW', '10')) +POOL_RECYCLE = int(os.environ.get('DB_POOL_RECYCLE', '1800')) + +# Initialize Cloud SQL Connector once at module level for GCP environments. +_connector = None def _get_db_engine(): if GCP_DB_INSTANCE: # GCP environments def get_db_connection(): + global _connector from google.cloud.sql.connector import Connector - connector = Connector() + if not _connector: + _connector = Connector() instance_string = f'{GCP_PROJECT}:{GCP_REGION}:{GCP_DB_INSTANCE}' - return connector.connect( + return _connector.connect( instance_string, 'pg8000', user=DB_USER, password=DB_PASS, db=DB_NAME ) @@ -38,6 +44,7 @@ def _get_db_engine(): creator=get_db_connection, pool_size=POOL_SIZE, max_overflow=MAX_OVERFLOW, + pool_recycle=POOL_RECYCLE, pool_pre_ping=True, ) else: @@ -48,6 +55,7 @@ def _get_db_engine(): host_string, pool_size=POOL_SIZE, max_overflow=MAX_OVERFLOW, + pool_recycle=POOL_RECYCLE, pool_pre_ping=True, ) diff --git a/openhands/app_server/app_conversation/app_conversation_router.py b/openhands/app_server/app_conversation/app_conversation_router.py index 532602dbca..f68b80ba4e 100644 --- a/openhands/app_server/app_conversation/app_conversation_router.py +++ b/openhands/app_server/app_conversation/app_conversation_router.py @@ -210,11 +210,16 @@ async def start_app_conversation( set_db_session_keep_open(request.state, True) set_httpx_client_keep_open(request.state, True) - """Start an app conversation start task and return it.""" - async_iter = app_conversation_service.start_app_conversation(start_request) - result = await anext(async_iter) - asyncio.create_task(_consume_remaining(async_iter, db_session, httpx_client)) - return result + try: + """Start an app conversation start task and return it.""" + async_iter = app_conversation_service.start_app_conversation(start_request) + result = await anext(async_iter) + asyncio.create_task(_consume_remaining(async_iter, db_session, httpx_client)) + return result + except Exception: + await db_session.close() + await httpx_client.aclose() + raise @router.post('/stream-start') diff --git a/openhands/app_server/event_callback/webhook_router.py b/openhands/app_server/event_callback/webhook_router.py index 37ae9d89b2..62dd7bec16 100644 --- a/openhands/app_server/event_callback/webhook_router.py +++ b/openhands/app_server/event_callback/webhook_router.py @@ -21,12 +21,10 @@ from openhands.app_server.app_conversation.app_conversation_models import ( ) from openhands.app_server.config import ( depends_app_conversation_info_service, - depends_db_session, depends_event_service, depends_jwt_service, depends_sandbox_service, get_event_callback_service, - get_global_config, ) from openhands.app_server.errors import AuthError from openhands.app_server.event.event_service import EventService @@ -54,8 +52,6 @@ sandbox_service_dependency = depends_sandbox_service() event_service_dependency = depends_event_service() app_conversation_info_service_dependency = depends_app_conversation_info_service() jwt_dependency = depends_jwt_service() -config = get_global_config() -db_session_dependency = depends_db_session() _logger = logging.getLogger(__name__) diff --git a/openhands/app_server/services/db_session_injector.py b/openhands/app_server/services/db_session_injector.py index c59243af91..737e1ff879 100644 --- a/openhands/app_server/services/db_session_injector.py +++ b/openhands/app_server/services/db_session_injector.py @@ -4,8 +4,9 @@ import asyncio import logging import os from pathlib import Path -from typing import AsyncGenerator +from typing import Any, AsyncGenerator +import asyncpg from fastapi import Request from pydantic import BaseModel, PrivateAttr, SecretStr, model_validator from sqlalchemy import Engine, create_engine @@ -33,6 +34,7 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): echo: bool = False pool_size: int = 25 max_overflow: int = 10 + pool_recycle: int = 1800 gcp_db_instance: str | None = None gcp_project: str | None = None gcp_region: str | None = None @@ -42,6 +44,7 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): _async_engine: AsyncEngine | None = PrivateAttr(default=None) _session_maker: sessionmaker | None = PrivateAttr(default=None) _async_session_maker: async_sessionmaker | None = PrivateAttr(default=None) + _gcp_connector: Any = PrivateAttr(default=None) @model_validator(mode='after') def fill_empty_fields(self): @@ -65,14 +68,18 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): return self def _create_gcp_db_connection(self): - # Lazy import because lib does not import if user does not have posgres installed - from google.cloud.sql.connector import Connector + gcp_connector = self._gcp_connector + if gcp_connector is None: + # Lazy import because lib does not import if user does not have posgres installed + from google.cloud.sql.connector import Connector + + gcp_connector = Connector() + self._gcp_connector = gcp_connector - connector = Connector() instance_string = f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}' password = self.password assert password is not None - return connector.connect( + return gcp_connector.connect( instance_string, 'pg8000', user=self.user, @@ -81,21 +88,25 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): ) async def _create_async_gcp_db_connection(self): - # Lazy import because lib does not import if user does not have posgres installed - from google.cloud.sql.connector import Connector + gcp_connector = self._gcp_connector + if gcp_connector is None: + # Lazy import because lib does not import if user does not have posgres installed + from google.cloud.sql.connector import Connector - loop = asyncio.get_running_loop() - async with Connector(loop=loop) as connector: - password = self.password - assert password is not None - conn = await connector.connect_async( - f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}', - 'asyncpg', - user=self.user, - password=password.get_secret_value(), - db=self.name, - ) - return conn + loop = asyncio.get_running_loop() + gcp_connector = Connector(loop=loop) + self._gcp_connector = gcp_connector + + password = self.password + assert password is not None + conn = await gcp_connector.connect_async( + f'{self.gcp_project}:{self.gcp_region}:{self.gcp_db_instance}', + 'asyncpg', + user=self.user, + password=password.get_secret_value(), + db=self.name, + ) + return conn def _create_gcp_engine(self): engine = create_engine( @@ -112,10 +123,8 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): AsyncAdapt_asyncpg_connection, ) - engine = self._create_gcp_engine() - return AsyncAdapt_asyncpg_connection( - engine.dialect.dbapi, + asyncpg, await self._create_async_gcp_db_connection(), prepared_statement_cache_size=100, ) @@ -125,12 +134,9 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): AsyncAdapt_asyncpg_connection, ) - base_engine = self._create_gcp_engine() - dbapi = base_engine.dialect.dbapi - def adapted_creator(): return AsyncAdapt_asyncpg_connection( - dbapi, + asyncpg, await_only(self._create_async_gcp_db_connection()), prepared_statement_cache_size=100, ) @@ -141,6 +147,7 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): pool_size=self.pool_size, max_overflow=self.max_overflow, pool_pre_ping=True, + pool_recycle=self.pool_recycle, ) async def get_async_db_engine(self) -> AsyncEngine: @@ -174,6 +181,7 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): url, pool_size=self.pool_size, max_overflow=self.max_overflow, + pool_recycle=self.pool_recycle, pool_pre_ping=True, ) else: @@ -214,6 +222,7 @@ class DbSessionInjector(BaseModel, Injector[async_sessionmaker]): url, pool_size=self.pool_size, max_overflow=self.max_overflow, + pool_recycle=self.pool_recycle, pool_pre_ping=True, ) self._engine = engine diff --git a/tests/unit/app_server/test_db_session_injector.py b/tests/unit/app_server/test_db_session_injector.py index fd0908817c..8183cc26b9 100644 --- a/tests/unit/app_server/test_db_session_injector.py +++ b/tests/unit/app_server/test_db_session_injector.py @@ -456,13 +456,10 @@ class TestDbSessionInjectorGCPIntegration: # Mock the google.cloud.sql.connector module with patch.dict('sys.modules', {'google.cloud.sql.connector': MagicMock()}): mock_connector_module = sys.modules['google.cloud.sql.connector'] - mock_connector = AsyncMock() - mock_connector_module.Connector.return_value.__aenter__.return_value = ( - mock_connector - ) - mock_connector_module.Connector.return_value.__aexit__.return_value = None + mock_connector = MagicMock() + mock_connector_module.Connector.return_value = mock_connector mock_connection = AsyncMock() - mock_connector.connect_async.return_value = mock_connection + mock_connector.connect_async = AsyncMock(return_value=mock_connection) connection = await gcp_db_session_injector._create_async_gcp_db_connection() From a829d102130a113029ed55441434532dfc1101f5 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 26 Dec 2025 10:02:01 -0700 Subject: [PATCH 65/80] ALL-4634: implement public conversation sharing feature (#12044) Co-authored-by: openhands --- ..._public_column_to_conversation_metadata.py | 41 ++ enterprise/poetry.lock | 2 +- enterprise/saas_server.py | 9 + enterprise/server/sharing/README.md | 20 + .../filesystem_shared_event_service.py | 142 ++++++ .../shared_conversation_info_service.py | 66 +++ .../sharing/shared_conversation_models.py | 56 +++ .../sharing/shared_conversation_router.py | 135 ++++++ .../server/sharing/shared_event_router.py | 126 +++++ .../server/sharing/shared_event_service.py | 64 +++ .../sql_shared_conversation_info_service.py | 282 ++++++++++++ enterprise/storage/saas_conversation_store.py | 1 + .../tests/unit/test_sharing/__init__.py | 1 + .../test_shared_conversation_models.py | 91 ++++ ...haring_shared_conversation_info_service.py | 430 ++++++++++++++++++ .../test_sharing_shared_event_service.py | 365 +++++++++++++++ .../app_conversation_models.py | 8 + .../app_conversation_router.py | 17 + .../app_conversation_service.py | 8 + .../live_status_app_conversation_service.py | 18 + .../sql_app_conversation_info_service.py | 15 +- .../app_lifespan/alembic/versions/004.py | 41 ++ openhands/app_server/event/event_router.py | 8 +- .../event/filesystem_event_service.py | 252 +--------- .../event/filesystem_event_service_base.py | 224 +++++++++ .../server/data_models/conversation_info.py | 1 + .../server/routes/manage_conversations.py | 1 + .../data_models/conversation_metadata.py | 1 + poetry.lock | 15 +- pyproject.toml | 1 + 30 files changed, 2191 insertions(+), 250 deletions(-) create mode 100644 enterprise/migrations/versions/085_add_public_column_to_conversation_metadata.py create mode 100644 enterprise/server/sharing/README.md create mode 100644 enterprise/server/sharing/filesystem_shared_event_service.py create mode 100644 enterprise/server/sharing/shared_conversation_info_service.py create mode 100644 enterprise/server/sharing/shared_conversation_models.py create mode 100644 enterprise/server/sharing/shared_conversation_router.py create mode 100644 enterprise/server/sharing/shared_event_router.py create mode 100644 enterprise/server/sharing/shared_event_service.py create mode 100644 enterprise/server/sharing/sql_shared_conversation_info_service.py create mode 100644 enterprise/tests/unit/test_sharing/__init__.py create mode 100644 enterprise/tests/unit/test_sharing/test_shared_conversation_models.py create mode 100644 enterprise/tests/unit/test_sharing/test_sharing_shared_conversation_info_service.py create mode 100644 enterprise/tests/unit/test_sharing/test_sharing_shared_event_service.py create mode 100644 openhands/app_server/app_lifespan/alembic/versions/004.py create mode 100644 openhands/app_server/event/filesystem_event_service_base.py diff --git a/enterprise/migrations/versions/085_add_public_column_to_conversation_metadata.py b/enterprise/migrations/versions/085_add_public_column_to_conversation_metadata.py new file mode 100644 index 0000000000..71324b0068 --- /dev/null +++ b/enterprise/migrations/versions/085_add_public_column_to_conversation_metadata.py @@ -0,0 +1,41 @@ +"""add public column to conversation_metadata + +Revision ID: 085 +Revises: 084 +Create Date: 2025-01-27 00:00:00.000000 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = '085' +down_revision: Union[str, None] = '084' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + op.add_column( + 'conversation_metadata', + sa.Column('public', sa.Boolean(), nullable=True), + ) + op.create_index( + op.f('ix_conversation_metadata_public'), + 'conversation_metadata', + ['public'], + unique=False, + ) + + +def downgrade() -> None: + """Downgrade schema.""" + op.drop_index( + op.f('ix_conversation_metadata_public'), + table_name='conversation_metadata', + ) + op.drop_column('conversation_metadata', 'public') diff --git a/enterprise/poetry.lock b/enterprise/poetry.lock index bd2c55c317..2535aef566 100644 --- a/enterprise/poetry.lock +++ b/enterprise/poetry.lock @@ -5860,7 +5860,7 @@ wsproto = ">=1.2.0" [[package]] name = "openhands-ai" -version = "0.0.0-post.5687+7853b41ad" +version = "0.0.0-post.5750+f19fb1043" description = "OpenHands: Code Less, Make More" optional = false python-versions = "^3.12,<3.14" diff --git a/enterprise/saas_server.py b/enterprise/saas_server.py index 96e19a9815..ec1480cbda 100644 --- a/enterprise/saas_server.py +++ b/enterprise/saas_server.py @@ -37,6 +37,12 @@ from server.routes.mcp_patch import patch_mcp_server # noqa: E402 from server.routes.oauth_device import oauth_device_router # noqa: E402 from server.routes.readiness import readiness_router # noqa: E402 from server.routes.user import saas_user_router # noqa: E402 +from server.sharing.shared_conversation_router import ( # noqa: E402 + router as shared_conversation_router, +) +from server.sharing.shared_event_router import ( # noqa: E402 + router as shared_event_router, +) from openhands.server.app import app as base_app # noqa: E402 from openhands.server.listen_socket import sio # noqa: E402 @@ -66,6 +72,8 @@ base_app.include_router(saas_user_router) # Add additional route SAAS user call base_app.include_router( billing_router ) # Add routes for credit management and Stripe payment integration +base_app.include_router(shared_conversation_router) +base_app.include_router(shared_event_router) # Add GitHub integration router only if GITHUB_APP_CLIENT_ID is set if GITHUB_APP_CLIENT_ID: @@ -99,6 +107,7 @@ base_app.include_router( event_webhook_router ) # Add routes for Events in nested runtimes + base_app.add_middleware( CORSMiddleware, allow_origins=PERMITTED_CORS_ORIGINS, diff --git a/enterprise/server/sharing/README.md b/enterprise/server/sharing/README.md new file mode 100644 index 0000000000..5eb5474d21 --- /dev/null +++ b/enterprise/server/sharing/README.md @@ -0,0 +1,20 @@ +# Sharing Package + +This package contains functionality for sharing conversations. + +## Components + +- **shared.py**: Data models for shared conversations +- **shared_conversation_info_service.py**: Service interface for accessing shared conversation info +- **sql_shared_conversation_info_service.py**: SQL implementation of the shared conversation info service +- **shared_event_service.py**: Service interface for accessing shared events +- **shared_event_service_impl.py**: Implementation of the shared event service +- **shared_conversation_router.py**: REST API endpoints for shared conversations +- **shared_event_router.py**: REST API endpoints for shared events + +## Features + +- Read-only access to shared conversations +- Event access for shared conversations +- Search and filtering capabilities +- Pagination support diff --git a/enterprise/server/sharing/filesystem_shared_event_service.py b/enterprise/server/sharing/filesystem_shared_event_service.py new file mode 100644 index 0000000000..e39f880bdf --- /dev/null +++ b/enterprise/server/sharing/filesystem_shared_event_service.py @@ -0,0 +1,142 @@ +"""Implementation of SharedEventService. + +This implementation provides read-only access to events from shared conversations: +- Validates that the conversation is shared before returning events +- Uses existing EventService for actual event retrieval +- Uses SharedConversationInfoService for shared conversation validation +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from datetime import datetime +from typing import AsyncGenerator +from uuid import UUID + +from fastapi import Request +from server.sharing.shared_conversation_info_service import ( + SharedConversationInfoService, +) +from server.sharing.shared_event_service import ( + SharedEventService, + SharedEventServiceInjector, +) +from server.sharing.sql_shared_conversation_info_service import ( + SQLSharedConversationInfoService, +) + +from openhands.agent_server.models import EventPage, EventSortOrder +from openhands.app_server.event.event_service import EventService +from openhands.app_server.event_callback.event_callback_models import EventKind +from openhands.app_server.services.injector import InjectorState +from openhands.sdk import Event + +logger = logging.getLogger(__name__) + + +@dataclass +class SharedEventServiceImpl(SharedEventService): + """Implementation of SharedEventService that validates shared access.""" + + shared_conversation_info_service: SharedConversationInfoService + event_service: EventService + + async def get_shared_event( + self, conversation_id: UUID, event_id: str + ) -> Event | None: + """Given a conversation_id and event_id, retrieve an event if the conversation is shared.""" + # First check if the conversation is shared + shared_conversation_info = ( + await self.shared_conversation_info_service.get_shared_conversation_info( + conversation_id + ) + ) + if shared_conversation_info is None: + return None + + # If conversation is shared, get the event + return await self.event_service.get_event(event_id) + + async def search_shared_events( + self, + conversation_id: UUID, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + page_id: str | None = None, + limit: int = 100, + ) -> EventPage: + """Search events for a specific shared conversation.""" + # First check if the conversation is shared + shared_conversation_info = ( + await self.shared_conversation_info_service.get_shared_conversation_info( + conversation_id + ) + ) + if shared_conversation_info is None: + # Return empty page if conversation is not shared + return EventPage(items=[], next_page_id=None) + + # If conversation is shared, search events for this conversation + return await self.event_service.search_events( + conversation_id__eq=conversation_id, + kind__eq=kind__eq, + timestamp__gte=timestamp__gte, + timestamp__lt=timestamp__lt, + sort_order=sort_order, + page_id=page_id, + limit=limit, + ) + + async def count_shared_events( + self, + conversation_id: UUID, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + ) -> int: + """Count events for a specific shared conversation.""" + # First check if the conversation is shared + shared_conversation_info = ( + await self.shared_conversation_info_service.get_shared_conversation_info( + conversation_id + ) + ) + if shared_conversation_info is None: + return 0 + + # If conversation is shared, count events for this conversation + return await self.event_service.count_events( + conversation_id__eq=conversation_id, + kind__eq=kind__eq, + timestamp__gte=timestamp__gte, + timestamp__lt=timestamp__lt, + sort_order=sort_order, + ) + + +class SharedEventServiceImplInjector(SharedEventServiceInjector): + async def inject( + self, state: InjectorState, request: Request | None = None + ) -> AsyncGenerator[SharedEventService, None]: + # Define inline to prevent circular lookup + from openhands.app_server.config import ( + get_db_session, + get_event_service, + ) + + async with ( + get_db_session(state, request) as db_session, + get_event_service(state, request) as event_service, + ): + shared_conversation_info_service = SQLSharedConversationInfoService( + db_session=db_session + ) + service = SharedEventServiceImpl( + shared_conversation_info_service=shared_conversation_info_service, + event_service=event_service, + ) + yield service diff --git a/enterprise/server/sharing/shared_conversation_info_service.py b/enterprise/server/sharing/shared_conversation_info_service.py new file mode 100644 index 0000000000..a1fdec6718 --- /dev/null +++ b/enterprise/server/sharing/shared_conversation_info_service.py @@ -0,0 +1,66 @@ +import asyncio +from abc import ABC, abstractmethod +from datetime import datetime +from uuid import UUID + +from server.sharing.shared_conversation_models import ( + SharedConversation, + SharedConversationPage, + SharedConversationSortOrder, +) + +from openhands.app_server.services.injector import Injector +from openhands.sdk.utils.models import DiscriminatedUnionMixin + + +class SharedConversationInfoService(ABC): + """Service for accessing shared conversation info without user restrictions.""" + + @abstractmethod + async def search_shared_conversation_info( + self, + title__contains: str | None = None, + created_at__gte: datetime | None = None, + created_at__lt: datetime | None = None, + updated_at__gte: datetime | None = None, + updated_at__lt: datetime | None = None, + sort_order: SharedConversationSortOrder = SharedConversationSortOrder.CREATED_AT_DESC, + page_id: str | None = None, + limit: int = 100, + include_sub_conversations: bool = False, + ) -> SharedConversationPage: + """Search for shared conversations.""" + + @abstractmethod + async def count_shared_conversation_info( + self, + title__contains: str | None = None, + created_at__gte: datetime | None = None, + created_at__lt: datetime | None = None, + updated_at__gte: datetime | None = None, + updated_at__lt: datetime | None = None, + ) -> int: + """Count shared conversations.""" + + @abstractmethod + async def get_shared_conversation_info( + self, conversation_id: UUID + ) -> SharedConversation | None: + """Get a single shared conversation info, returning None if missing or not shared.""" + + async def batch_get_shared_conversation_info( + self, conversation_ids: list[UUID] + ) -> list[SharedConversation | None]: + """Get a batch of shared conversation info, return None for any missing or non-shared.""" + return await asyncio.gather( + *[ + self.get_shared_conversation_info(conversation_id) + for conversation_id in conversation_ids + ] + ) + + +class SharedConversationInfoServiceInjector( + DiscriminatedUnionMixin, Injector[SharedConversationInfoService], ABC +): + pass diff --git a/enterprise/server/sharing/shared_conversation_models.py b/enterprise/server/sharing/shared_conversation_models.py new file mode 100644 index 0000000000..806ddefb12 --- /dev/null +++ b/enterprise/server/sharing/shared_conversation_models.py @@ -0,0 +1,56 @@ +from datetime import datetime +from enum import Enum + +# Simplified imports to avoid dependency chain issues +# from openhands.integrations.service_types import ProviderType +# from openhands.sdk.llm import MetricsSnapshot +# from openhands.storage.data_models.conversation_metadata import ConversationTrigger +# For now, use Any to avoid import issues +from typing import Any +from uuid import uuid4 + +from pydantic import BaseModel, Field + +from openhands.agent_server.utils import OpenHandsUUID, utc_now + +ProviderType = Any +MetricsSnapshot = Any +ConversationTrigger = Any + + +class SharedConversation(BaseModel): + """Shared conversation info model with all fields from AppConversationInfo.""" + + id: OpenHandsUUID = Field(default_factory=uuid4) + + created_by_user_id: str | None + sandbox_id: str + + selected_repository: str | None = None + selected_branch: str | None = None + git_provider: ProviderType | None = None + title: str | None = None + pr_number: list[int] = Field(default_factory=list) + llm_model: str | None = None + + metrics: MetricsSnapshot | None = None + + parent_conversation_id: OpenHandsUUID | None = None + sub_conversation_ids: list[OpenHandsUUID] = Field(default_factory=list) + + created_at: datetime = Field(default_factory=utc_now) + updated_at: datetime = Field(default_factory=utc_now) + + +class SharedConversationSortOrder(Enum): + CREATED_AT = 'CREATED_AT' + CREATED_AT_DESC = 'CREATED_AT_DESC' + UPDATED_AT = 'UPDATED_AT' + UPDATED_AT_DESC = 'UPDATED_AT_DESC' + TITLE = 'TITLE' + TITLE_DESC = 'TITLE_DESC' + + +class SharedConversationPage(BaseModel): + items: list[SharedConversation] + next_page_id: str | None = None diff --git a/enterprise/server/sharing/shared_conversation_router.py b/enterprise/server/sharing/shared_conversation_router.py new file mode 100644 index 0000000000..26fe047e6d --- /dev/null +++ b/enterprise/server/sharing/shared_conversation_router.py @@ -0,0 +1,135 @@ +"""Shared Conversation router for OpenHands Server.""" + +from datetime import datetime +from typing import Annotated +from uuid import UUID + +from fastapi import APIRouter, Depends, Query +from server.sharing.shared_conversation_info_service import ( + SharedConversationInfoService, +) +from server.sharing.shared_conversation_models import ( + SharedConversation, + SharedConversationPage, + SharedConversationSortOrder, +) +from server.sharing.sql_shared_conversation_info_service import ( + SQLSharedConversationInfoServiceInjector, +) + +router = APIRouter(prefix='/api/shared-conversations', tags=['Sharing']) +shared_conversation_info_service_dependency = Depends( + SQLSharedConversationInfoServiceInjector().depends +) + +# Read methods + + +@router.get('/search') +async def search_shared_conversations( + title__contains: Annotated[ + str | None, + Query(title='Filter by title containing this string'), + ] = None, + created_at__gte: Annotated[ + datetime | None, + Query(title='Filter by created_at greater than or equal to this datetime'), + ] = None, + created_at__lt: Annotated[ + datetime | None, + Query(title='Filter by created_at less than this datetime'), + ] = None, + updated_at__gte: Annotated[ + datetime | None, + Query(title='Filter by updated_at greater than or equal to this datetime'), + ] = None, + updated_at__lt: Annotated[ + datetime | None, + Query(title='Filter by updated_at less than this datetime'), + ] = None, + sort_order: Annotated[ + SharedConversationSortOrder, + Query(title='Sort order for results'), + ] = SharedConversationSortOrder.CREATED_AT_DESC, + page_id: Annotated[ + str | None, + Query(title='Optional next_page_id from the previously returned page'), + ] = None, + limit: Annotated[ + int, + Query( + title='The max number of results in the page', + gt=0, + lte=100, + ), + ] = 100, + include_sub_conversations: Annotated[ + bool, + Query( + title='If True, include sub-conversations in the results. If False (default), exclude all sub-conversations.' + ), + ] = False, + shared_conversation_service: SharedConversationInfoService = shared_conversation_info_service_dependency, +) -> SharedConversationPage: + """Search / List shared conversations.""" + assert limit > 0 + assert limit <= 100 + return await shared_conversation_service.search_shared_conversation_info( + title__contains=title__contains, + created_at__gte=created_at__gte, + created_at__lt=created_at__lt, + updated_at__gte=updated_at__gte, + updated_at__lt=updated_at__lt, + sort_order=sort_order, + page_id=page_id, + limit=limit, + include_sub_conversations=include_sub_conversations, + ) + + +@router.get('/count') +async def count_shared_conversations( + title__contains: Annotated[ + str | None, + Query(title='Filter by title containing this string'), + ] = None, + created_at__gte: Annotated[ + datetime | None, + Query(title='Filter by created_at greater than or equal to this datetime'), + ] = None, + created_at__lt: Annotated[ + datetime | None, + Query(title='Filter by created_at less than this datetime'), + ] = None, + updated_at__gte: Annotated[ + datetime | None, + Query(title='Filter by updated_at greater than or equal to this datetime'), + ] = None, + updated_at__lt: Annotated[ + datetime | None, + Query(title='Filter by updated_at less than this datetime'), + ] = None, + shared_conversation_service: SharedConversationInfoService = shared_conversation_info_service_dependency, +) -> int: + """Count shared conversations matching the given filters.""" + return await shared_conversation_service.count_shared_conversation_info( + title__contains=title__contains, + created_at__gte=created_at__gte, + created_at__lt=created_at__lt, + updated_at__gte=updated_at__gte, + updated_at__lt=updated_at__lt, + ) + + +@router.get('') +async def batch_get_shared_conversations( + ids: Annotated[list[str], Query()], + shared_conversation_service: SharedConversationInfoService = shared_conversation_info_service_dependency, +) -> list[SharedConversation | None]: + """Get a batch of shared conversations given their ids. Return None for any missing or non-shared.""" + assert len(ids) <= 100 + uuids = [UUID(id_) for id_ in ids] + shared_conversation_info = ( + await shared_conversation_service.batch_get_shared_conversation_info(uuids) + ) + return shared_conversation_info diff --git a/enterprise/server/sharing/shared_event_router.py b/enterprise/server/sharing/shared_event_router.py new file mode 100644 index 0000000000..4fc579196c --- /dev/null +++ b/enterprise/server/sharing/shared_event_router.py @@ -0,0 +1,126 @@ +"""Shared Event router for OpenHands Server.""" + +from datetime import datetime +from typing import Annotated +from uuid import UUID + +from fastapi import APIRouter, Depends, Query +from server.sharing.filesystem_shared_event_service import ( + SharedEventServiceImplInjector, +) +from server.sharing.shared_event_service import SharedEventService + +from openhands.agent_server.models import EventPage, EventSortOrder +from openhands.app_server.event_callback.event_callback_models import EventKind +from openhands.sdk import Event + +router = APIRouter(prefix='/api/shared-events', tags=['Sharing']) +shared_event_service_dependency = Depends(SharedEventServiceImplInjector().depends) + + +# Read methods + + +@router.get('/search') +async def search_shared_events( + conversation_id: Annotated[ + str, + Query(title='Conversation ID to search events for'), + ], + kind__eq: Annotated[ + EventKind | None, + Query(title='Optional filter by event kind'), + ] = None, + timestamp__gte: Annotated[ + datetime | None, + Query(title='Optional filter by timestamp greater than or equal to'), + ] = None, + timestamp__lt: Annotated[ + datetime | None, + Query(title='Optional filter by timestamp less than'), + ] = None, + sort_order: Annotated[ + EventSortOrder, + Query(title='Sort order for results'), + ] = EventSortOrder.TIMESTAMP, + page_id: Annotated[ + str | None, + Query(title='Optional next_page_id from the previously returned page'), + ] = None, + limit: Annotated[ + int, + Query(title='The max number of results in the page', gt=0, lte=100), + ] = 100, + shared_event_service: SharedEventService = shared_event_service_dependency, +) -> EventPage: + """Search / List events for a shared conversation.""" + assert limit > 0 + assert limit <= 100 + return await shared_event_service.search_shared_events( + conversation_id=UUID(conversation_id), + kind__eq=kind__eq, + timestamp__gte=timestamp__gte, + timestamp__lt=timestamp__lt, + sort_order=sort_order, + page_id=page_id, + limit=limit, + ) + + +@router.get('/count') +async def count_shared_events( + conversation_id: Annotated[ + str, + Query(title='Conversation ID to count events for'), + ], + kind__eq: Annotated[ + EventKind | None, + Query(title='Optional filter by event kind'), + ] = None, + timestamp__gte: Annotated[ + datetime | None, + Query(title='Optional filter by timestamp greater than or equal to'), + ] = None, + timestamp__lt: Annotated[ + datetime | None, + Query(title='Optional filter by timestamp less than'), + ] = None, + sort_order: Annotated[ + EventSortOrder, + Query(title='Sort order for results'), + ] = EventSortOrder.TIMESTAMP, + shared_event_service: SharedEventService = shared_event_service_dependency, +) -> int: + """Count events for a shared conversation matching the given filters.""" + return await shared_event_service.count_shared_events( + conversation_id=UUID(conversation_id), + kind__eq=kind__eq, + timestamp__gte=timestamp__gte, + timestamp__lt=timestamp__lt, + sort_order=sort_order, + ) + + +@router.get('') +async def batch_get_shared_events( + conversation_id: Annotated[ + UUID, + Query(title='Conversation ID to get events for'), + ], + id: Annotated[list[str], Query()], + shared_event_service: SharedEventService = shared_event_service_dependency, +) -> list[Event | None]: + """Get a batch of events for a shared conversation given their ids, returning null for any missing event.""" + assert len(id) <= 100 + events = await shared_event_service.batch_get_shared_events(conversation_id, id) + return events + + +@router.get('/{conversation_id}/{event_id}') +async def get_shared_event( + conversation_id: UUID, + event_id: str, + shared_event_service: SharedEventService = shared_event_service_dependency, +) -> Event | None: + """Get a single event from a shared conversation by conversation_id and event_id.""" + return await shared_event_service.get_shared_event(conversation_id, event_id) diff --git a/enterprise/server/sharing/shared_event_service.py b/enterprise/server/sharing/shared_event_service.py new file mode 100644 index 0000000000..054153d03f --- /dev/null +++ b/enterprise/server/sharing/shared_event_service.py @@ -0,0 +1,64 @@ +import asyncio +import logging +from abc import ABC, abstractmethod +from datetime import datetime +from uuid import UUID + +from openhands.agent_server.models import EventPage, EventSortOrder +from openhands.app_server.event_callback.event_callback_models import EventKind +from openhands.app_server.services.injector import Injector +from openhands.sdk import Event +from openhands.sdk.utils.models import DiscriminatedUnionMixin + +_logger = logging.getLogger(__name__) + + +class SharedEventService(ABC): + """Event Service for getting events from shared conversations only.""" + + @abstractmethod + async def get_shared_event( + self, conversation_id: UUID, event_id: str + ) -> Event | None: + """Given a conversation_id and event_id, retrieve an event if the conversation is shared.""" + + @abstractmethod + async def search_shared_events( + self, + conversation_id: UUID, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + page_id: str | None = None, + limit: int = 100, + ) -> EventPage: + """Search events for a specific shared conversation.""" + + @abstractmethod + async def count_shared_events( + self, + conversation_id: UUID, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + ) -> int: + """Count events for a specific shared conversation.""" + + async def batch_get_shared_events( + self, conversation_id: UUID, event_ids: list[str] + ) -> list[Event | None]: + """Given a conversation_id and list of event_ids, get events if the conversation is shared.""" + return await asyncio.gather( + *[ + self.get_shared_event(conversation_id, event_id) + for event_id in event_ids + ] + ) + + +class SharedEventServiceInjector( + DiscriminatedUnionMixin, Injector[SharedEventService], ABC +): + pass diff --git a/enterprise/server/sharing/sql_shared_conversation_info_service.py b/enterprise/server/sharing/sql_shared_conversation_info_service.py new file mode 100644 index 0000000000..f86a6045bb --- /dev/null +++ b/enterprise/server/sharing/sql_shared_conversation_info_service.py @@ -0,0 +1,282 @@ +"""SQL implementation of SharedConversationInfoService. + +This implementation provides read-only access to shared conversations: +- Direct database access without user permission checks +- Filters only conversations marked as shared (currently public) +- Full async/await support using SQL async db_sessions +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from datetime import UTC, datetime +from typing import AsyncGenerator +from uuid import UUID + +from fastapi import Request +from server.sharing.shared_conversation_info_service import ( + SharedConversationInfoService, + SharedConversationInfoServiceInjector, +) +from server.sharing.shared_conversation_models import ( + SharedConversation, + SharedConversationPage, + SharedConversationSortOrder, +) +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( + StoredConversationMetadata, +) +from openhands.app_server.services.injector import InjectorState +from openhands.integrations.provider import ProviderType +from openhands.sdk.llm import MetricsSnapshot +from openhands.sdk.llm.utils.metrics import TokenUsage + +logger = logging.getLogger(__name__) + + +@dataclass +class SQLSharedConversationInfoService(SharedConversationInfoService): + """SQL implementation of SharedConversationInfoService for shared conversations only.""" + + db_session: AsyncSession + + async def search_shared_conversation_info( + self, + title__contains: str | None = None, + created_at__gte: datetime | None = None, + created_at__lt: datetime | None = None, + updated_at__gte: datetime | None = None, + updated_at__lt: datetime | None = None, + sort_order: SharedConversationSortOrder = SharedConversationSortOrder.CREATED_AT_DESC, + page_id: str | None = None, + limit: int = 100, + include_sub_conversations: bool = False, + ) -> SharedConversationPage: + """Search for shared conversations.""" + query = self._public_select() + + # Conditionally exclude sub-conversations based on the parameter + if not include_sub_conversations: + # Exclude sub-conversations (only include top-level conversations) + query = query.where( + StoredConversationMetadata.parent_conversation_id.is_(None) + ) + + query = self._apply_filters( + query=query, + title__contains=title__contains, + created_at__gte=created_at__gte, + created_at__lt=created_at__lt, + updated_at__gte=updated_at__gte, + updated_at__lt=updated_at__lt, + ) + + # Add sort order + if sort_order == SharedConversationSortOrder.CREATED_AT: + query = query.order_by(StoredConversationMetadata.created_at) + elif sort_order == SharedConversationSortOrder.CREATED_AT_DESC: + query = query.order_by(StoredConversationMetadata.created_at.desc()) + elif sort_order == SharedConversationSortOrder.UPDATED_AT: + query = query.order_by(StoredConversationMetadata.last_updated_at) + elif sort_order == SharedConversationSortOrder.UPDATED_AT_DESC: + query = query.order_by(StoredConversationMetadata.last_updated_at.desc()) + elif sort_order == SharedConversationSortOrder.TITLE: + query = query.order_by(StoredConversationMetadata.title) + elif sort_order == SharedConversationSortOrder.TITLE_DESC: + query = query.order_by(StoredConversationMetadata.title.desc()) + + # Apply pagination + if page_id is not None: + try: + offset = int(page_id) + query = query.offset(offset) + except ValueError: + # If page_id is not a valid integer, start from beginning + offset = 0 + else: + offset = 0 + + # Apply limit and get one extra to check if there are more results + query = query.limit(limit + 1) + + result = await self.db_session.execute(query) + rows = result.scalars().all() + + # Check if there are more results + has_more = len(rows) > limit + if has_more: + rows = rows[:limit] + + items = [self._to_shared_conversation(row) for row in rows] + + # Calculate next page ID + next_page_id = None + if has_more: + next_page_id = str(offset + limit) + + return SharedConversationPage(items=items, next_page_id=next_page_id) + + async def count_shared_conversation_info( + self, + title__contains: str | None = None, + created_at__gte: datetime | None = None, + created_at__lt: datetime | None = None, + updated_at__gte: datetime | None = None, + updated_at__lt: datetime | None = None, + ) -> int: + """Count shared conversations matching the given filters.""" + from sqlalchemy import func + + query = select(func.count(StoredConversationMetadata.conversation_id)) + # Only include shared conversations + query = query.where(StoredConversationMetadata.public == True) # noqa: E712 + query = query.where(StoredConversationMetadata.conversation_version == 'V1') + + query = self._apply_filters( + query=query, + title__contains=title__contains, + created_at__gte=created_at__gte, + created_at__lt=created_at__lt, + updated_at__gte=updated_at__gte, + updated_at__lt=updated_at__lt, + ) + + result = await self.db_session.execute(query) + return result.scalar() or 0 + + async def get_shared_conversation_info( + self, conversation_id: UUID + ) -> SharedConversation | None: + """Get a single public conversation info, returning None if missing or not shared.""" + query = self._public_select().where( + StoredConversationMetadata.conversation_id == str(conversation_id) + ) + + result = await self.db_session.execute(query) + stored = result.scalar_one_or_none() + + if stored is None: + return None + + return self._to_shared_conversation(stored) + + def _public_select(self): + """Create a select query that only returns public conversations.""" + query = select(StoredConversationMetadata).where( + StoredConversationMetadata.conversation_version == 'V1' + ) + # Only include conversations marked as public + query = query.where(StoredConversationMetadata.public == True) # noqa: E712 + return query + + def _apply_filters( + self, + query, + title__contains: str | None = None, + created_at__gte: datetime | None = None, + created_at__lt: datetime | None = None, + updated_at__gte: datetime | None = None, + updated_at__lt: datetime | None = None, + ): + """Apply common filters to a query.""" + if title__contains is not None: + query = query.where( + StoredConversationMetadata.title.contains(title__contains) + ) + + if created_at__gte is not None: + query = query.where( + StoredConversationMetadata.created_at >= created_at__gte + ) + + if created_at__lt is not None: + query = query.where(StoredConversationMetadata.created_at < created_at__lt) + + if updated_at__gte is not None: + query = query.where( + StoredConversationMetadata.last_updated_at >= updated_at__gte + ) + + if updated_at__lt is not None: + query = query.where( + StoredConversationMetadata.last_updated_at < updated_at__lt + ) + + return query + + def _to_shared_conversation( + self, + stored: StoredConversationMetadata, + sub_conversation_ids: list[UUID] | None = None, + ) -> SharedConversation: + """Convert StoredConversationMetadata to SharedConversation.""" + # V1 conversations should always have a sandbox_id + sandbox_id = stored.sandbox_id + assert sandbox_id is not None + + # Rebuild token usage + token_usage = TokenUsage( + prompt_tokens=stored.prompt_tokens, + completion_tokens=stored.completion_tokens, + cache_read_tokens=stored.cache_read_tokens, + cache_write_tokens=stored.cache_write_tokens, + context_window=stored.context_window, + per_turn_token=stored.per_turn_token, + ) + + # Rebuild metrics object + metrics = MetricsSnapshot( + accumulated_cost=stored.accumulated_cost, + max_budget_per_task=stored.max_budget_per_task, + accumulated_token_usage=token_usage, + ) + + # Get timestamps + created_at = self._fix_timezone(stored.created_at) + updated_at = self._fix_timezone(stored.last_updated_at) + + return SharedConversation( + id=UUID(stored.conversation_id), + created_by_user_id=stored.user_id if stored.user_id else None, + sandbox_id=stored.sandbox_id, + selected_repository=stored.selected_repository, + selected_branch=stored.selected_branch, + git_provider=( + ProviderType(stored.git_provider) if stored.git_provider else None + ), + title=stored.title, + pr_number=stored.pr_number, + llm_model=stored.llm_model, + metrics=metrics, + parent_conversation_id=( + UUID(stored.parent_conversation_id) + if stored.parent_conversation_id + else None + ), + sub_conversation_ids=sub_conversation_ids or [], + created_at=created_at, + updated_at=updated_at, + ) + + def _fix_timezone(self, value: datetime) -> datetime: + """Sqlite does not store timezones - and since we can't update the existing models + we assume UTC if the timezone is missing.""" + if not value.tzinfo: + value = value.replace(tzinfo=UTC) + return value + + +class SQLSharedConversationInfoServiceInjector(SharedConversationInfoServiceInjector): + async def inject( + self, state: InjectorState, request: Request | None = None + ) -> AsyncGenerator[SharedConversationInfoService, None]: + # Define inline to prevent circular lookup + from openhands.app_server.config import get_db_session + + async with get_db_session(state, request) as db_session: + service = SQLSharedConversationInfoService(db_session=db_session) + yield service diff --git a/enterprise/storage/saas_conversation_store.py b/enterprise/storage/saas_conversation_store.py index 160c3a80a2..3a0756dd5a 100644 --- a/enterprise/storage/saas_conversation_store.py +++ b/enterprise/storage/saas_conversation_store.py @@ -61,6 +61,7 @@ class SaasConversationStore(ConversationStore): kwargs.pop('context_window', None) kwargs.pop('per_turn_token', None) kwargs.pop('parent_conversation_id', None) + kwargs.pop('public') return ConversationMetadata(**kwargs) diff --git a/enterprise/tests/unit/test_sharing/__init__.py b/enterprise/tests/unit/test_sharing/__init__.py new file mode 100644 index 0000000000..8958107d4e --- /dev/null +++ b/enterprise/tests/unit/test_sharing/__init__.py @@ -0,0 +1 @@ +"""Tests for sharing package.""" diff --git a/enterprise/tests/unit/test_sharing/test_shared_conversation_models.py b/enterprise/tests/unit/test_sharing/test_shared_conversation_models.py new file mode 100644 index 0000000000..ec8ae8ce8e --- /dev/null +++ b/enterprise/tests/unit/test_sharing/test_shared_conversation_models.py @@ -0,0 +1,91 @@ +"""Tests for public conversation models.""" + +from datetime import datetime +from uuid import uuid4 + +from server.sharing.shared_conversation_models import ( + SharedConversation, + SharedConversationPage, + SharedConversationSortOrder, +) + + +def test_public_conversation_creation(): + """Test that SharedConversation can be created with all required fields.""" + conversation_id = uuid4() + now = datetime.utcnow() + + conversation = SharedConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test_sandbox', + title='Test Conversation', + created_at=now, + updated_at=now, + selected_repository=None, + parent_conversation_id=None, + ) + + assert conversation.id == conversation_id + assert conversation.title == 'Test Conversation' + assert conversation.created_by_user_id == 'test_user' + assert conversation.sandbox_id == 'test_sandbox' + + +def test_public_conversation_page_creation(): + """Test that SharedConversationPage can be created.""" + conversation_id = uuid4() + now = datetime.utcnow() + + conversation = SharedConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test_sandbox', + title='Test Conversation', + created_at=now, + updated_at=now, + selected_repository=None, + parent_conversation_id=None, + ) + + page = SharedConversationPage( + items=[conversation], + next_page_id='next_page', + ) + + assert len(page.items) == 1 + assert page.items[0].id == conversation_id + assert page.next_page_id == 'next_page' + + +def test_public_conversation_sort_order_enum(): + """Test that SharedConversationSortOrder enum has expected values.""" + assert hasattr(SharedConversationSortOrder, 'CREATED_AT') + assert hasattr(SharedConversationSortOrder, 'CREATED_AT_DESC') + assert hasattr(SharedConversationSortOrder, 'UPDATED_AT') + assert hasattr(SharedConversationSortOrder, 'UPDATED_AT_DESC') + assert hasattr(SharedConversationSortOrder, 'TITLE') + assert hasattr(SharedConversationSortOrder, 'TITLE_DESC') + + +def test_public_conversation_optional_fields(): + """Test that SharedConversation works with optional fields.""" + conversation_id = uuid4() + parent_id = uuid4() + now = datetime.utcnow() + + conversation = SharedConversation( + id=conversation_id, + created_by_user_id='test_user', + sandbox_id='test_sandbox', + title='Test Conversation', + created_at=now, + updated_at=now, + selected_repository='owner/repo', + parent_conversation_id=parent_id, + llm_model='gpt-4', + ) + + assert conversation.selected_repository == 'owner/repo' + assert conversation.parent_conversation_id == parent_id + assert conversation.llm_model == 'gpt-4' diff --git a/enterprise/tests/unit/test_sharing/test_sharing_shared_conversation_info_service.py b/enterprise/tests/unit/test_sharing/test_sharing_shared_conversation_info_service.py new file mode 100644 index 0000000000..bacb9edb58 --- /dev/null +++ b/enterprise/tests/unit/test_sharing/test_sharing_shared_conversation_info_service.py @@ -0,0 +1,430 @@ +"""Tests for SharedConversationInfoService.""" + +from datetime import UTC, datetime +from typing import AsyncGenerator +from uuid import uuid4 + +import pytest +from server.sharing.shared_conversation_models import ( + SharedConversationSortOrder, +) +from server.sharing.sql_shared_conversation_info_service import ( + SQLSharedConversationInfoService, +) +from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine +from sqlalchemy.pool import StaticPool + +from openhands.app_server.app_conversation.app_conversation_models import ( + AppConversationInfo, +) +from openhands.app_server.app_conversation.sql_app_conversation_info_service import ( + SQLAppConversationInfoService, +) +from openhands.app_server.user.specifiy_user_context import SpecifyUserContext +from openhands.app_server.utils.sql_utils import Base +from openhands.integrations.provider import ProviderType +from openhands.sdk.llm import MetricsSnapshot +from openhands.sdk.llm.utils.metrics import TokenUsage +from openhands.storage.data_models.conversation_metadata import ConversationTrigger + + +@pytest.fixture +async def async_engine(): + """Create an async SQLite engine for testing.""" + engine = create_async_engine( + 'sqlite+aiosqlite:///:memory:', + poolclass=StaticPool, + connect_args={'check_same_thread': False}, + echo=False, + ) + + # Create all tables + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + await engine.dispose() + + +@pytest.fixture +async def async_session(async_engine) -> AsyncGenerator[AsyncSession, None]: + """Create an async session for testing.""" + async_session_maker = async_sessionmaker( + async_engine, class_=AsyncSession, expire_on_commit=False + ) + + async with async_session_maker() as db_session: + yield db_session + + +@pytest.fixture +async def shared_conversation_info_service(async_session): + """Create a SharedConversationInfoService for testing.""" + return SQLSharedConversationInfoService(db_session=async_session) + + +@pytest.fixture +async def app_conversation_service(async_session): + """Create an AppConversationInfoService for creating test data.""" + return SQLAppConversationInfoService( + db_session=async_session, user_context=SpecifyUserContext(user_id=None) + ) + + +@pytest.fixture +def sample_conversation_info(): + """Create a sample conversation info for testing.""" + return AppConversationInfo( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id='test_sandbox', + selected_repository='test/repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + title='Test Conversation', + trigger=ConversationTrigger.GUI, + pr_number=[123], + llm_model='gpt-4', + metrics=MetricsSnapshot( + accumulated_cost=1.5, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage( + prompt_tokens=100, + completion_tokens=50, + cache_read_tokens=0, + cache_write_tokens=0, + context_window=4096, + per_turn_token=150, + ), + ), + parent_conversation_id=None, + sub_conversation_ids=[], + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + public=True, # Make it public for testing + ) + + +@pytest.fixture +def sample_private_conversation_info(): + """Create a sample private conversation info for testing.""" + return AppConversationInfo( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id='test_sandbox_private', + selected_repository='test/private_repo', + selected_branch='main', + git_provider=ProviderType.GITHUB, + title='Private Conversation', + trigger=ConversationTrigger.GUI, + pr_number=[124], + llm_model='gpt-4', + metrics=MetricsSnapshot( + accumulated_cost=2.0, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage( + prompt_tokens=200, + completion_tokens=100, + cache_read_tokens=0, + cache_write_tokens=0, + context_window=4096, + per_turn_token=300, + ), + ), + parent_conversation_id=None, + sub_conversation_ids=[], + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + public=False, # Make it private + ) + + +class TestSharedConversationInfoService: + """Test cases for SharedConversationInfoService.""" + + @pytest.mark.asyncio + @pytest.mark.asyncio + async def test_get_shared_conversation_info_returns_public_conversation( + self, + shared_conversation_info_service, + app_conversation_service, + sample_conversation_info, + ): + """Test that get_shared_conversation_info returns a public conversation.""" + # Create a public conversation + await app_conversation_service.save_app_conversation_info( + sample_conversation_info + ) + + # Retrieve it via public service + result = await shared_conversation_info_service.get_shared_conversation_info( + sample_conversation_info.id + ) + + assert result is not None + assert result.id == sample_conversation_info.id + assert result.title == sample_conversation_info.title + assert result.created_by_user_id == sample_conversation_info.created_by_user_id + + @pytest.mark.asyncio + async def test_get_shared_conversation_info_returns_none_for_private_conversation( + self, + shared_conversation_info_service, + app_conversation_service, + sample_private_conversation_info, + ): + """Test that get_shared_conversation_info returns None for private conversations.""" + # Create a private conversation + await app_conversation_service.save_app_conversation_info( + sample_private_conversation_info + ) + + # Try to retrieve it via public service + result = await shared_conversation_info_service.get_shared_conversation_info( + sample_private_conversation_info.id + ) + + assert result is None + + @pytest.mark.asyncio + async def test_get_shared_conversation_info_returns_none_for_nonexistent_conversation( + self, shared_conversation_info_service + ): + """Test that get_shared_conversation_info returns None for nonexistent conversations.""" + nonexistent_id = uuid4() + result = await shared_conversation_info_service.get_shared_conversation_info( + nonexistent_id + ) + assert result is None + + @pytest.mark.asyncio + async def test_search_shared_conversation_info_returns_only_public_conversations( + self, + shared_conversation_info_service, + app_conversation_service, + sample_conversation_info, + sample_private_conversation_info, + ): + """Test that search only returns public conversations.""" + # Create both public and private conversations + await app_conversation_service.save_app_conversation_info( + sample_conversation_info + ) + await app_conversation_service.save_app_conversation_info( + sample_private_conversation_info + ) + + # Search for all conversations + result = ( + await shared_conversation_info_service.search_shared_conversation_info() + ) + + # Should only return the public conversation + assert len(result.items) == 1 + assert result.items[0].id == sample_conversation_info.id + assert result.items[0].title == sample_conversation_info.title + + @pytest.mark.asyncio + async def test_search_shared_conversation_info_with_title_filter( + self, + shared_conversation_info_service, + app_conversation_service, + sample_conversation_info, + ): + """Test searching with title filter.""" + # Create a public conversation + await app_conversation_service.save_app_conversation_info( + sample_conversation_info + ) + + # Search with matching title + result = await shared_conversation_info_service.search_shared_conversation_info( + title__contains='Test' + ) + assert len(result.items) == 1 + + # Search with non-matching title + result = await shared_conversation_info_service.search_shared_conversation_info( + title__contains='NonExistent' + ) + assert len(result.items) == 0 + + @pytest.mark.asyncio + async def test_search_shared_conversation_info_with_sort_order( + self, + shared_conversation_info_service, + app_conversation_service, + ): + """Test searching with different sort orders.""" + # Create multiple public conversations with different titles and timestamps + conv1 = AppConversationInfo( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id='test_sandbox_1', + title='A First Conversation', + created_at=datetime(2023, 1, 1, tzinfo=UTC), + updated_at=datetime(2023, 1, 1, tzinfo=UTC), + public=True, + metrics=MetricsSnapshot( + accumulated_cost=0.0, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage(), + ), + ) + conv2 = AppConversationInfo( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id='test_sandbox_2', + title='B Second Conversation', + created_at=datetime(2023, 1, 2, tzinfo=UTC), + updated_at=datetime(2023, 1, 2, tzinfo=UTC), + public=True, + metrics=MetricsSnapshot( + accumulated_cost=0.0, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage(), + ), + ) + + await app_conversation_service.save_app_conversation_info(conv1) + await app_conversation_service.save_app_conversation_info(conv2) + + # Test sort by title ascending + result = await shared_conversation_info_service.search_shared_conversation_info( + sort_order=SharedConversationSortOrder.TITLE + ) + assert len(result.items) == 2 + assert result.items[0].title == 'A First Conversation' + assert result.items[1].title == 'B Second Conversation' + + # Test sort by title descending + result = await shared_conversation_info_service.search_shared_conversation_info( + sort_order=SharedConversationSortOrder.TITLE_DESC + ) + assert len(result.items) == 2 + assert result.items[0].title == 'B Second Conversation' + assert result.items[1].title == 'A First Conversation' + + # Test sort by created_at ascending + result = await shared_conversation_info_service.search_shared_conversation_info( + sort_order=SharedConversationSortOrder.CREATED_AT + ) + assert len(result.items) == 2 + assert result.items[0].id == conv1.id + assert result.items[1].id == conv2.id + + # Test sort by created_at descending (default) + result = await shared_conversation_info_service.search_shared_conversation_info( + sort_order=SharedConversationSortOrder.CREATED_AT_DESC + ) + assert len(result.items) == 2 + assert result.items[0].id == conv2.id + assert result.items[1].id == conv1.id + + @pytest.mark.asyncio + async def test_count_shared_conversation_info( + self, + shared_conversation_info_service, + app_conversation_service, + sample_conversation_info, + sample_private_conversation_info, + ): + """Test counting public conversations.""" + # Initially should be 0 + count = await shared_conversation_info_service.count_shared_conversation_info() + assert count == 0 + + # Create a public conversation + await app_conversation_service.save_app_conversation_info( + sample_conversation_info + ) + count = await shared_conversation_info_service.count_shared_conversation_info() + assert count == 1 + + # Create a private conversation - count should remain 1 + await app_conversation_service.save_app_conversation_info( + sample_private_conversation_info + ) + count = await shared_conversation_info_service.count_shared_conversation_info() + assert count == 1 + + @pytest.mark.asyncio + async def test_batch_get_shared_conversation_info( + self, + shared_conversation_info_service, + app_conversation_service, + sample_conversation_info, + sample_private_conversation_info, + ): + """Test batch getting public conversations.""" + # Create both public and private conversations + await app_conversation_service.save_app_conversation_info( + sample_conversation_info + ) + await app_conversation_service.save_app_conversation_info( + sample_private_conversation_info + ) + + # Batch get both conversations + result = ( + await shared_conversation_info_service.batch_get_shared_conversation_info( + [sample_conversation_info.id, sample_private_conversation_info.id] + ) + ) + + # Should return the public one and None for the private one + assert len(result) == 2 + assert result[0] is not None + assert result[0].id == sample_conversation_info.id + assert result[1] is None + + @pytest.mark.asyncio + async def test_search_with_pagination( + self, + shared_conversation_info_service, + app_conversation_service, + ): + """Test search with pagination.""" + # Create multiple public conversations + conversations = [] + for i in range(5): + conv = AppConversationInfo( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id=f'test_sandbox_{i}', + title=f'Conversation {i}', + created_at=datetime(2023, 1, i + 1, tzinfo=UTC), + updated_at=datetime(2023, 1, i + 1, tzinfo=UTC), + public=True, + metrics=MetricsSnapshot( + accumulated_cost=0.0, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage(), + ), + ) + conversations.append(conv) + await app_conversation_service.save_app_conversation_info(conv) + + # Get first page with limit 2 + result = await shared_conversation_info_service.search_shared_conversation_info( + limit=2, sort_order=SharedConversationSortOrder.CREATED_AT + ) + assert len(result.items) == 2 + assert result.next_page_id is not None + + # Get next page + result2 = ( + await shared_conversation_info_service.search_shared_conversation_info( + limit=2, + page_id=result.next_page_id, + sort_order=SharedConversationSortOrder.CREATED_AT, + ) + ) + assert len(result2.items) == 2 + assert result2.next_page_id is not None + + # Verify no overlap between pages + page1_ids = {item.id for item in result.items} + page2_ids = {item.id for item in result2.items} + assert page1_ids.isdisjoint(page2_ids) diff --git a/enterprise/tests/unit/test_sharing/test_sharing_shared_event_service.py b/enterprise/tests/unit/test_sharing/test_sharing_shared_event_service.py new file mode 100644 index 0000000000..e12e8f0fad --- /dev/null +++ b/enterprise/tests/unit/test_sharing/test_sharing_shared_event_service.py @@ -0,0 +1,365 @@ +"""Tests for SharedEventService.""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock +from uuid import uuid4 + +import pytest +from server.sharing.filesystem_shared_event_service import ( + SharedEventServiceImpl, +) +from server.sharing.shared_conversation_info_service import ( + SharedConversationInfoService, +) +from server.sharing.shared_conversation_models import SharedConversation + +from openhands.agent_server.models import EventPage, EventSortOrder +from openhands.app_server.event.event_service import EventService +from openhands.sdk.llm import MetricsSnapshot +from openhands.sdk.llm.utils.metrics import TokenUsage + + +@pytest.fixture +def mock_shared_conversation_info_service(): + """Create a mock SharedConversationInfoService.""" + return AsyncMock(spec=SharedConversationInfoService) + + +@pytest.fixture +def mock_event_service(): + """Create a mock EventService.""" + return AsyncMock(spec=EventService) + + +@pytest.fixture +def shared_event_service(mock_shared_conversation_info_service, mock_event_service): + """Create a SharedEventService for testing.""" + return SharedEventServiceImpl( + shared_conversation_info_service=mock_shared_conversation_info_service, + event_service=mock_event_service, + ) + + +@pytest.fixture +def sample_public_conversation(): + """Create a sample public conversation.""" + return SharedConversation( + id=uuid4(), + created_by_user_id='test_user', + sandbox_id='test_sandbox', + title='Test Public Conversation', + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + metrics=MetricsSnapshot( + accumulated_cost=0.0, + max_budget_per_task=10.0, + accumulated_token_usage=TokenUsage(), + ), + ) + + +@pytest.fixture +def sample_event(): + """Create a sample event.""" + # For testing purposes, we'll just use a mock that the EventPage can accept + # The actual event creation is complex and not the focus of these tests + return None + + +class TestSharedEventService: + """Test cases for SharedEventService.""" + + async def test_get_shared_event_returns_event_for_public_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + sample_public_conversation, + sample_event, + ): + """Test that get_shared_event returns an event for a public conversation.""" + conversation_id = sample_public_conversation.id + event_id = 'test_event_id' + + # Mock the public conversation service to return a public conversation + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation + + # Mock the event service to return an event + mock_event_service.get_event.return_value = sample_event + + # Call the method + result = await shared_event_service.get_shared_event(conversation_id, event_id) + + # Verify the result + assert result == sample_event + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + mock_event_service.get_event.assert_called_once_with(event_id) + + async def test_get_shared_event_returns_none_for_private_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + ): + """Test that get_shared_event returns None for a private conversation.""" + conversation_id = uuid4() + event_id = 'test_event_id' + + # Mock the public conversation service to return None (private conversation) + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None + + # Call the method + result = await shared_event_service.get_shared_event(conversation_id, event_id) + + # Verify the result + assert result is None + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + # Event service should not be called + mock_event_service.get_event.assert_not_called() + + async def test_search_shared_events_returns_events_for_public_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + sample_public_conversation, + sample_event, + ): + """Test that search_shared_events returns events for a public conversation.""" + conversation_id = sample_public_conversation.id + + # Mock the public conversation service to return a public conversation + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation + + # Mock the event service to return events + mock_event_page = EventPage(items=[], next_page_id=None) + mock_event_service.search_events.return_value = mock_event_page + + # Call the method + result = await shared_event_service.search_shared_events( + conversation_id=conversation_id, + kind__eq='ActionEvent', + limit=10, + ) + + # Verify the result + assert result == mock_event_page + assert len(result.items) == 0 # Empty list as we mocked + + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + mock_event_service.search_events.assert_called_once_with( + conversation_id__eq=conversation_id, + kind__eq='ActionEvent', + timestamp__gte=None, + timestamp__lt=None, + sort_order=EventSortOrder.TIMESTAMP, + page_id=None, + limit=10, + ) + + async def test_search_shared_events_returns_empty_for_private_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + ): + """Test that search_shared_events returns empty page for a private conversation.""" + conversation_id = uuid4() + + # Mock the public conversation service to return None (private conversation) + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None + + # Call the method + result = await shared_event_service.search_shared_events( + conversation_id=conversation_id, + limit=10, + ) + + # Verify the result + assert isinstance(result, EventPage) + assert len(result.items) == 0 + assert result.next_page_id is None + + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + # Event service should not be called + mock_event_service.search_events.assert_not_called() + + async def test_count_shared_events_returns_count_for_public_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + sample_public_conversation, + ): + """Test that count_shared_events returns count for a public conversation.""" + conversation_id = sample_public_conversation.id + + # Mock the public conversation service to return a public conversation + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation + + # Mock the event service to return a count + mock_event_service.count_events.return_value = 5 + + # Call the method + result = await shared_event_service.count_shared_events( + conversation_id=conversation_id, + kind__eq='ActionEvent', + ) + + # Verify the result + assert result == 5 + + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + mock_event_service.count_events.assert_called_once_with( + conversation_id__eq=conversation_id, + kind__eq='ActionEvent', + timestamp__gte=None, + timestamp__lt=None, + sort_order=EventSortOrder.TIMESTAMP, + ) + + async def test_count_shared_events_returns_zero_for_private_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + ): + """Test that count_shared_events returns 0 for a private conversation.""" + conversation_id = uuid4() + + # Mock the public conversation service to return None (private conversation) + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None + + # Call the method + result = await shared_event_service.count_shared_events( + conversation_id=conversation_id, + ) + + # Verify the result + assert result == 0 + + mock_shared_conversation_info_service.get_shared_conversation_info.assert_called_once_with( + conversation_id + ) + # Event service should not be called + mock_event_service.count_events.assert_not_called() + + async def test_batch_get_shared_events_returns_events_for_public_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + sample_public_conversation, + sample_event, + ): + """Test that batch_get_shared_events returns events for a public conversation.""" + conversation_id = sample_public_conversation.id + event_ids = ['event1', 'event2'] + + # Mock the public conversation service to return a public conversation + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation + + # Mock the event service to return events + mock_event_service.get_event.side_effect = [sample_event, None] + + # Call the method + result = await shared_event_service.batch_get_shared_events( + conversation_id, event_ids + ) + + # Verify the result + assert len(result) == 2 + assert result[0] == sample_event + assert result[1] is None + + # Verify that get_shared_conversation_info was called for each event + assert ( + mock_shared_conversation_info_service.get_shared_conversation_info.call_count + == 2 + ) + # Verify that get_event was called for each event + assert mock_event_service.get_event.call_count == 2 + + async def test_batch_get_shared_events_returns_none_for_private_conversation( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + ): + """Test that batch_get_shared_events returns None for a private conversation.""" + conversation_id = uuid4() + event_ids = ['event1', 'event2'] + + # Mock the public conversation service to return None (private conversation) + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = None + + # Call the method + result = await shared_event_service.batch_get_shared_events( + conversation_id, event_ids + ) + + # Verify the result + assert len(result) == 2 + assert result[0] is None + assert result[1] is None + + # Verify that get_shared_conversation_info was called for each event + assert ( + mock_shared_conversation_info_service.get_shared_conversation_info.call_count + == 2 + ) + # Event service should not be called + mock_event_service.get_event.assert_not_called() + + async def test_search_shared_events_with_all_parameters( + self, + shared_event_service, + mock_shared_conversation_info_service, + mock_event_service, + sample_public_conversation, + ): + """Test search_shared_events with all parameters.""" + conversation_id = sample_public_conversation.id + timestamp_gte = datetime(2023, 1, 1, tzinfo=UTC) + timestamp_lt = datetime(2023, 12, 31, tzinfo=UTC) + + # Mock the public conversation service to return a public conversation + mock_shared_conversation_info_service.get_shared_conversation_info.return_value = sample_public_conversation + + # Mock the event service to return events + mock_event_page = EventPage(items=[], next_page_id='next_page') + mock_event_service.search_events.return_value = mock_event_page + + # Call the method with all parameters + result = await shared_event_service.search_shared_events( + conversation_id=conversation_id, + kind__eq='ObservationEvent', + timestamp__gte=timestamp_gte, + timestamp__lt=timestamp_lt, + sort_order=EventSortOrder.TIMESTAMP_DESC, + page_id='current_page', + limit=50, + ) + + # Verify the result + assert result == mock_event_page + + mock_event_service.search_events.assert_called_once_with( + conversation_id__eq=conversation_id, + kind__eq='ObservationEvent', + timestamp__gte=timestamp_gte, + timestamp__lt=timestamp_lt, + sort_order=EventSortOrder.TIMESTAMP_DESC, + page_id='current_page', + limit=50, + ) diff --git a/openhands/app_server/app_conversation/app_conversation_models.py b/openhands/app_server/app_conversation/app_conversation_models.py index 1c0ba914cb..58a63a95d6 100644 --- a/openhands/app_server/app_conversation/app_conversation_models.py +++ b/openhands/app_server/app_conversation/app_conversation_models.py @@ -45,6 +45,8 @@ class AppConversationInfo(BaseModel): parent_conversation_id: OpenHandsUUID | None = None sub_conversation_ids: list[OpenHandsUUID] = Field(default_factory=list) + public: bool | None = None + created_at: datetime = Field(default_factory=utc_now) updated_at: datetime = Field(default_factory=utc_now) @@ -114,6 +116,12 @@ class AppConversationStartRequest(BaseModel): parent_conversation_id: OpenHandsUUID | None = None agent_type: AgentType = Field(default=AgentType.DEFAULT) + public: bool | None = None + + +class AppConversationUpdateRequest(BaseModel): + public: bool + class AppConversationStartTaskStatus(Enum): WORKING = 'WORKING' diff --git a/openhands/app_server/app_conversation/app_conversation_router.py b/openhands/app_server/app_conversation/app_conversation_router.py index f68b80ba4e..29ae3f69d7 100644 --- a/openhands/app_server/app_conversation/app_conversation_router.py +++ b/openhands/app_server/app_conversation/app_conversation_router.py @@ -40,6 +40,7 @@ from openhands.app_server.app_conversation.app_conversation_models import ( AppConversationStartTask, AppConversationStartTaskPage, AppConversationStartTaskSortOrder, + AppConversationUpdateRequest, SkillResponse, ) from openhands.app_server.app_conversation.app_conversation_service import ( @@ -222,6 +223,22 @@ async def start_app_conversation( raise +@router.patch('/{conversation_id}') +async def update_app_conversation( + conversation_id: str, + update_request: AppConversationUpdateRequest, + app_conversation_service: AppConversationService = ( + app_conversation_service_dependency + ), +) -> AppConversation: + info = await app_conversation_service.update_app_conversation( + UUID(conversation_id), update_request + ) + if info is None: + raise HTTPException(404, 'unknown_app_conversation') + return info + + @router.post('/stream-start') async def stream_app_conversation_start( request: AppConversationStartRequest, diff --git a/openhands/app_server/app_conversation/app_conversation_service.py b/openhands/app_server/app_conversation/app_conversation_service.py index b1b10c39ba..dd98dd44c9 100644 --- a/openhands/app_server/app_conversation/app_conversation_service.py +++ b/openhands/app_server/app_conversation/app_conversation_service.py @@ -10,6 +10,7 @@ from openhands.app_server.app_conversation.app_conversation_models import ( AppConversationSortOrder, AppConversationStartRequest, AppConversationStartTask, + AppConversationUpdateRequest, ) from openhands.app_server.sandbox.sandbox_models import SandboxInfo from openhands.app_server.services.injector import Injector @@ -98,6 +99,13 @@ class AppConversationService(ABC): """Run the setup scripts for the project and yield status updates""" yield task + @abstractmethod + async def update_app_conversation( + self, conversation_id: UUID, request: AppConversationUpdateRequest + ) -> AppConversation | None: + """Update an app conversation and return it. Return None if the conversation + did not exist.""" + @abstractmethod async def delete_app_conversation(self, conversation_id: UUID) -> bool: """Delete a V1 conversation and all its associated data. diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index 84f20de07a..11d9e4fef8 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -32,6 +32,7 @@ from openhands.app_server.app_conversation.app_conversation_models import ( AppConversationStartRequest, AppConversationStartTask, AppConversationStartTaskStatus, + AppConversationUpdateRequest, ) from openhands.app_server.app_conversation.app_conversation_service import ( AppConversationService, @@ -1049,6 +1050,23 @@ class LiveStatusAppConversationService(AppConversationServiceBase): f'Successfully updated agent-server conversation {conversation_id} title to "{new_title}"' ) + async def update_app_conversation( + self, conversation_id: UUID, request: AppConversationUpdateRequest + ) -> AppConversation | None: + """Update an app conversation and return it. Return None if the conversation + did not exist.""" + info = await self.app_conversation_info_service.get_app_conversation_info( + conversation_id + ) + if info is None: + return None + for field_name in request.model_fields: + value = getattr(request, field_name) + setattr(info, field_name, value) + info = await self.app_conversation_info_service.save_app_conversation_info(info) + conversations = await self._build_app_conversations([info]) + return conversations[0] + async def delete_app_conversation(self, conversation_id: UUID) -> bool: """Delete a V1 conversation and all its associated data. diff --git a/openhands/app_server/app_conversation/sql_app_conversation_info_service.py b/openhands/app_server/app_conversation/sql_app_conversation_info_service.py index 83e2d1915b..7764b99e77 100644 --- a/openhands/app_server/app_conversation/sql_app_conversation_info_service.py +++ b/openhands/app_server/app_conversation/sql_app_conversation_info_service.py @@ -25,7 +25,17 @@ from typing import AsyncGenerator from uuid import UUID from fastapi import Request -from sqlalchemy import Column, DateTime, Float, Integer, Select, String, func, select +from sqlalchemy import ( + Boolean, + Column, + DateTime, + Float, + Integer, + Select, + String, + func, + select, +) from sqlalchemy.ext.asyncio import AsyncSession from openhands.agent_server.utils import utc_now @@ -91,6 +101,7 @@ class StoredConversationMetadata(Base): # type: ignore conversation_version = Column(String, nullable=False, default='V0', index=True) sandbox_id = Column(String, nullable=True, index=True) parent_conversation_id = Column(String, nullable=True, index=True) + public = Column(Boolean, nullable=True, index=True) @dataclass @@ -350,6 +361,7 @@ class SQLAppConversationInfoService(AppConversationInfoService): if info.parent_conversation_id else None ), + public=info.public, ) await self.db_session.merge(stored) @@ -541,6 +553,7 @@ class SQLAppConversationInfoService(AppConversationInfoService): else None ), sub_conversation_ids=sub_conversation_ids or [], + public=stored.public, created_at=created_at, updated_at=updated_at, ) diff --git a/openhands/app_server/app_lifespan/alembic/versions/004.py b/openhands/app_server/app_lifespan/alembic/versions/004.py new file mode 100644 index 0000000000..2d5ef07f41 --- /dev/null +++ b/openhands/app_server/app_lifespan/alembic/versions/004.py @@ -0,0 +1,41 @@ +"""add public column to conversation_metadata + +Revision ID: 004 +Revises: 003 +Create Date: 2025-01-27 00:00:00.000000 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision: str = '004' +down_revision: Union[str, None] = '003' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + op.add_column( + 'conversation_metadata', + sa.Column('public', sa.Boolean(), nullable=True), + ) + op.create_index( + op.f('ix_conversation_metadata_public'), + 'conversation_metadata', + ['public'], + unique=False, + ) + + +def downgrade() -> None: + """Downgrade schema.""" + op.drop_index( + op.f('ix_conversation_metadata_public'), + table_name='conversation_metadata', + ) + op.drop_column('conversation_metadata', 'public') diff --git a/openhands/app_server/event/event_router.py b/openhands/app_server/event/event_router.py index 3476c155e9..3431bf2815 100644 --- a/openhands/app_server/event/event_router.py +++ b/openhands/app_server/event/event_router.py @@ -22,7 +22,7 @@ event_service_dependency = depends_event_service() @router.get('/search') async def search_events( conversation_id__eq: Annotated[ - UUID | None, + str | None, Query(title='Optional filter by conversation ID'), ] = None, kind__eq: Annotated[ @@ -55,7 +55,7 @@ async def search_events( assert limit > 0 assert limit <= 100 return await event_service.search_events( - conversation_id__eq=conversation_id__eq, + conversation_id__eq=UUID(conversation_id__eq) if conversation_id__eq else None, kind__eq=kind__eq, timestamp__gte=timestamp__gte, timestamp__lt=timestamp__lt, @@ -68,7 +68,7 @@ async def search_events( @router.get('/count') async def count_events( conversation_id__eq: Annotated[ - UUID | None, + str | None, Query(title='Optional filter by conversation ID'), ] = None, kind__eq: Annotated[ @@ -91,7 +91,7 @@ async def count_events( ) -> int: """Count events matching the given filters.""" return await event_service.count_events( - conversation_id__eq=conversation_id__eq, + conversation_id__eq=UUID(conversation_id__eq) if conversation_id__eq else None, kind__eq=kind__eq, timestamp__gte=timestamp__gte, timestamp__lt=timestamp__lt, diff --git a/openhands/app_server/event/filesystem_event_service.py b/openhands/app_server/event/filesystem_event_service.py index 05e2ed9350..1f98fcec05 100644 --- a/openhands/app_server/event/filesystem_event_service.py +++ b/openhands/app_server/event/filesystem_event_service.py @@ -1,32 +1,27 @@ """Filesystem-based EventService implementation.""" -import asyncio -import glob import json -import logging from dataclasses import dataclass -from datetime import datetime from pathlib import Path from typing import AsyncGenerator from uuid import UUID from fastapi import Request -from openhands.agent_server.models import EventPage, EventSortOrder from openhands.app_server.app_conversation.app_conversation_info_service import ( AppConversationInfoService, ) from openhands.app_server.errors import OpenHandsError from openhands.app_server.event.event_service import EventService, EventServiceInjector -from openhands.app_server.event_callback.event_callback_models import EventKind +from openhands.app_server.event.filesystem_event_service_base import ( + FilesystemEventServiceBase, +) from openhands.app_server.services.injector import InjectorState from openhands.sdk import Event -_logger = logging.getLogger(__name__) - @dataclass -class FilesystemEventService(EventService): +class FilesystemEventService(FilesystemEventServiceBase, EventService): """Filesystem-based implementation of EventService. Events are stored in files with the naming format: @@ -47,25 +42,6 @@ class FilesystemEventService(EventService): events_path.mkdir(parents=True, exist_ok=True) return events_path - def _timestamp_to_str(self, timestamp: datetime | str) -> str: - """Convert timestamp to YYYYMMDDHHMMSS format.""" - if isinstance(timestamp, str): - # Parse ISO format timestamp string - dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00')) - return dt.strftime('%Y%m%d%H%M%S') - return timestamp.strftime('%Y%m%d%H%M%S') - - def _get_event_filename(self, conversation_id: UUID, event: Event) -> str: - """Generate filename using YYYYMMDDHHMMSS_kind_id.hex format.""" - timestamp_str = self._timestamp_to_str(event.timestamp) - kind = event.__class__.__name__ - # Handle both UUID objects and string UUIDs - if isinstance(event.id, str): - id_hex = event.id.replace('-', '') - else: - id_hex = event.id.hex - return f'{timestamp_str}_{kind}_{id_hex}' - def _save_event_to_file(self, conversation_id: UUID, event: Event) -> None: """Save an event to a file.""" events_path = self._ensure_events_dir(conversation_id) @@ -77,60 +53,17 @@ class FilesystemEventService(EventService): data = event.model_dump(mode='json') f.write(json.dumps(data, indent=2)) - def _load_events_from_files(self, file_paths: list[Path]) -> list[Event]: - events = [] - for file_path in file_paths: - event = self._load_event_from_file(file_path) - if event is not None: - events.append(event) - return events - - def _load_event_from_file(self, filepath: Path) -> Event | None: - """Load an event from a file.""" - try: - json_data = filepath.read_text() - return Event.model_validate_json(json_data) - except Exception: - return None - - def _get_event_files_by_pattern( - self, pattern: str, conversation_id: UUID | None = None - ) -> list[Path]: - """Get event files matching a glob pattern, sorted by timestamp.""" - if conversation_id: - search_path = self.events_dir / str(conversation_id) / pattern - else: - search_path = self.events_dir / '*' / pattern - - files = glob.glob(str(search_path)) - return sorted([Path(f) for f in files]) - - def _parse_filename(self, filename: str) -> dict[str, str] | None: - """Parse filename to extract timestamp, kind, and event_id.""" - try: - parts = filename.split('_') - if len(parts) >= 3: - timestamp_str = parts[0] - kind = '_'.join(parts[1:-1]) # Handle kinds with underscores - event_id = parts[-1] - return {'timestamp': timestamp_str, 'kind': kind, 'event_id': event_id} - except Exception: - pass - return None - - def _get_conversation_id(self, file: Path) -> UUID | None: - try: - return UUID(file.parent.name) - except Exception: - return None - - def _get_conversation_ids(self, files: list[Path]) -> set[UUID]: - result = set() - for file in files: - conversation_id = self._get_conversation_id(file) - if conversation_id: - result.add(conversation_id) - return result + async def save_event(self, conversation_id: UUID, event: Event): + """Save an event. Internal method intended not be part of the REST api.""" + conversation = ( + await self.app_conversation_info_service.get_app_conversation_info( + conversation_id + ) + ) + if not conversation: + # This is either an illegal state or somebody is trying to hack + raise OpenHandsError('No such conversation: {conversaiont_id}') + self._save_event_to_file(conversation_id, event) async def _filter_files_by_conversation(self, files: list[Path]) -> list[Path]: conversation_ids = list(self._get_conversation_ids(files)) @@ -150,161 +83,6 @@ class FilesystemEventService(EventService): ] return result - def _filter_files_by_criteria( - self, - files: list[Path], - conversation_id__eq: UUID | None = None, - kind__eq: EventKind | None = None, - timestamp__gte: datetime | None = None, - timestamp__lt: datetime | None = None, - ) -> list[Path]: - """Filter files based on search criteria.""" - filtered_files = [] - - for file_path in files: - # Check conversation_id filter - if conversation_id__eq: - if str(conversation_id__eq) not in str(file_path): - continue - - # Parse filename for additional filtering - filename_info = self._parse_filename(file_path.name) - if not filename_info: - continue - - # Check kind filter - if kind__eq and filename_info['kind'] != kind__eq: - continue - - # Check timestamp filters - if timestamp__gte or timestamp__lt: - try: - file_timestamp = datetime.strptime( - filename_info['timestamp'], '%Y%m%d%H%M%S' - ) - if timestamp__gte and file_timestamp < timestamp__gte: - continue - if timestamp__lt and file_timestamp >= timestamp__lt: - continue - except ValueError: - continue - - filtered_files.append(file_path) - - return filtered_files - - async def get_event(self, event_id: str) -> Event | None: - """Get the event with the given id, or None if not found.""" - # Convert event_id to hex format (remove dashes) for filename matching - if isinstance(event_id, str) and '-' in event_id: - id_hex = event_id.replace('-', '') - else: - id_hex = event_id - - # Use glob pattern to find files ending with the event_id - pattern = f'*_{id_hex}' - files = self._get_event_files_by_pattern(pattern) - - if not files: - return None - - # If there is no access to the conversation do not return the event - file = files[0] - conversation_id = self._get_conversation_id(file) - if not conversation_id: - return None - conversation = ( - await self.app_conversation_info_service.get_app_conversation_info( - conversation_id - ) - ) - if not conversation: - return None - - # Load and return the first matching event - return self._load_event_from_file(file) - - async def search_events( - self, - conversation_id__eq: UUID | None = None, - kind__eq: EventKind | None = None, - timestamp__gte: datetime | None = None, - timestamp__lt: datetime | None = None, - sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, - page_id: str | None = None, - limit: int = 100, - ) -> EventPage: - """Search for events matching the given filters.""" - # Build the search pattern - pattern = '*' - files = self._get_event_files_by_pattern(pattern, conversation_id__eq) - - files = await self._filter_files_by_conversation(files) - - files = self._filter_files_by_criteria( - files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt - ) - - files.sort( - key=lambda f: f.name, - reverse=(sort_order == EventSortOrder.TIMESTAMP_DESC), - ) - - # Handle pagination - start_index = 0 - if page_id: - for i, file_path in enumerate(files): - if file_path.name == page_id: - start_index = i + 1 - break - - # Collect items for this page - page_files = files[start_index : start_index + limit] - next_page_id = None - if start_index + limit < len(files): - next_page_id = files[start_index + limit].name - - # Load all events from files in a background thread. - loop = asyncio.get_running_loop() - page_events = await loop.run_in_executor( - None, self._load_events_from_files, page_files - ) - - return EventPage(items=page_events, next_page_id=next_page_id) - - async def count_events( - self, - conversation_id__eq: UUID | None = None, - kind__eq: EventKind | None = None, - timestamp__gte: datetime | None = None, - timestamp__lt: datetime | None = None, - sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, - ) -> int: - """Count events matching the given filters.""" - # Build the search pattern - pattern = '*' - files = self._get_event_files_by_pattern(pattern, conversation_id__eq) - - files = await self._filter_files_by_conversation(files) - - files = self._filter_files_by_criteria( - files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt - ) - - return len(files) - - async def save_event(self, conversation_id: UUID, event: Event): - """Save an event. Internal method intended not be part of the REST api.""" - conversation = ( - await self.app_conversation_info_service.get_app_conversation_info( - conversation_id - ) - ) - if not conversation: - # This is either an illegal state or somebody is trying to hack - raise OpenHandsError('No such conversation: {conversaiont_id}') - self._save_event_to_file(conversation_id, event) - class FilesystemEventServiceInjector(EventServiceInjector): async def inject( diff --git a/openhands/app_server/event/filesystem_event_service_base.py b/openhands/app_server/event/filesystem_event_service_base.py new file mode 100644 index 0000000000..b957f5f24a --- /dev/null +++ b/openhands/app_server/event/filesystem_event_service_base.py @@ -0,0 +1,224 @@ +import asyncio +import glob +from abc import abstractmethod +from datetime import datetime +from pathlib import Path +from uuid import UUID + +from openhands.agent_server.models import EventPage, EventSortOrder +from openhands.app_server.event_callback.event_callback_models import EventKind +from openhands.sdk import Event + + +class FilesystemEventServiceBase: + events_dir: Path + + async def get_event(self, event_id: str) -> Event | None: + """Get the event with the given id, or None if not found.""" + # Convert event_id to hex format (remove dashes) for filename matching + if isinstance(event_id, str) and '-' in event_id: + id_hex = event_id.replace('-', '') + else: + id_hex = event_id + + # Use glob pattern to find files ending with the event_id + pattern = f'*_{id_hex}' + files = self._get_event_files_by_pattern(pattern) + + files = await self._filter_files_by_conversation(files) + + if not files: + return None + + # Load and return the first matching event + return self._load_event_from_file(files[0]) + + async def search_events( + self, + conversation_id__eq: UUID | None = None, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + page_id: str | None = None, + limit: int = 100, + ) -> EventPage: + """Search for events matching the given filters.""" + # Build the search pattern + pattern = '*' + files = self._get_event_files_by_pattern(pattern, conversation_id__eq) + + files = await self._filter_files_by_conversation(files) + + files = self._filter_files_by_criteria( + files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt + ) + + files.sort( + key=lambda f: f.name, + reverse=(sort_order == EventSortOrder.TIMESTAMP_DESC), + ) + + # Handle pagination + start_index = 0 + if page_id: + for i, file_path in enumerate(files): + if file_path.name == page_id: + start_index = i + 1 + break + + # Collect items for this page + page_files = files[start_index : start_index + limit] + next_page_id = None + if start_index + limit < len(files): + next_page_id = files[start_index + limit].name + + # Load all events from files in a background thread. + loop = asyncio.get_running_loop() + page_events = await loop.run_in_executor( + None, self._load_events_from_files, page_files + ) + + return EventPage(items=page_events, next_page_id=next_page_id) + + async def count_events( + self, + conversation_id__eq: UUID | None = None, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + sort_order: EventSortOrder = EventSortOrder.TIMESTAMP, + ) -> int: + """Count events matching the given filters.""" + # Build the search pattern + pattern = '*' + files = self._get_event_files_by_pattern(pattern, conversation_id__eq) + + files = await self._filter_files_by_conversation(files) + + files = self._filter_files_by_criteria( + files, conversation_id__eq, kind__eq, timestamp__gte, timestamp__lt + ) + + return len(files) + + def _get_event_filename(self, conversation_id: UUID, event: Event) -> str: + """Generate filename using YYYYMMDDHHMMSS_kind_id.hex format.""" + timestamp_str = self._timestamp_to_str(event.timestamp) + kind = event.__class__.__name__ + # Handle both UUID objects and string UUIDs + if isinstance(event.id, str): + id_hex = event.id.replace('-', '') + else: + id_hex = event.id.hex + return f'{timestamp_str}_{kind}_{id_hex}' + + def _timestamp_to_str(self, timestamp: datetime | str) -> str: + """Convert timestamp to YYYYMMDDHHMMSS format.""" + if isinstance(timestamp, str): + # Parse ISO format timestamp string + dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00')) + return dt.strftime('%Y%m%d%H%M%S') + return timestamp.strftime('%Y%m%d%H%M%S') + + def _load_events_from_files(self, file_paths: list[Path]) -> list[Event]: + events = [] + for file_path in file_paths: + event = self._load_event_from_file(file_path) + if event is not None: + events.append(event) + return events + + def _load_event_from_file(self, filepath: Path) -> Event | None: + """Load an event from a file.""" + try: + json_data = filepath.read_text() + return Event.model_validate_json(json_data) + except Exception: + return None + + def _get_event_files_by_pattern( + self, pattern: str, conversation_id: UUID | None = None + ) -> list[Path]: + """Get event files matching a glob pattern, sorted by timestamp.""" + if conversation_id: + search_path = self.events_dir / str(conversation_id) / pattern + else: + search_path = self.events_dir / '*' / pattern + + files = glob.glob(str(search_path)) + return sorted([Path(f) for f in files]) + + def _parse_filename(self, filename: str) -> dict[str, str] | None: + """Parse filename to extract timestamp, kind, and event_id.""" + try: + parts = filename.split('_') + if len(parts) >= 3: + timestamp_str = parts[0] + kind = '_'.join(parts[1:-1]) # Handle kinds with underscores + event_id = parts[-1] + return {'timestamp': timestamp_str, 'kind': kind, 'event_id': event_id} + except Exception: + pass + return None + + def _get_conversation_id(self, file: Path) -> UUID | None: + try: + return UUID(file.parent.name) + except Exception: + return None + + def _get_conversation_ids(self, files: list[Path]) -> set[UUID]: + result = set() + for file in files: + conversation_id = self._get_conversation_id(file) + if conversation_id: + result.add(conversation_id) + return result + + @abstractmethod + async def _filter_files_by_conversation(self, files: list[Path]) -> list[Path]: + """Filter files by conversation.""" + + def _filter_files_by_criteria( + self, + files: list[Path], + conversation_id__eq: UUID | None = None, + kind__eq: EventKind | None = None, + timestamp__gte: datetime | None = None, + timestamp__lt: datetime | None = None, + ) -> list[Path]: + """Filter files based on search criteria.""" + filtered_files = [] + + for file_path in files: + # Check conversation_id filter + if conversation_id__eq: + if str(conversation_id__eq) not in str(file_path): + continue + + # Parse filename for additional filtering + filename_info = self._parse_filename(file_path.name) + if not filename_info: + continue + + # Check kind filter + if kind__eq and filename_info['kind'] != kind__eq: + continue + + # Check timestamp filters + if timestamp__gte or timestamp__lt: + try: + file_timestamp = datetime.strptime( + filename_info['timestamp'], '%Y%m%d%H%M%S' + ) + if timestamp__gte and file_timestamp < timestamp__gte: + continue + if timestamp__lt and file_timestamp >= timestamp__lt: + continue + except ValueError: + continue + + filtered_files.append(file_path) + + return filtered_files diff --git a/openhands/server/data_models/conversation_info.py b/openhands/server/data_models/conversation_info.py index 78af0e3dc1..5ca7b80b08 100644 --- a/openhands/server/data_models/conversation_info.py +++ b/openhands/server/data_models/conversation_info.py @@ -29,3 +29,4 @@ class ConversationInfo: pr_number: list[int] = field(default_factory=list) conversation_version: str = 'V0' sub_conversation_ids: list[str] = field(default_factory=list) + public: bool | None = None diff --git a/openhands/server/routes/manage_conversations.py b/openhands/server/routes/manage_conversations.py index 1793b07e7d..b88c2851e2 100644 --- a/openhands/server/routes/manage_conversations.py +++ b/openhands/server/routes/manage_conversations.py @@ -1501,4 +1501,5 @@ def _to_conversation_info(app_conversation: AppConversation) -> ConversationInfo sub_conversation_ids=[ sub_id.hex for sub_id in app_conversation.sub_conversation_ids ], + public=app_conversation.public, ) diff --git a/openhands/storage/data_models/conversation_metadata.py b/openhands/storage/data_models/conversation_metadata.py index 8febc9afbd..5e08907303 100644 --- a/openhands/storage/data_models/conversation_metadata.py +++ b/openhands/storage/data_models/conversation_metadata.py @@ -39,3 +39,4 @@ class ConversationMetadata: # V1 compatibility sandbox_id: str | None = None conversation_version: str | None = None + public: bool | None = None diff --git a/poetry.lock b/poetry.lock index 23789d3285..aba2364232 100644 --- a/poetry.lock +++ b/poetry.lock @@ -12707,18 +12707,19 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests [[package]] name = "pytest-asyncio" -version = "1.1.0" +version = "1.3.0" description = "Pytest support for asyncio" optional = false -python-versions = ">=3.9" -groups = ["test"] +python-versions = ">=3.10" +groups = ["dev", "test"] files = [ - {file = "pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf"}, - {file = "pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea"}, + {file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"}, + {file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"}, ] [package.dependencies] -pytest = ">=8.2,<9" +pytest = ">=8.2,<10" +typing-extensions = {version = ">=4.12", markers = "python_version < \"3.13\""} [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] @@ -16823,4 +16824,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api [metadata] lock-version = "2.1" python-versions = "^3.12,<3.14" -content-hash = "9764f3b69ec8ed35feebd78a826bbc6bfa4ac6d5b56bc999be8bc738b644e538" +content-hash = "e24ceb52bccd0c80f52c408215ccf007475eb69e10b895053ea49c7e3e4be3b8" diff --git a/pyproject.toml b/pyproject.toml index c70c110dcc..fda3cc9b96 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -139,6 +139,7 @@ pre-commit = "4.2.0" build = "*" types-setuptools = "*" pytest = "^8.4.0" +pytest-asyncio = "^1.3.0" [tool.poetry.group.test] optional = true From cb1d1f8a0d437b47501580b48c8db4dbe84d3c27 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Fri, 26 Dec 2025 10:53:21 -0700 Subject: [PATCH 66/80] Fix install-hooks CronJob failing when gitlab_webhook table doesn't exist (#12167) Co-authored-by: openhands --- enterprise/sync/install_gitlab_webhooks.py | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/enterprise/sync/install_gitlab_webhooks.py b/enterprise/sync/install_gitlab_webhooks.py index 883fff9cc9..5988879389 100644 --- a/enterprise/sync/install_gitlab_webhooks.py +++ b/enterprise/sync/install_gitlab_webhooks.py @@ -4,6 +4,8 @@ from uuid import uuid4 from integrations.types import GitLabResourceType from integrations.utils import GITLAB_WEBHOOK_URL +from sqlalchemy import text +from storage.database import session_maker from storage.gitlab_webhook import GitlabWebhook, WebhookStatus from storage.gitlab_webhook_store import GitlabWebhookStore @@ -258,6 +260,25 @@ class VerifyWebhookStatus: from integrations.gitlab.gitlab_service import SaaSGitLabService + # Check if the table exists before proceeding + # This handles cases where the CronJob runs before database migrations complete + with session_maker() as session: + query = text(""" + SELECT EXISTS ( + SELECT FROM information_schema.tables + WHERE table_name = 'gitlab_webhook' + ) + """) + result = await session.execute(query) + table_exists = result.scalar() or False + + if not table_exists: + logger.info( + 'gitlab_webhook table does not exist yet, ' + 'waiting for database migrations to complete' + ) + return + # Get an instance of the webhook store webhook_store = await GitlabWebhookStore.get_instance() From f8e4b5562e6850f338c0127ab0c0a28898e774ca Mon Sep 17 00:00:00 2001 From: johba <659301+johba37@users.noreply.github.com> Date: Sat, 27 Dec 2025 21:57:31 +0100 Subject: [PATCH 67/80] Forgejo integration (#11111) Co-authored-by: johba Co-authored-by: openhands Co-authored-by: johba Co-authored-by: enyst Co-authored-by: Graham Neubig Co-authored-by: MrGeorgen <65063405+MrGeorgen@users.noreply.github.com> Co-authored-by: MrGeorgen --- .../__tests__/routes/git-settings.test.tsx | 4 + .../src/api/git-service/git-service.api.ts | 11 +- .../git-settings/forgejo-token-input.tsx | 64 +++ .../conversation-subscriptions-provider.tsx | 16 +- frontend/src/hooks/query/use-branch-data.ts | 2 +- .../hooks/query/use-repository-branches.ts | 26 +- ...ate-conversation-and-subscribe-multiple.ts | 2 +- frontend/src/i18n/declaration.ts | 2 + frontend/src/i18n/translation.json | 32 ++ frontend/src/routes/git-settings.tsx | 73 ++- frontend/src/types/settings.ts | 1 + frontend/src/utils/utils.ts | 11 + openhands/core/setup.py | 24 + openhands/integrations/forgejo/__init__.py | 0 .../integrations/forgejo/forgejo_service.py | 56 ++ .../integrations/forgejo/service/__init__.py | 15 + .../integrations/forgejo/service/base.py | 219 ++++++++ .../integrations/forgejo/service/branches.py | 74 +++ .../integrations/forgejo/service/features.py | 123 +++++ openhands/integrations/forgejo/service/prs.py | 84 +++ .../integrations/forgejo/service/repos.py | 109 ++++ .../integrations/forgejo/service/resolver.py | 137 +++++ openhands/integrations/provider.py | 13 +- openhands/integrations/service_types.py | 1 + openhands/integrations/utils.py | 14 +- openhands/resolver/interfaces/forgejo.py | 480 ++++++++++++++++++ openhands/resolver/issue_handler_factory.py | 26 + openhands/resolver/issue_resolver.py | 2 + openhands/resolver/send_pull_request.py | 120 +++-- openhands/server/routes/git.py | 15 +- .../integrations/bitbucket/test_bitbucket.py | 13 +- .../forgejo/test_forgejo_send_pull_request.py | 78 +++ .../resolver/test_issue_handler_factory.py | 83 ++- tests/unit/test_forgejo_service.py | 273 ++++++++++ 34 files changed, 2110 insertions(+), 93 deletions(-) create mode 100644 frontend/src/components/features/settings/git-settings/forgejo-token-input.tsx create mode 100644 openhands/integrations/forgejo/__init__.py create mode 100644 openhands/integrations/forgejo/forgejo_service.py create mode 100644 openhands/integrations/forgejo/service/__init__.py create mode 100644 openhands/integrations/forgejo/service/base.py create mode 100644 openhands/integrations/forgejo/service/branches.py create mode 100644 openhands/integrations/forgejo/service/features.py create mode 100644 openhands/integrations/forgejo/service/prs.py create mode 100644 openhands/integrations/forgejo/service/repos.py create mode 100644 openhands/integrations/forgejo/service/resolver.py create mode 100644 openhands/resolver/interfaces/forgejo.py create mode 100644 tests/unit/integrations/forgejo/test_forgejo_send_pull_request.py create mode 100644 tests/unit/test_forgejo_service.py diff --git a/frontend/__tests__/routes/git-settings.test.tsx b/frontend/__tests__/routes/git-settings.test.tsx index 9f1008ce3c..6c0875f5a9 100644 --- a/frontend/__tests__/routes/git-settings.test.tsx +++ b/frontend/__tests__/routes/git-settings.test.tsx @@ -298,6 +298,7 @@ describe("Form submission", () => { gitlab: { token: "", host: "" }, bitbucket: { token: "", host: "" }, azure_devops: { token: "", host: "" }, + forgejo: { token: "", host: "" }, }); }); @@ -320,6 +321,7 @@ describe("Form submission", () => { gitlab: { token: "test-token", host: "" }, bitbucket: { token: "", host: "" }, azure_devops: { token: "", host: "" }, + forgejo: { token: "", host: "" }, }); }); @@ -342,6 +344,7 @@ describe("Form submission", () => { gitlab: { token: "", host: "" }, bitbucket: { token: "test-token", host: "" }, azure_devops: { token: "", host: "" }, + forgejo: { token: "", host: "" }, }); }); @@ -364,6 +367,7 @@ describe("Form submission", () => { gitlab: { token: "", host: "" }, bitbucket: { token: "", host: "" }, azure_devops: { token: "test-token", host: "" }, + forgejo: { token: "", host: "" }, }); }); diff --git a/frontend/src/api/git-service/git-service.api.ts b/frontend/src/api/git-service/git-service.api.ts index 48e084873a..98eba7da76 100644 --- a/frontend/src/api/git-service/git-service.api.ts +++ b/frontend/src/api/git-service/git-service.api.ts @@ -131,9 +131,18 @@ class GitService { repository: string, page: number = 1, perPage: number = 30, + selectedProvider?: Provider, ): Promise { const { data } = await openHands.get( - `/api/user/repository/branches?repository=${encodeURIComponent(repository)}&page=${page}&per_page=${perPage}`, + `/api/user/repository/branches`, + { + params: { + repository, + page, + per_page: perPage, + selected_provider: selectedProvider, + }, + }, ); return data; diff --git a/frontend/src/components/features/settings/git-settings/forgejo-token-input.tsx b/frontend/src/components/features/settings/git-settings/forgejo-token-input.tsx new file mode 100644 index 0000000000..c85fd93c1f --- /dev/null +++ b/frontend/src/components/features/settings/git-settings/forgejo-token-input.tsx @@ -0,0 +1,64 @@ +import { useTranslation } from "react-i18next"; +import { I18nKey } from "#/i18n/declaration"; +import { SettingsInput } from "../settings-input"; +import { KeyStatusIcon } from "../key-status-icon"; +import { cn } from "#/utils/utils"; + +interface ForgejoTokenInputProps { + onChange: (value: string) => void; + onForgejoHostChange: (value: string) => void; + isForgejoTokenSet: boolean; + name: string; + forgejoHostSet: string | null | undefined; + className?: string; +} + +export function ForgejoTokenInput({ + onChange, + onForgejoHostChange, + isForgejoTokenSet, + name, + forgejoHostSet, + className, +}: ForgejoTokenInputProps) { + const { t } = useTranslation(); + + return ( +
+ " : ""} + startContent={ + isForgejoTokenSet && ( + + ) + } + /> + + {})} + name="forgejo-host-input" + testId="forgejo-host-input" + label={t(I18nKey.FORGEJO$HOST_LABEL)} + type="text" + className="w-full max-w-[680px]" + placeholder="codeberg.org" + defaultValue={forgejoHostSet || undefined} + startContent={ + forgejoHostSet && + forgejoHostSet.trim() !== "" && ( + + ) + } + /> +
+ ); +} diff --git a/frontend/src/context/conversation-subscriptions-provider.tsx b/frontend/src/context/conversation-subscriptions-provider.tsx index c83c0d703e..d9d5cbde72 100644 --- a/frontend/src/context/conversation-subscriptions-provider.tsx +++ b/frontend/src/context/conversation-subscriptions-provider.tsx @@ -31,13 +31,7 @@ interface ConversationSubscriptionsContextType { subscribeToConversation: (options: { conversationId: string; sessionApiKey: string | null; - providersSet: ( - | "github" - | "gitlab" - | "bitbucket" - | "azure_devops" - | "enterprise_sso" - )[]; + providersSet: import("#/types/settings").Provider[]; baseUrl: string; socketPath?: string; onEvent?: (event: unknown, conversationId: string) => void; @@ -141,13 +135,7 @@ export function ConversationSubscriptionsProvider({ (options: { conversationId: string; sessionApiKey: string | null; - providersSet: ( - | "github" - | "gitlab" - | "bitbucket" - | "azure_devops" - | "enterprise_sso" - )[]; + providersSet: import("#/types/settings").Provider[]; baseUrl: string; socketPath?: string; onEvent?: (event: unknown, conversationId: string) => void; diff --git a/frontend/src/hooks/query/use-branch-data.ts b/frontend/src/hooks/query/use-branch-data.ts index 2173cf4c6e..fafaed068c 100644 --- a/frontend/src/hooks/query/use-branch-data.ts +++ b/frontend/src/hooks/query/use-branch-data.ts @@ -20,7 +20,7 @@ export function useBranchData( isLoading, isFetchingNextPage, isError, - } = useRepositoryBranchesPaginated(repository); + } = useRepositoryBranchesPaginated(repository, 30, provider); // Search branches when user types const { data: searchData, isLoading: isSearchLoading } = useSearchBranches( diff --git a/frontend/src/hooks/query/use-repository-branches.ts b/frontend/src/hooks/query/use-repository-branches.ts index 2a1ba39775..31c7432487 100644 --- a/frontend/src/hooks/query/use-repository-branches.ts +++ b/frontend/src/hooks/query/use-repository-branches.ts @@ -1,13 +1,22 @@ import { useQuery, useInfiniteQuery } from "@tanstack/react-query"; import GitService from "#/api/git-service/git-service.api"; import { Branch, PaginatedBranchesResponse } from "#/types/git"; +import { Provider } from "#/types/settings"; -export const useRepositoryBranches = (repository: string | null) => +export const useRepositoryBranches = ( + repository: string | null, + selectedProvider?: Provider, +) => useQuery({ - queryKey: ["repository", repository, "branches"], + queryKey: ["repository", repository, "branches", selectedProvider], queryFn: async () => { if (!repository) return []; - const response = await GitService.getRepositoryBranches(repository); + const response = await GitService.getRepositoryBranches( + repository, + 1, + 30, + selectedProvider, + ); // Ensure we return an array even if the response is malformed return Array.isArray(response.branches) ? response.branches : []; }, @@ -18,9 +27,17 @@ export const useRepositoryBranches = (repository: string | null) => export const useRepositoryBranchesPaginated = ( repository: string | null, perPage: number = 30, + selectedProvider?: Provider, ) => useInfiniteQuery({ - queryKey: ["repository", repository, "branches", "paginated", perPage], + queryKey: [ + "repository", + repository, + "branches", + "paginated", + perPage, + selectedProvider, + ], queryFn: async ({ pageParam = 1 }) => { if (!repository) { return { @@ -35,6 +52,7 @@ export const useRepositoryBranchesPaginated = ( repository, pageParam as number, perPage, + selectedProvider, ); }, enabled: !!repository, diff --git a/frontend/src/hooks/use-create-conversation-and-subscribe-multiple.ts b/frontend/src/hooks/use-create-conversation-and-subscribe-multiple.ts index e986d53052..1dd5833ab0 100644 --- a/frontend/src/hooks/use-create-conversation-and-subscribe-multiple.ts +++ b/frontend/src/hooks/use-create-conversation-and-subscribe-multiple.ts @@ -82,7 +82,7 @@ export const useCreateConversationAndSubscribeMultiple = () => { subscribeToConversation({ conversationId, sessionApiKey, - providersSet: providers, + providersSet: providers as Provider[], baseUrl, socketPath: conversationData.socketPath, onEvent: conversationData.onEventCallback, diff --git a/frontend/src/i18n/declaration.ts b/frontend/src/i18n/declaration.ts index e3ed93db2f..10189cb08e 100644 --- a/frontend/src/i18n/declaration.ts +++ b/frontend/src/i18n/declaration.ts @@ -14,6 +14,8 @@ export enum I18nKey { MICROAGENT$WHAT_TO_REMEMBER = "MICROAGENT$WHAT_TO_REMEMBER", MICROAGENT$ADD_TRIGGERS = "MICROAGENT$ADD_TRIGGERS", MICROAGENT$WAIT_FOR_RUNTIME = "MICROAGENT$WAIT_FOR_RUNTIME", + FORGEJO$TOKEN_LABEL = "FORGEJO$TOKEN_LABEL", + FORGEJO$HOST_LABEL = "FORGEJO$HOST_LABEL", MICROAGENT$ADDING_CONTEXT = "MICROAGENT$ADDING_CONTEXT", MICROAGENT$VIEW_CONVERSATION = "MICROAGENT$VIEW_CONVERSATION", MICROAGENT$SUCCESS_PR_READY = "MICROAGENT$SUCCESS_PR_READY", diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index 717e515107..cfbab93871 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -223,6 +223,38 @@ "de": "Bitte warten Sie, bis die Laufzeitumgebung aktiv ist.", "uk": "Будь ласка, зачекайте, поки середовище виконання стане активним." }, + "FORGEJO$TOKEN_LABEL": { + "en": "Forgejo Personal Access Token", + "ja": "Forgejo 個人用アクセス トークン", + "zh-CN": "Forgejo 个人访问令牌", + "zh-TW": "Forgejo 個人存取權杖", + "ko-KR": "Forgejo 개인 액세스 토큰", + "no": "Forgejo personlig tilgangstoken", + "it": "Token di accesso personale Forgejo", + "pt": "Token de acesso pessoal do Forgejo", + "es": "Token de acceso personal de Forgejo", + "ar": "رمز الوصول الشخصي لـ Forgejo", + "fr": "Jeton d'accès personnel Forgejo", + "tr": "Forgejo kişisel erişim belirteci", + "de": "Forgejo persönliches Zugriffstoken", + "uk": "Персональний токен доступу Forgejo" + }, + "FORGEJO$HOST_LABEL": { + "en": "Forgejo Host (domain)", + "ja": "Forgejo ホスト (ドメイン)", + "zh-CN": "Forgejo 主机(域名)", + "zh-TW": "Forgejo 主機(網域)", + "ko-KR": "Forgejo 호스트(도메인)", + "no": "Forgejo vert (domene)", + "it": "Host Forgejo (dominio)", + "pt": "Host do Forgejo (domínio)", + "es": "Host de Forgejo (dominio)", + "ar": "مضيف Forgejo (نطاق)", + "fr": "Hôte Forgejo (domaine)", + "tr": "Forgejo ana makinesi (alan adı)", + "de": "Forgejo Host (Domain)", + "uk": "Хост Forgejo (домен)" + }, "MICROAGENT$ADDING_CONTEXT": { "en": "OpenHands is adding this new context to your respository. We'll let you know when the pull request is ready.", "ja": "OpenHandsはこの新しいコンテキストをあなたのリポジトリに追加しています。プルリクエストの準備ができたらお知らせします。", diff --git a/frontend/src/routes/git-settings.tsx b/frontend/src/routes/git-settings.tsx index 69a7838c10..ca43591117 100644 --- a/frontend/src/routes/git-settings.tsx +++ b/frontend/src/routes/git-settings.tsx @@ -8,6 +8,7 @@ import { GitHubTokenInput } from "#/components/features/settings/git-settings/gi import { GitLabTokenInput } from "#/components/features/settings/git-settings/gitlab-token-input"; import { BitbucketTokenInput } from "#/components/features/settings/git-settings/bitbucket-token-input"; import { AzureDevOpsTokenInput } from "#/components/features/settings/git-settings/azure-devops-token-input"; +import { ForgejoTokenInput } from "#/components/features/settings/git-settings/forgejo-token-input"; import { ConfigureGitHubRepositoriesAnchor } from "#/components/features/settings/git-settings/configure-github-repositories-anchor"; import { InstallSlackAppAnchor } from "#/components/features/settings/git-settings/install-slack-app-anchor"; import { I18nKey } from "#/i18n/declaration"; @@ -40,6 +41,8 @@ function GitSettingsScreen() { React.useState(false); const [azureDevOpsTokenInputHasValue, setAzureDevOpsTokenInputHasValue] = React.useState(false); + const [forgejoTokenInputHasValue, setForgejoTokenInputHasValue] = + React.useState(false); const [githubHostInputHasValue, setGithubHostInputHasValue] = React.useState(false); @@ -49,17 +52,21 @@ function GitSettingsScreen() { React.useState(false); const [azureDevOpsHostInputHasValue, setAzureDevOpsHostInputHasValue] = React.useState(false); + const [forgejoHostInputHasValue, setForgejoHostInputHasValue] = + React.useState(false); const existingGithubHost = settings?.provider_tokens_set.github; const existingGitlabHost = settings?.provider_tokens_set.gitlab; const existingBitbucketHost = settings?.provider_tokens_set.bitbucket; const existingAzureDevOpsHost = settings?.provider_tokens_set.azure_devops; + const existingForgejoHost = settings?.provider_tokens_set.forgejo; const isSaas = config?.APP_MODE === "saas"; const isGitHubTokenSet = providers.includes("github"); const isGitLabTokenSet = providers.includes("gitlab"); const isBitbucketTokenSet = providers.includes("bitbucket"); const isAzureDevOpsTokenSet = providers.includes("azure_devops"); + const isForgejoTokenSet = providers.includes("forgejo"); const formAction = async (formData: FormData) => { const disconnectButtonClicked = @@ -70,18 +77,36 @@ function GitSettingsScreen() { return; } - const githubToken = formData.get("github-token-input")?.toString() || ""; - const gitlabToken = formData.get("gitlab-token-input")?.toString() || ""; - const bitbucketToken = - formData.get("bitbucket-token-input")?.toString() || ""; - const azureDevOpsToken = - formData.get("azure-devops-token-input")?.toString() || ""; - const githubHost = formData.get("github-host-input")?.toString() || ""; - const gitlabHost = formData.get("gitlab-host-input")?.toString() || ""; - const bitbucketHost = - formData.get("bitbucket-host-input")?.toString() || ""; - const azureDevOpsHost = - formData.get("azure-devops-host-input")?.toString() || ""; + const githubToken = ( + formData.get("github-token-input")?.toString() || "" + ).trim(); + const gitlabToken = ( + formData.get("gitlab-token-input")?.toString() || "" + ).trim(); + const bitbucketToken = ( + formData.get("bitbucket-token-input")?.toString() || "" + ).trim(); + const azureDevOpsToken = ( + formData.get("azure-devops-token-input")?.toString() || "" + ).trim(); + const forgejoToken = ( + formData.get("forgejo-token-input")?.toString() || "" + ).trim(); + const githubHost = ( + formData.get("github-host-input")?.toString() || "" + ).trim(); + const gitlabHost = ( + formData.get("gitlab-host-input")?.toString() || "" + ).trim(); + const bitbucketHost = ( + formData.get("bitbucket-host-input")?.toString() || "" + ).trim(); + const azureDevOpsHost = ( + formData.get("azure-devops-host-input")?.toString() || "" + ).trim(); + const forgejoHost = ( + formData.get("forgejo-host-input")?.toString() || "" + ).trim(); // Create providers object with all tokens const providerTokens: Record = { @@ -89,6 +114,7 @@ function GitSettingsScreen() { gitlab: { token: gitlabToken, host: gitlabHost }, bitbucket: { token: bitbucketToken, host: bitbucketHost }, azure_devops: { token: azureDevOpsToken, host: azureDevOpsHost }, + forgejo: { token: forgejoToken, host: forgejoHost }, }; saveGitProviders( @@ -108,10 +134,12 @@ function GitSettingsScreen() { setGitlabTokenInputHasValue(false); setBitbucketTokenInputHasValue(false); setAzureDevOpsTokenInputHasValue(false); + setForgejoTokenInputHasValue(false); setGithubHostInputHasValue(false); setGitlabHostInputHasValue(false); setBitbucketHostInputHasValue(false); setAzureDevOpsHostInputHasValue(false); + setForgejoHostInputHasValue(false); }, }, ); @@ -122,10 +150,12 @@ function GitSettingsScreen() { !gitlabTokenInputHasValue && !bitbucketTokenInputHasValue && !azureDevOpsTokenInputHasValue && + !forgejoTokenInputHasValue && !githubHostInputHasValue && !gitlabHostInputHasValue && !bitbucketHostInputHasValue && - !azureDevOpsHostInputHasValue; + !azureDevOpsHostInputHasValue && + !forgejoHostInputHasValue; const shouldRenderExternalConfigureButtons = isSaas && config.APP_SLUG; const shouldRenderProjectManagementIntegrations = config?.FEATURE_FLAGS?.ENABLE_JIRA || @@ -226,6 +256,20 @@ function GitSettingsScreen() { azureDevOpsHostSet={existingAzureDevOpsHost} /> )} + + {!isSaas && ( + { + setForgejoTokenInputHasValue(!!value); + }} + onForgejoHostChange={(value) => { + setForgejoHostInputHasValue(!!value); + }} + forgejoHostSet={existingForgejoHost} + /> + )}
)} @@ -244,7 +288,8 @@ function GitSettingsScreen() { !isGitHubTokenSet && !isGitLabTokenSet && !isBitbucketTokenSet && - !isAzureDevOpsTokenSet + !isAzureDevOpsTokenSet && + !isForgejoTokenSet } > {t(I18nKey.GIT$DISCONNECT_TOKENS)} diff --git a/frontend/src/types/settings.ts b/frontend/src/types/settings.ts index e5db0296bd..2c1cf251f1 100644 --- a/frontend/src/types/settings.ts +++ b/frontend/src/types/settings.ts @@ -3,6 +3,7 @@ export const ProviderOptions = { gitlab: "gitlab", bitbucket: "bitbucket", azure_devops: "azure_devops", + forgejo: "forgejo", enterprise_sso: "enterprise_sso", } as const; diff --git a/frontend/src/utils/utils.ts b/frontend/src/utils/utils.ts index a7408a7177..c3d6a900c4 100644 --- a/frontend/src/utils/utils.ts +++ b/frontend/src/utils/utils.ts @@ -215,6 +215,10 @@ export const getGitProviderBaseUrl = (gitProvider: Provider): string => { return "https://bitbucket.org"; case "azure_devops": return "https://dev.azure.com"; + case "forgejo": + // Default UI links to Codeberg unless a custom host is available in settings + // Note: UI link builders don't currently receive host; consider plumbing settings if needed + return "https://codeberg.org"; default: return ""; } @@ -229,6 +233,7 @@ export const getProviderName = (gitProvider: Provider) => { if (gitProvider === "gitlab") return "GitLab"; if (gitProvider === "bitbucket") return "Bitbucket"; if (gitProvider === "azure_devops") return "Azure DevOps"; + if (gitProvider === "forgejo") return "Forgejo"; return "GitHub"; }; @@ -269,6 +274,8 @@ export const constructPullRequestUrl = ( switch (provider) { case "github": return `${baseUrl}/${repositoryName}/pull/${prNumber}`; + case "forgejo": + return `${baseUrl}/${repositoryName}/pull/${prNumber}`; case "gitlab": return `${baseUrl}/${repositoryName}/-/merge_requests/${prNumber}`; case "bitbucket": @@ -312,6 +319,8 @@ export const constructMicroagentUrl = ( switch (gitProvider) { case "github": return `${baseUrl}/${repositoryName}/blob/main/${microagentPath}`; + case "forgejo": + return `${baseUrl}/${repositoryName}/src/branch/main/${microagentPath}`; case "gitlab": return `${baseUrl}/${repositoryName}/-/blob/main/${microagentPath}`; case "bitbucket": @@ -390,6 +399,8 @@ export const constructBranchUrl = ( switch (provider) { case "github": return `${baseUrl}/${repositoryName}/tree/${branchName}`; + case "forgejo": + return `${baseUrl}/${repositoryName}/src/branch/${branchName}`; case "gitlab": return `${baseUrl}/${repositoryName}/-/tree/${branchName}`; case "bitbucket": diff --git a/openhands/core/setup.py b/openhands/core/setup.py index 47656a9fa6..5cac8c3f72 100644 --- a/openhands/core/setup.py +++ b/openhands/core/setup.py @@ -109,6 +109,30 @@ def get_provider_tokens(): bitbucket_token = SecretStr(os.environ['BITBUCKET_TOKEN']) provider_tokens[ProviderType.BITBUCKET] = ProviderToken(token=bitbucket_token) + # Forgejo support (e.g., Codeberg or self-hosted Forgejo) + if 'FORGEJO_TOKEN' in os.environ: + forgejo_token = SecretStr(os.environ['FORGEJO_TOKEN']) + # If a base URL is provided, extract the domain to use as host override + forgejo_base_url = os.environ.get('FORGEJO_BASE_URL', '').strip() + host: str | None = None + if forgejo_base_url: + # Normalize by stripping protocol and any path (e.g., /api/v1) + url = forgejo_base_url + if url.startswith(('http://', 'https://')): + try: + from urllib.parse import urlparse + + parsed = urlparse(url) + host = parsed.netloc or None + except Exception: + pass + if host is None: + host = url.replace('https://', '').replace('http://', '') + host = host.split('/')[0].strip('/') if host else None + provider_tokens[ProviderType.FORGEJO] = ProviderToken( + token=forgejo_token, host=host + ) + # Wrap provider tokens in Secrets if any tokens were found secret_store = ( Secrets(provider_tokens=provider_tokens) if provider_tokens else None # type: ignore[arg-type] diff --git a/openhands/integrations/forgejo/__init__.py b/openhands/integrations/forgejo/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/openhands/integrations/forgejo/forgejo_service.py b/openhands/integrations/forgejo/forgejo_service.py new file mode 100644 index 0000000000..d34e27ac2c --- /dev/null +++ b/openhands/integrations/forgejo/forgejo_service.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +import os + +from pydantic import SecretStr + +from openhands.integrations.forgejo.service import ( + ForgejoBranchesMixin, + ForgejoFeaturesMixin, + ForgejoMixinBase, + ForgejoPRsMixin, + ForgejoReposMixin, + ForgejoResolverMixin, +) +from openhands.integrations.service_types import GitService +from openhands.utils.import_utils import get_impl + + +class ForgejoService( + ForgejoBranchesMixin, + ForgejoFeaturesMixin, + ForgejoPRsMixin, + ForgejoReposMixin, + ForgejoResolverMixin, + ForgejoMixinBase, + GitService, +): + """Assembled Forgejo service combining mixins by feature area.""" + + def __init__( + self, + user_id: str | None = None, + external_auth_id: str | None = None, + external_auth_token: SecretStr | None = None, + token: SecretStr | None = None, + external_token_manager: bool = False, + base_domain: str | None = None, + base_url: str | None = None, + ) -> None: + ForgejoMixinBase.__init__( + self, + user_id=user_id, + external_auth_id=external_auth_id, + external_auth_token=external_auth_token, + token=token, + external_token_manager=external_token_manager, + base_domain=base_domain, + base_url=base_url, + ) + + +forgejo_service_cls = os.environ.get( + 'OPENHANDS_FORGEJO_SERVICE_CLS', + 'openhands.integrations.forgejo.forgejo_service.ForgejoService', +) +ForgejoServiceImpl = get_impl(ForgejoService, forgejo_service_cls) diff --git a/openhands/integrations/forgejo/service/__init__.py b/openhands/integrations/forgejo/service/__init__.py new file mode 100644 index 0000000000..7a224145a3 --- /dev/null +++ b/openhands/integrations/forgejo/service/__init__.py @@ -0,0 +1,15 @@ +from .base import ForgejoMixinBase +from .branches import ForgejoBranchesMixin +from .features import ForgejoFeaturesMixin +from .prs import ForgejoPRsMixin +from .repos import ForgejoReposMixin +from .resolver import ForgejoResolverMixin + +__all__ = [ + 'ForgejoMixinBase', + 'ForgejoBranchesMixin', + 'ForgejoFeaturesMixin', + 'ForgejoPRsMixin', + 'ForgejoReposMixin', + 'ForgejoResolverMixin', +] diff --git a/openhands/integrations/forgejo/service/base.py b/openhands/integrations/forgejo/service/base.py new file mode 100644 index 0000000000..b2227f1511 --- /dev/null +++ b/openhands/integrations/forgejo/service/base.py @@ -0,0 +1,219 @@ +from __future__ import annotations + +import os +from typing import Any +from urllib.parse import urlparse + +import httpx +from pydantic import SecretStr + +from openhands.core.logger import openhands_logger as logger +from openhands.integrations.protocols.http_client import HTTPClient +from openhands.integrations.service_types import ( + BaseGitService, + OwnerType, + ProviderType, + Repository, + RequestMethod, + UnknownException, + User, +) +from openhands.utils.http_session import httpx_verify_option + + +class ForgejoMixinBase(BaseGitService, HTTPClient): + """Common functionality shared by Forgejo service mixins.""" + + DEFAULT_BASE_URL = 'https://codeberg.org/api/v1' + DEFAULT_DOMAIN = 'codeberg.org' + + token: SecretStr = SecretStr('') + refresh = False + + def __init__( + self, + user_id: str | None = None, + external_auth_id: str | None = None, + external_auth_token: SecretStr | None = None, + token: SecretStr | None = None, + external_token_manager: bool = False, + base_domain: str | None = None, + base_url: str | None = None, + ) -> None: + self.user_id = user_id + self.external_auth_id = external_auth_id + self.external_auth_token = external_auth_token + self.external_token_manager = external_token_manager + + if token: + self.token = token + else: + env_token = os.environ.get('FORGEJO_TOKEN') + if env_token: + self.token = SecretStr(env_token) + + env_base_url = os.environ.get('FORGEJO_BASE_URL') + self.BASE_URL = self._resolve_base_url(base_url, base_domain, env_base_url) + self.base_url = self.BASE_URL # Backwards compatibility for existing usage + parsed = urlparse(self.BASE_URL) + self.base_domain = parsed.netloc or self.DEFAULT_DOMAIN + self.web_base_url = f'https://{self.base_domain}'.rstrip('/') + + @property + def provider(self) -> str: + return ProviderType.FORGEJO.value + + async def get_latest_token(self) -> SecretStr | None: + return self.token + + async def _get_headers(self) -> dict[str, Any]: + if not self.token: + latest_token = await self.get_latest_token() + if latest_token: + self.token = latest_token + + return { + 'Authorization': f'token {self.token.get_secret_value() if self.token else ""}', + 'Accept': 'application/json', + } + + async def _make_request( + self, + url: str, + params: dict | None = None, + method: RequestMethod = RequestMethod.GET, + ) -> tuple[Any, dict]: + try: + async with httpx.AsyncClient(verify=httpx_verify_option()) as client: + headers = await self._get_headers() + response = await self.execute_request( + client=client, + url=url, + headers=headers, + params=params, + method=method, + ) + + if self.refresh and self._has_token_expired(response.status_code): + await self.get_latest_token() + headers = await self._get_headers() + response = await self.execute_request( + client=client, + url=url, + headers=headers, + params=params, + method=method, + ) + + response.raise_for_status() + headers_out: dict[str, str] = {} + for header in ('Link', 'X-Total-Count', 'X-Total'): + if header in response.headers: + headers_out[header] = response.headers[header] + + content_type = response.headers.get('Content-Type', '') + if 'application/json' in content_type: + return response.json(), headers_out + return response.text, headers_out + + except httpx.HTTPStatusError as err: + raise self.handle_http_status_error(err) + except httpx.HTTPError as err: + raise self.handle_http_error(err) + + def _resolve_base_url( + self, + explicit_base_url: str | None, + base_domain: str | None, + env_base_url: str | None, + ) -> str: + for candidate in ( + explicit_base_url, + base_domain, + env_base_url, + self.DEFAULT_BASE_URL, + ): + if not candidate: + continue + + normalized = candidate.strip().rstrip('/') + if not normalized: + continue + + if normalized.startswith(('http://', 'https://')): + url = normalized + else: + url = f'https://{normalized}' + + if '/api/' in url: + return url + + return f'{url}/api/v1' + + return self.DEFAULT_BASE_URL + + async def get_user(self) -> User: # type: ignore[override] + url = f'{self.BASE_URL}/user' + response, _ = await self._make_request(url) + + return User( + id=str(response.get('id', '')), + login=response.get('username', ''), + avatar_url=response.get('avatar_url', ''), + name=response.get('full_name'), + email=response.get('email'), + company=response.get('organization'), + ) + + def _parse_repository( + self, repo: dict, link_header: str | None = None + ) -> Repository: + owner = repo.get('owner') or {} + owner_type = ( + OwnerType.ORGANIZATION + if (owner.get('type') or '').lower() == 'organization' + else OwnerType.USER + ) + + return Repository( + id=str(repo.get('id', '')), + full_name=repo.get('full_name', ''), + stargazers_count=repo.get('stars_count'), + git_provider=ProviderType.FORGEJO, + is_public=not repo.get('private', False), + link_header=link_header, + pushed_at=repo.get('updated_at') or repo.get('pushed_at'), + owner_type=owner_type, + main_branch=repo.get('default_branch'), + ) + + def _split_repo(self, repository: str) -> tuple[str, str]: + repo_path = repository.strip() + if repo_path.startswith(('http://', 'https://')): + parsed = urlparse(repo_path) + repo_path = parsed.path.lstrip('/') + + parts = [part for part in repo_path.split('/') if part] + if len(parts) < 2: + raise ValueError(f'Invalid repository format: {repository}') + + return parts[0], parts[1] + + def _build_repo_api_url(self, owner: str, repo: str, *segments: str) -> str: + base = f'{self.BASE_URL}/repos/{owner}/{repo}' + if segments: + base = f'{base}/{"/".join(segments)}' + return base + + def _map_sort(self, sort: str) -> str: + sort_map = { + 'pushed': 'updated', + 'updated': 'updated', + 'created': 'created', + 'full_name': 'name', + } + return sort_map.get(sort, 'updated') + + def handle_http_error(self, e: httpx.HTTPError) -> UnknownException: # type: ignore[override] + logger.warning(f'HTTP error on {self.provider} API: {type(e).__name__} : {e}') + return UnknownException(f'HTTP error {type(e).__name__} : {e}') diff --git a/openhands/integrations/forgejo/service/branches.py b/openhands/integrations/forgejo/service/branches.py new file mode 100644 index 0000000000..c163212dea --- /dev/null +++ b/openhands/integrations/forgejo/service/branches.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from openhands.integrations.forgejo.service.base import ForgejoMixinBase +from openhands.integrations.service_types import Branch, PaginatedBranchesResponse + + +class ForgejoBranchesMixin(ForgejoMixinBase): + """Branch-related operations for Forgejo.""" + + async def get_branches(self, repository: str) -> list[Branch]: # type: ignore[override] + branches: list[Branch] = [] + page = 1 + per_page = 100 + + while True: + paginated = await self.get_paginated_branches(repository, page, per_page) + branches.extend(paginated.branches) + if not paginated.has_next_page: + break + page += 1 + + return branches + + async def get_paginated_branches( + self, repository: str, page: int = 1, per_page: int = 30 + ) -> PaginatedBranchesResponse: # type: ignore[override] + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo, 'branches') + params = { + 'page': str(page), + 'limit': str(per_page), + } + + response, headers = await self._make_request(url, params) + branch_items = response if isinstance(response, list) else [] + + branches: list[Branch] = [] + for branch in branch_items: + commit_info = branch.get('commit') or {} + commit_sha = ( + commit_info.get('id') + or commit_info.get('sha') + or commit_info.get('commit', {}).get('sha') + ) + branches.append( + Branch( + name=branch.get('name', ''), + commit_sha=commit_sha or '', + protected=branch.get('protected', False), + last_push_date=None, + ) + ) + + link_header = headers.get('Link', '') + total_count_header = headers.get('X-Total-Count') or headers.get('X-Total') + total_count = int(total_count_header) if total_count_header else None + has_next_page = 'rel="next"' in link_header + + return PaginatedBranchesResponse( + branches=branches, + has_next_page=has_next_page, + current_page=page, + per_page=per_page, + total_count=total_count, + ) + + async def search_branches( + self, repository: str, query: str, per_page: int = 30 + ) -> list[Branch]: # type: ignore[override] + all_branches = await self.get_branches(repository) + lowered = query.lower() + return [branch for branch in all_branches if lowered in branch.name.lower()][ + :per_page + ] diff --git a/openhands/integrations/forgejo/service/features.py b/openhands/integrations/forgejo/service/features.py new file mode 100644 index 0000000000..986fa363c0 --- /dev/null +++ b/openhands/integrations/forgejo/service/features.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +import base64 +from typing import Any + +from openhands.core.logger import openhands_logger as logger +from openhands.integrations.forgejo.service.base import ForgejoMixinBase +from openhands.integrations.service_types import ( + MicroagentContentResponse, + MicroagentResponse, + ProviderType, + ResourceNotFoundError, + SuggestedTask, +) + + +class ForgejoFeaturesMixin(ForgejoMixinBase): + """Microagent and feature helpers for Forgejo.""" + + async def _get_cursorrules_url(self, repository: str) -> str: + owner, repo = self._split_repo(repository) + return self._build_repo_api_url(owner, repo, 'contents', '.cursorrules') + + async def _get_microagents_directory_url( + self, repository: str, microagents_path: str + ) -> str: + owner, repo = self._split_repo(repository) + normalized_path = microagents_path.strip('/') + return self._build_repo_api_url(owner, repo, 'contents', normalized_path) + + def _get_microagents_directory_params(self, microagents_path: str) -> dict | None: + return None + + def _is_valid_microagent_file(self, item: dict[str, Any] | None) -> bool: + if not isinstance(item, dict): + return False + if item.get('type') != 'file': + return False + name = item.get('name', '') + return isinstance(name, str) and ( + name.endswith('.md') or name.endswith('.cursorrules') + ) + + def _get_file_name_from_item(self, item: dict[str, Any] | None) -> str: + if not isinstance(item, dict): + return '' + name = item.get('name') + return name if isinstance(name, str) else '' + + def _get_file_path_from_item( + self, item: dict[str, Any] | None, microagents_path: str + ) -> str: + file_name = self._get_file_name_from_item(item) + if not microagents_path: + return file_name + return f'{microagents_path.strip("/")}/{file_name}' + + async def get_microagents(self, repository: str) -> list[MicroagentResponse]: # type: ignore[override] + microagents_path = self._determine_microagents_path(repository) + microagents: list[MicroagentResponse] = [] + + try: + directory_url = await self._get_microagents_directory_url( + repository, microagents_path + ) + items, _ = await self._make_request(directory_url) + except ResourceNotFoundError: + items = [] + except Exception as exc: + # Fail gracefully if the directory cannot be inspected + self._log_microagent_warning(repository, str(exc)) + items = [] + + if isinstance(items, list): + for item in items: + if self._is_valid_microagent_file(item): + file_name = self._get_file_name_from_item(item) + file_path = self._get_file_path_from_item(item, microagents_path) + microagents.append( + self._create_microagent_response(file_name, file_path) + ) + + cursorrules = await self._check_cursorrules_file(repository) + if cursorrules: + microagents.append(cursorrules) + + return microagents + + async def get_microagent_content( + self, repository: str, file_path: str + ) -> MicroagentContentResponse: # type: ignore[override] + owner, repo = self._split_repo(repository) + normalized_path = file_path.lstrip('/') + url = self._build_repo_api_url(owner, repo, 'contents', normalized_path) + + response, _ = await self._make_request(url) + content = response.get('content') or '' + encoding = (response.get('encoding') or 'base64').lower() + + if encoding == 'base64': + try: + decoded = base64.b64decode(content).decode('utf-8') + except Exception: + decoded = '' + else: + decoded = content + + try: + return self._parse_microagent_content(decoded, file_path) + except Exception: + return MicroagentContentResponse( + content=decoded, + path=file_path, + triggers=[], + git_provider=ProviderType.FORGEJO.value, + ) + + async def get_suggested_tasks(self) -> list[SuggestedTask]: # type: ignore[override] + # Suggested tasks are not yet implemented for Forgejo. + return [] + + def _log_microagent_warning(self, repository: str, message: str) -> None: + logger.debug(f'Forgejo microagent scan warning for {repository}: {message}') diff --git a/openhands/integrations/forgejo/service/prs.py b/openhands/integrations/forgejo/service/prs.py new file mode 100644 index 0000000000..17d22ec5b0 --- /dev/null +++ b/openhands/integrations/forgejo/service/prs.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +from typing import Any + +from openhands.core.logger import openhands_logger as logger +from openhands.integrations.forgejo.service.base import ForgejoMixinBase +from openhands.integrations.service_types import RequestMethod, UnknownException + + +class ForgejoPRsMixin(ForgejoMixinBase): + """Pull request helpers for Forgejo.""" + + async def create_pull_request(self, data: dict[str, Any] | None = None) -> dict: + payload: dict[str, Any] = dict(data or {}) + + repository = payload.pop('repository', None) + owner = payload.pop('owner', None) + repo_name = payload.pop('repo', None) + + if repository and isinstance(repository, str): + owner, repo_name = self._split_repo(repository) + else: + owner = str(owner or self.user_id or '').strip() + repo_name = str(repo_name or '').strip() + + if not owner or not repo_name: + raise ValueError( + 'Repository information is required to create a pull request' + ) + + url = self._build_repo_api_url(owner, repo_name, 'pulls') + response, _ = await self._make_request( + url, + payload, + method=RequestMethod.POST, + ) + + if not isinstance(response, dict): + raise UnknownException('Unexpected response creating Forgejo pull request') + + if 'number' not in response and 'index' in response: + response['number'] = response['index'] + + if 'html_url' not in response and 'url' in response: + response['html_url'] = response['url'] + + return response + + async def request_reviewers( + self, repository: str, pr_number: int, reviewers: list[str] + ) -> None: + if not reviewers: + return + + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url( + owner, repo, 'pulls', str(pr_number), 'requested_reviewers' + ) + + try: + await self._make_request( + url, + {'reviewers': reviewers}, + method=RequestMethod.POST, + ) + except Exception as exc: # pragma: no cover - log and continue + logger.warning( + 'Failed to request Forgejo reviewers %s for %s/%s PR #%s: %s', + reviewers, + owner, + repo, + pr_number, + exc, + ) + + async def get_pr_details(self, repository: str, pr_number: int) -> dict: # type: ignore[override] + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number)) + response, _ = await self._make_request(url) + return response + + async def is_pr_open(self, repository: str, pr_number: int) -> bool: # type: ignore[override] + pr_details = await self.get_pr_details(repository, pr_number) + return (pr_details.get('state') or '').lower() == 'open' diff --git a/openhands/integrations/forgejo/service/repos.py b/openhands/integrations/forgejo/service/repos.py new file mode 100644 index 0000000000..10339514b0 --- /dev/null +++ b/openhands/integrations/forgejo/service/repos.py @@ -0,0 +1,109 @@ +from __future__ import annotations + +from openhands.integrations.forgejo.service.base import ForgejoMixinBase +from openhands.integrations.service_types import Repository +from openhands.server.types import AppMode + + +class ForgejoReposMixin(ForgejoMixinBase): + """Repository operations for Forgejo.""" + + async def search_repositories( + self, + query: str, + per_page: int, + sort: str, + order: str, + public: bool, + app_mode: AppMode, + ) -> list[Repository]: # type: ignore[override] + url = f'{self.BASE_URL}/repos/search' + params = { + 'q': query, + 'limit': per_page, + 'sort': sort, + 'order': order, + 'mode': 'source', + } + + response, _ = await self._make_request(url, params) + repos = response.get('data', []) if isinstance(response, dict) else [] + if public: + repos = [repo for repo in repos if not repo.get('private', False)] + return [self._parse_repository(repo) for repo in repos] + + async def get_all_repositories( + self, sort: str, app_mode: AppMode + ) -> list[Repository]: # type: ignore[override] + max_repos = 1000 + per_page = 100 + collected: list[dict] = [] + page = 1 + last_link_header: str | None = None + + url = f'{self.BASE_URL}/user/repos' + forgejo_sort = self._map_sort(sort) + + while len(collected) < max_repos: + params = { + 'page': str(page), + 'limit': str(per_page), + 'sort': forgejo_sort, + } + response, headers = await self._make_request(url, params) + last_link_header = headers.get('Link') + + page_repos = response if isinstance(response, list) else [] + if not page_repos: + break + + collected.extend(page_repos) + if 'rel="next"' not in (last_link_header or ''): + break + + page += 1 + + collected = collected[:max_repos] + return [ + self._parse_repository(repo, link_header=last_link_header) + for repo in collected + ] + + async def get_paginated_repos( + self, + page: int, + per_page: int, + sort: str, + installation_id: str | None, + query: str | None = None, + ) -> list[Repository]: # type: ignore[override] + _ = installation_id + url = f'{self.BASE_URL}/user/repos' + params = { + 'page': str(page), + 'limit': str(per_page), + 'sort': self._map_sort(sort), + } + + response, headers = await self._make_request(url, params) + repos = response if isinstance(response, list) else [] + + if query: + lowered = query.lower() + repos = [ + repo + for repo in repos + if lowered in (repo.get('full_name') or '').lower() + ] + + link_header = headers.get('Link') + return [self._parse_repository(repo, link_header=link_header) for repo in repos] + + async def get_repository_details_from_repo_name( + self, repository: str + ) -> Repository: # type: ignore[override] + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo) + response, headers = await self._make_request(url) + link_header = headers.get('Link') + return self._parse_repository(response, link_header=link_header) diff --git a/openhands/integrations/forgejo/service/resolver.py b/openhands/integrations/forgejo/service/resolver.py new file mode 100644 index 0000000000..0442a72095 --- /dev/null +++ b/openhands/integrations/forgejo/service/resolver.py @@ -0,0 +1,137 @@ +from __future__ import annotations + +from collections import defaultdict +from datetime import datetime +from typing import cast + +from openhands.integrations.forgejo.service.base import ForgejoMixinBase +from openhands.integrations.service_types import Comment +from openhands.resolver.interfaces.issue import ReviewThread + + +class ForgejoResolverMixin(ForgejoMixinBase): + """Lightweight helpers used by resolver flows for Forgejo.""" + + async def get_issue_title_and_body( + self, repository: str, issue_number: int + ) -> tuple[str, str]: + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo, 'issues', str(issue_number)) + response, _ = await self._make_request(url) + title = response.get('title') or '' + body = response.get('body') or response.get('content') or '' + return title, body + + async def get_issue_comments( + self, + repository: str, + issue_number: int, + max_comments: int = 20, + ) -> list[Comment]: + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url( + owner, repo, 'issues', str(issue_number), 'comments' + ) + per_page = min(max_comments, 50) + params = { + 'page': '1', + 'limit': str(per_page), + 'order': 'desc', + } + + response, _ = await self._make_request(url, params) + raw_comments = response if isinstance(response, list) else [] + + comments: list[Comment] = [] + for payload in raw_comments: + comment = self._to_comment(payload) + if comment is not None: + comments.append(comment) + + comments.sort(key=lambda c: c.created_at) + return comments[-max_comments:] + + async def get_pr_comments( + self, + repository: str, + pr_number: int, + max_comments: int = 50, + ) -> list[Comment]: + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number), 'comments') + per_page = min(max_comments, 50) + params = { + 'page': '1', + 'limit': str(per_page), + 'order': 'desc', + } + + response, _ = await self._make_request(url, params) + raw_comments = response if isinstance(response, list) else [] + + comments: list[Comment] = [] + for payload in raw_comments: + comment = self._to_comment(payload) + if comment is not None: + comments.append(comment) + + comments.sort(key=lambda c: c.created_at) + return comments[-max_comments:] + + async def get_pr_review_threads( + self, + repository: str, + pr_number: int, + max_threads: int = 10, + ) -> list[ReviewThread]: + owner, repo = self._split_repo(repository) + url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number), 'comments') + params = {'page': '1', 'limit': '100', 'order': 'asc'} + + response, _ = await self._make_request(url, params) + raw_comments = response if isinstance(response, list) else [] + + grouped: dict[str, list[str]] = defaultdict(list) + files: dict[str, set[str]] = defaultdict(set) + + for payload in raw_comments: + if not isinstance(payload, dict): + continue + path = cast(str, payload.get('path') or 'general') + body = cast(str, payload.get('body') or '') + grouped[path].append(body) + if payload.get('path'): + files[path].add(cast(str, payload['path'])) + + threads: list[ReviewThread] = [] + for path, messages in grouped.items(): + comment_text = '\n---\n'.join(messages) + file_list = sorted(files.get(path, {path})) + threads.append(ReviewThread(comment=comment_text, files=file_list)) + + return threads[:max_threads] + + def _to_comment(self, payload: dict | None) -> Comment | None: + if not isinstance(payload, dict): + return None + body = payload.get('body') or '' + author = (payload.get('user') or {}).get('login') or 'unknown' + created_at = self._parse_datetime(payload.get('created_at')) + updated_at = self._parse_datetime(payload.get('updated_at')) + + return Comment( + id=str(payload.get('id', 'unknown')), + body=body, + author=author, + created_at=created_at, + updated_at=updated_at, + system=payload.get('void', False), + ) + + def _parse_datetime(self, value: str | None) -> datetime: + if not value: + return datetime.fromtimestamp(0) + try: + return datetime.fromisoformat(value.replace('Z', '+00:00')) + except ValueError: + return datetime.fromtimestamp(0) diff --git a/openhands/integrations/provider.py b/openhands/integrations/provider.py index c260f23ee0..8e161e25c5 100644 --- a/openhands/integrations/provider.py +++ b/openhands/integrations/provider.py @@ -22,6 +22,7 @@ from openhands.integrations.azure_devops.azure_devops_service import ( AzureDevOpsServiceImpl, ) from openhands.integrations.bitbucket.bitbucket_service import BitBucketServiceImpl +from openhands.integrations.forgejo.forgejo_service import ForgejoServiceImpl from openhands.integrations.github.github_service import GithubServiceImpl from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl from openhands.integrations.service_types import ( @@ -105,6 +106,7 @@ class ProviderHandler: ProviderType.GITHUB: 'github.com', ProviderType.GITLAB: 'gitlab.com', ProviderType.BITBUCKET: 'bitbucket.org', + ProviderType.FORGEJO: 'codeberg.org', ProviderType.AZURE_DEVOPS: 'dev.azure.com', } @@ -126,6 +128,7 @@ class ProviderHandler: ProviderType.GITHUB: GithubServiceImpl, ProviderType.GITLAB: GitLabServiceImpl, ProviderType.BITBUCKET: BitBucketServiceImpl, + ProviderType.FORGEJO: ForgejoServiceImpl, ProviderType.AZURE_DEVOPS: AzureDevOpsServiceImpl, } @@ -672,6 +675,14 @@ class ProviderHandler: if provider != ProviderType.AZURE_DEVOPS: domain = self.provider_tokens[provider].host or domain + # Normalize domain to prevent double protocols or path segments + if domain: + domain = domain.strip() + domain = domain.replace('https://', '').replace('http://', '') + # Remove any trailing path like /api/v3 or /api/v4 + if '/' in domain: + domain = domain.split('/')[0] + # Try to use token if available, otherwise use public URL if self.provider_tokens and provider in self.provider_tokens: git_token = self.provider_tokens[provider].token @@ -747,7 +758,7 @@ class ProviderHandler: f'https://user:***@{clean_domain}/{repo_name}.git' ) else: - # GitHub + # GitHub, Forgejo remote_url = f'https://{token_value}@{domain}/{repo_name}.git' else: remote_url = f'https://{domain}/{repo_name}.git' diff --git a/openhands/integrations/service_types.py b/openhands/integrations/service_types.py index cf76e40479..9ee250085d 100644 --- a/openhands/integrations/service_types.py +++ b/openhands/integrations/service_types.py @@ -21,6 +21,7 @@ class ProviderType(Enum): GITHUB = 'github' GITLAB = 'gitlab' BITBUCKET = 'bitbucket' + FORGEJO = 'forgejo' AZURE_DEVOPS = 'azure_devops' ENTERPRISE_SSO = 'enterprise_sso' diff --git a/openhands/integrations/utils.py b/openhands/integrations/utils.py index c3a9ee344c..cbda2b06e7 100644 --- a/openhands/integrations/utils.py +++ b/openhands/integrations/utils.py @@ -5,6 +5,7 @@ from openhands.integrations.azure_devops.azure_devops_service import ( AzureDevOpsServiceImpl as AzureDevOpsService, ) from openhands.integrations.bitbucket.bitbucket_service import BitBucketService +from openhands.integrations.forgejo.forgejo_service import ForgejoService from openhands.integrations.github.github_service import GitHubService from openhands.integrations.gitlab.gitlab_service import GitLabService from openhands.integrations.provider import ProviderType @@ -48,6 +49,17 @@ async def validate_provider_token( except Exception as e: gitlab_error = e + # Try Forgejo if a base_domain was provided (custom instances may not contain + # the substring 'forgejo' or 'codeberg') + forgejo_error = None + if base_domain: + try: + forgejo_service = ForgejoService(token=token, base_domain=base_domain) + await forgejo_service.get_user() + return ProviderType.FORGEJO + except Exception as e: + forgejo_error = e + # Try Bitbucket next bitbucket_error = None try: @@ -67,7 +79,7 @@ async def validate_provider_token( azure_devops_error = e logger.debug( - f'Failed to validate token: {github_error} \n {gitlab_error} \n {bitbucket_error} \n {azure_devops_error}' + f'Failed to validate token: {github_error} \n {gitlab_error} \n {forgejo_error} \n {bitbucket_error} \n {azure_devops_error}' ) return None diff --git a/openhands/resolver/interfaces/forgejo.py b/openhands/resolver/interfaces/forgejo.py new file mode 100644 index 0000000000..44830a7b7a --- /dev/null +++ b/openhands/resolver/interfaces/forgejo.py @@ -0,0 +1,480 @@ +from __future__ import annotations + +from typing import Any +from urllib.parse import quote + +import httpx + +from openhands.core.logger import openhands_logger as logger +from openhands.resolver.interfaces.issue import ( + Issue, + IssueHandlerInterface, + ReviewThread, +) +from openhands.resolver.utils import extract_issue_references + + +class ForgejoIssueHandler(IssueHandlerInterface): + """Issue handler implementation for Forgejo-based providers (e.g. Codeberg).""" + + API_PREFIX = '/api/v1' + + def __init__( + self, + owner: str, + repo: str, + token: str, + username: str | None = None, + base_domain: str = 'codeberg.org', + ): + self.owner = owner + self.repo = repo + self.token = token + self.username = username + self.base_domain = base_domain + self.base_url = self.get_base_url() + self.download_url = self.get_download_url() + self.clone_url = self.get_clone_url() + self.headers = self.get_headers() + + def _api_root(self) -> str: + return f'https://{self.base_domain}{self.API_PREFIX}' + + @staticmethod + def _to_int(value: Any) -> int: + try: + return int(value) + except (TypeError, ValueError): + return 0 + + def set_owner(self, owner: str) -> None: + self.owner = owner + self.base_url = self.get_base_url() + self.download_url = self.get_download_url() + + def get_headers(self) -> dict[str, str]: + return { + 'Authorization': f'token {self.token}', + 'Accept': 'application/json', + } + + def get_base_url(self) -> str: + return f'{self._api_root()}/repos/{self.owner}/{self.repo}' + + def get_authorize_url(self) -> str: + credential = ( + f'{self.username}:{self.token}' + if self.username + else f'x-auth-token:{self.token}' + ) + return f'https://{credential}@{self.base_domain}/' + + def get_branch_url(self, branch_name: str) -> str: + escaped_branch = quote(branch_name, safe='') + return f'{self.get_base_url()}/branches/{escaped_branch}' + + def get_download_url(self) -> str: + return f'{self.get_base_url()}/issues' + + def get_clone_url(self) -> str: + credential = ( + f'{self.username}:{self.token}' + if self.username + else f'x-access-token:{self.token}' + ) + return f'https://{credential}@{self.base_domain}/{self.owner}/{self.repo}.git' + + def get_graphql_url(self) -> str: + # Forgejo does not expose a GraphQL endpoint. + return '' + + def get_compare_url(self, branch_name: str) -> str: + return ( + f'https://{self.base_domain}/{self.owner}/{self.repo}/compare/{branch_name}' + ) + + def download_issues(self) -> list[Any]: + page = 1 + all_issues: list[Any] = [] + + while True: + params = {'state': 'open', 'limit': '50', 'page': str(page)} + response = httpx.get(self.download_url, headers=self.headers, params=params) + response.raise_for_status() + issues = response.json() + + if not issues: + break + + if not isinstance(issues, list) or any( + not isinstance(issue, dict) for issue in issues + ): + raise ValueError( + 'Expected list of dictionaries from Forgejo issues API.' + ) + + all_issues.extend(issues) + page += 1 + + return all_issues + + def get_issue_comments( + self, issue_number: int, comment_id: int | None = None + ) -> list[str] | None: + url = f'{self.get_download_url()}/{issue_number}/comments' + page = 1 + params = {'limit': '50', 'page': str(page)} + all_comments: list[str] = [] + + while True: + response = httpx.get(url, headers=self.headers, params=params) + response.raise_for_status() + comments = response.json() + + if not comments: + break + + if comment_id is not None: + matching_comment = next( + ( + comment['body'] + for comment in comments + if self._to_int(comment.get('id')) == comment_id + ), + None, + ) + if matching_comment: + return [matching_comment] + else: + all_comments.extend( + comment['body'] for comment in comments if comment.get('body') + ) + + page += 1 + params = {'limit': '50', 'page': str(page)} + + return all_comments if all_comments else None + + def get_pull_url(self, pr_number: int) -> str: + return f'https://{self.base_domain}/{self.owner}/{self.repo}/pulls/{pr_number}' + + def get_branch_name(self, base_branch_name: str) -> str: + branch_name = base_branch_name + attempt = 1 + while self.branch_exists(branch_name): + attempt += 1 + branch_name = f'{base_branch_name}-try{attempt}' + return branch_name + + def get_default_branch_name(self) -> str: + response = httpx.get(self.get_base_url(), headers=self.headers) + response.raise_for_status() + data = response.json() + return str(data.get('default_branch')) + + def branch_exists(self, branch_name: str) -> bool: + response = httpx.get(self.get_branch_url(branch_name), headers=self.headers) + exists = response.status_code == 200 + logger.info(f'Branch {branch_name} exists: {exists}') + return exists + + def reply_to_comment(self, pr_number: int, comment_id: str, reply: str) -> None: + # Forgejo does not support threaded replies via API; add a regular comment referencing the original ID. + message = f'OpenHands reply to comment {comment_id}\n\n{reply}' + self.send_comment_msg(pr_number, message) + + def create_pull_request(self, data: dict[str, Any] | None = None) -> dict[str, Any]: + payload = data or {} + response = httpx.post( + f'{self.get_base_url()}/pulls', headers=self.headers, json=payload + ) + if response.status_code == 403: + raise RuntimeError( + 'Failed to create pull request due to missing permissions. ' + 'Ensure the token has write access to the repository.' + ) + response.raise_for_status() + pr_data = response.json() + pr_data.setdefault('number', pr_data.get('index')) + if 'html_url' not in pr_data and 'url' in pr_data: + pr_data['html_url'] = pr_data['url'] + return dict(pr_data) + + def request_reviewers(self, reviewer: str, pr_number: int) -> None: + url = f'{self.get_base_url()}/pulls/{pr_number}/requested_reviewers' + response = httpx.post( + url, + headers=self.headers, + json={'reviewers': [reviewer]}, + ) + if response.status_code not in (200, 201, 204): + logger.warning( + f'Failed to request review from {reviewer}: {response.status_code} {response.text}' + ) + + def send_comment_msg(self, issue_number: int, msg: str) -> None: + comment_url = f'{self.get_download_url()}/{issue_number}/comments' + response = httpx.post( + comment_url, + headers=self.headers, + json={'body': msg}, + ) + if response.status_code not in (200, 201): + logger.error( + f'Failed to post comment: {response.status_code} {response.text}' + ) + + def get_context_from_external_issues_references( + self, + closing_issues: list[str], + closing_issue_numbers: list[int], + issue_body: str, + review_comments: list[str] | None, + review_threads: list[ReviewThread], + thread_comments: list[str] | None, + ) -> list[str]: + new_references: list[int] = [] + + if issue_body: + new_references.extend(extract_issue_references(issue_body)) + + if review_comments: + for comment in review_comments: + new_references.extend(extract_issue_references(comment)) + + if review_threads: + for thread in review_threads: + new_references.extend(extract_issue_references(thread.comment)) + + if thread_comments: + for thread_comment in thread_comments: + new_references.extend(extract_issue_references(thread_comment)) + + unique_ids = set(new_references).difference(closing_issue_numbers) + + for issue_number in unique_ids: + try: + response = httpx.get( + f'{self.get_download_url()}/{issue_number}', + headers=self.headers, + ) + response.raise_for_status() + issue_data = response.json() + body = issue_data.get('body', '') + if body: + closing_issues.append(body) + except httpx.HTTPError as exc: + logger.warning(f'Failed to fetch issue {issue_number}: {exc}') + + return closing_issues + + def get_pull_url_for_issue(self, issue_number: int) -> str: + return ( + f'https://{self.base_domain}/{self.owner}/{self.repo}/issues/{issue_number}' + ) + + def get_converted_issues( + self, issue_numbers: list[int] | None = None, comment_id: int | None = None + ) -> list[Issue]: + if not issue_numbers: + raise ValueError('Unspecified issue numbers') + + all_issues = self.download_issues() + logger.info(f'Limiting resolving to issues {issue_numbers}.') + filtered = [ + issue + for issue in all_issues + if self._to_int(issue.get('number') or issue.get('index')) in issue_numbers + ] + + converted: list[Issue] = [] + for issue in filtered: + if any(issue.get(key) is None for key in ['number', 'title']): + logger.warning( + f'Skipping issue {issue} as it is missing number or title.' + ) + continue + + issue_number = self._to_int(issue.get('number') or issue.get('index')) + body = issue.get('body') or '' + thread_comments = self.get_issue_comments(issue_number, comment_id) + + issue_details = Issue( + owner=self.owner, + repo=self.repo, + number=issue_number, + title=issue['title'], + body=body, + thread_comments=thread_comments, + review_comments=None, + review_threads=None, + ) + converted.append(issue_details) + + return converted + + +class ForgejoPRHandler(ForgejoIssueHandler): + def __init__( + self, + owner: str, + repo: str, + token: str, + username: str | None = None, + base_domain: str = 'codeberg.org', + ): + super().__init__(owner, repo, token, username, base_domain) + self.download_url = f'{self.get_base_url()}/pulls' + + def download_pr_metadata( + self, pull_number: int, comment_id: int | None = None + ) -> tuple[list[str], list[int], list[str] | None, list[ReviewThread], list[str]]: + closing_issues: list[str] = [] + closing_issue_numbers: list[int] = [] + + try: + response = httpx.get( + f'{self.get_base_url()}/pulls/{pull_number}', headers=self.headers + ) + response.raise_for_status() + pr_data = response.json() + body = pr_data.get('body') or '' + closing_refs = extract_issue_references(body) + closing_issue_numbers.extend(closing_refs) + if body: + closing_issues.append(body) + except httpx.HTTPError as exc: + logger.warning(f'Failed to fetch PR metadata for {pull_number}: {exc}') + + review_comments = self.get_pr_comments(pull_number, comment_id) + review_threads: list[ReviewThread] = [] + thread_ids: list[str] = [] + + return ( + closing_issues, + closing_issue_numbers, + review_comments, + review_threads, + thread_ids, + ) + + def get_pr_comments( + self, pr_number: int, comment_id: int | None = None + ) -> list[str] | None: + url = f'{self.get_base_url()}/pulls/{pr_number}/comments' + page = 1 + params = {'limit': '50', 'page': str(page)} + collected: list[str] = [] + + while True: + response = httpx.get(url, headers=self.headers, params=params) + response.raise_for_status() + comments = response.json() + + if not comments: + break + + filtered = [ + comment for comment in comments if not comment.get('is_system', False) + ] + + if comment_id is not None: + matching = next( + ( + comment['body'] + for comment in filtered + if self._to_int(comment.get('id')) == comment_id + ), + None, + ) + if matching: + return [matching] + else: + collected.extend( + comment['body'] for comment in filtered if comment.get('body') + ) + + page += 1 + params = {'limit': '50', 'page': str(page)} + + return collected if collected else None + + def get_context_from_external_issues_references( + self, + closing_issues: list[str], + closing_issue_numbers: list[int], + issue_body: str, + review_comments: list[str] | None, + review_threads: list[ReviewThread], + thread_comments: list[str] | None, + ) -> list[str]: + return super().get_context_from_external_issues_references( + closing_issues, + closing_issue_numbers, + issue_body, + review_comments, + review_threads, + thread_comments, + ) + + def get_converted_issues( + self, issue_numbers: list[int] | None = None, comment_id: int | None = None + ) -> list[Issue]: + if not issue_numbers: + raise ValueError('Unspecified issue numbers') + + response = httpx.get(self.download_url, headers=self.headers) + response.raise_for_status() + all_prs = response.json() + + logger.info(f'Limiting resolving to PRs {issue_numbers}.') + filtered = [ + pr + for pr in all_prs + if self._to_int(pr.get('number') or pr.get('index')) in issue_numbers + ] + + converted: list[Issue] = [] + for pr in filtered: + if any(pr.get(key) is None for key in ['number', 'title']): + logger.warning(f'Skipping PR {pr} as it is missing number or title.') + continue + + body = pr.get('body') or '' + pr_number = self._to_int(pr.get('number') or pr.get('index', 0)) + ( + closing_issues, + closing_issue_numbers, + review_comments, + review_threads, + thread_ids, + ) = self.download_pr_metadata(pr_number, comment_id) + head_branch = (pr.get('head') or {}).get('ref') + thread_comments = self.get_pr_comments(pr_number, comment_id) + + closing_issues = self.get_context_from_external_issues_references( + closing_issues, + closing_issue_numbers, + body, + review_comments, + review_threads, + thread_comments, + ) + + issue_details = Issue( + owner=self.owner, + repo=self.repo, + number=pr_number, + title=pr['title'], + body=body, + closing_issues=closing_issues, + review_comments=review_comments, + review_threads=review_threads, + thread_ids=thread_ids, + head_branch=head_branch, + thread_comments=thread_comments, + ) + + converted.append(issue_details) + + return converted diff --git a/openhands/resolver/issue_handler_factory.py b/openhands/resolver/issue_handler_factory.py index 45b927f696..6369392a02 100644 --- a/openhands/resolver/issue_handler_factory.py +++ b/openhands/resolver/issue_handler_factory.py @@ -5,6 +5,10 @@ from openhands.resolver.interfaces.bitbucket import ( BitbucketIssueHandler, BitbucketPRHandler, ) +from openhands.resolver.interfaces.forgejo import ( + ForgejoIssueHandler, + ForgejoPRHandler, +) from openhands.resolver.interfaces.github import GithubIssueHandler, GithubPRHandler from openhands.resolver.interfaces.gitlab import GitlabIssueHandler, GitlabPRHandler from openhands.resolver.interfaces.issue_definitions import ( @@ -69,6 +73,17 @@ class IssueHandlerFactory: ), self.llm_config, ) + elif self.platform == ProviderType.FORGEJO: + return ServiceContextIssue( + ForgejoIssueHandler( + self.owner, + self.repo, + self.token, + self.username, + self.base_domain, + ), + self.llm_config, + ) elif self.platform == ProviderType.AZURE_DEVOPS: # Parse owner as organization/project parts = self.owner.split('/') @@ -125,6 +140,17 @@ class IssueHandlerFactory: ), self.llm_config, ) + elif self.platform == ProviderType.FORGEJO: + return ServiceContextPR( + ForgejoPRHandler( + self.owner, + self.repo, + self.token, + self.username, + self.base_domain, + ), + self.llm_config, + ) elif self.platform == ProviderType.AZURE_DEVOPS: # Parse owner as organization/project parts = self.owner.split('/') diff --git a/openhands/resolver/issue_resolver.py b/openhands/resolver/issue_resolver.py index 155c5ce2e1..c5559ac828 100644 --- a/openhands/resolver/issue_resolver.py +++ b/openhands/resolver/issue_resolver.py @@ -54,6 +54,7 @@ class IssueResolver: def __init__(self, args: Namespace) -> None: """Initialize the IssueResolver with the given parameters. + Params initialized: owner: Owner of the repo. repo: Repository name. @@ -82,6 +83,7 @@ class IssueResolver: or os.getenv('GITLAB_TOKEN') or os.getenv('BITBUCKET_TOKEN') or os.getenv('AZURE_DEVOPS_TOKEN') + or os.getenv('FORGEJO_TOKEN') ) username = args.username if args.username else os.getenv('GIT_USERNAME') if not username: diff --git a/openhands/resolver/send_pull_request.py b/openhands/resolver/send_pull_request.py index d6dd4830db..23a7958b5f 100644 --- a/openhands/resolver/send_pull_request.py +++ b/openhands/resolver/send_pull_request.py @@ -13,6 +13,7 @@ from openhands.integrations.service_types import ProviderType from openhands.llm.llm import LLM from openhands.resolver.interfaces.azure_devops import AzureDevOpsIssueHandler from openhands.resolver.interfaces.bitbucket import BitbucketIssueHandler +from openhands.resolver.interfaces.forgejo import ForgejoIssueHandler from openhands.resolver.interfaces.github import GithubIssueHandler from openhands.resolver.interfaces.gitlab import GitlabIssueHandler from openhands.resolver.interfaces.issue import Issue @@ -26,6 +27,10 @@ from openhands.resolver.utils import identify_token from openhands.utils.async_utils import GENERAL_TIMEOUT, call_async_from_sync from openhands.utils.environment import get_effective_llm_base_url +PR_SIGNATURE = ( + 'Automatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌' +) + def apply_patch(repo_dir: str, patch: str) -> None: """Apply a patch to a repository. @@ -248,7 +253,7 @@ def send_pull_request( git_user_name: str = 'openhands', git_user_email: str = 'openhands@all-hands.dev', ) -> str: - """Send a pull request to a GitHub, GitLab, Bitbucket, or Azure DevOps repository. + """Send a pull request to a GitHub, GitLab, Bitbucket, Forgejo, or Azure DevOps repository. Args: issue: The issue to send the pull request for @@ -262,21 +267,22 @@ def send_pull_request( target_branch: The target branch to create the pull request against (defaults to repository default branch) reviewer: The username of the reviewer to assign pr_title: Custom title for the pull request (optional) - base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, and "dev.azure.com" for Azure DevOps) + base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, "codeberg.org" for Forgejo, and "dev.azure.com" for Azure DevOps) + git_user_name: Git username to configure when creating commits + git_user_email: Git email to configure when creating commits """ if pr_type not in ['branch', 'draft', 'ready']: raise ValueError(f'Invalid pr_type: {pr_type}') # Determine default base_domain based on platform if base_domain is None: - if platform == ProviderType.GITHUB: - base_domain = 'github.com' - elif platform == ProviderType.GITLAB: - base_domain = 'gitlab.com' - elif platform == ProviderType.AZURE_DEVOPS: - base_domain = 'dev.azure.com' - else: # platform == ProviderType.BITBUCKET - base_domain = 'bitbucket.org' + base_domain = { + ProviderType.GITHUB: 'github.com', + ProviderType.GITLAB: 'gitlab.com', + ProviderType.BITBUCKET: 'bitbucket.org', + ProviderType.FORGEJO: 'codeberg.org', + ProviderType.AZURE_DEVOPS: 'dev.azure.com', + }.get(platform, 'github.com') # Create the appropriate handler based on platform handler = None @@ -297,6 +303,11 @@ def send_pull_request( ), None, ) + elif platform == ProviderType.FORGEJO: + handler = ServiceContextIssue( + ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain), + None, + ) elif platform == ProviderType.AZURE_DEVOPS: # For Azure DevOps, owner is "organization/project" organization, project = issue.owner.split('/') @@ -360,11 +371,11 @@ def send_pull_request( pr_body = f'This pull request fixes #{issue.number}.' if additional_message: pr_body += f'\n\n{additional_message}' - pr_body += '\n\nAutomatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌' + pr_body += f'\n\n{PR_SIGNATURE}' # For cross repo pull request, we need to send head parameter like fork_owner:branch as per git documentation here : https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#create-a-pull-request # head parameter usage : The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace head with a user like this: username:branch. - if fork_owner and platform == ProviderType.GITHUB: + if fork_owner and platform in (ProviderType.GITHUB, ProviderType.FORGEJO): head_branch = f'{fork_owner}:{branch_name}' else: head_branch = branch_name @@ -374,17 +385,40 @@ def send_pull_request( url = handler.get_compare_url(branch_name) else: # Prepare the PR for the GitHub API - data = { - 'title': final_pr_title, - ('body' if platform == ProviderType.GITHUB else 'description'): pr_body, - ( - 'head' if platform == ProviderType.GITHUB else 'source_branch' - ): head_branch, - ( - 'base' if platform == ProviderType.GITHUB else 'target_branch' - ): base_branch, - 'draft': pr_type == 'draft', - } + if platform == ProviderType.GITHUB: + data = { + 'title': final_pr_title, + 'body': pr_body, + 'head': head_branch, + 'base': base_branch, + 'draft': pr_type == 'draft', + } + elif platform == ProviderType.GITLAB: + data = { + 'title': final_pr_title, + 'description': pr_body, + 'source_branch': head_branch, + 'target_branch': base_branch, + 'draft': pr_type == 'draft', + } + elif platform == ProviderType.BITBUCKET: + data = { + 'title': final_pr_title, + 'description': pr_body, + 'source_branch': head_branch, + 'target_branch': base_branch, + 'draft': pr_type == 'draft', + } + elif platform == ProviderType.FORGEJO: + data = { + 'title': final_pr_title, + 'body': pr_body, + 'head': head_branch, + 'base': base_branch, + 'draft': pr_type == 'draft', + } + else: + raise ValueError(f'Unsupported platform for PR creation: {platform}') pr_data = handler.create_pull_request(data) url = pr_data['html_url'] @@ -429,13 +463,13 @@ def update_existing_pull_request( # Determine default base_domain based on platform if base_domain is None: - base_domain = ( - 'github.com' - if platform == ProviderType.GITHUB - else 'gitlab.com' - if platform == ProviderType.GITLAB - else 'dev.azure.com' - ) + base_domain = { + ProviderType.GITHUB: 'github.com', + ProviderType.GITLAB: 'gitlab.com', + ProviderType.AZURE_DEVOPS: 'dev.azure.com', + ProviderType.BITBUCKET: 'bitbucket.org', + ProviderType.FORGEJO: 'codeberg.org', + }.get(platform, 'github.com') handler = None if platform == ProviderType.GITHUB: @@ -443,6 +477,11 @@ def update_existing_pull_request( GithubIssueHandler(issue.owner, issue.repo, token, username, base_domain), llm_config, ) + elif platform == ProviderType.GITLAB: + handler = ServiceContextIssue( + GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain), + llm_config, + ) elif platform == ProviderType.AZURE_DEVOPS: # For Azure DevOps, owner is "organization/project" organization, project = issue.owner.split('/') @@ -450,11 +489,20 @@ def update_existing_pull_request( AzureDevOpsIssueHandler(token, organization, project, issue.repo), llm_config, ) - else: # platform == ProviderType.GITLAB + elif platform == ProviderType.BITBUCKET: handler = ServiceContextIssue( - GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain), + BitbucketIssueHandler( + issue.owner, issue.repo, token, username, base_domain + ), llm_config, ) + elif platform == ProviderType.FORGEJO: + handler = ServiceContextIssue( + ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain), + llm_config, + ) + else: + raise ValueError(f'Unsupported platform: {platform}') branch_name = issue.head_branch @@ -503,7 +551,10 @@ def update_existing_pull_request( comment_message = response.choices[0].message.content.strip() except (json.JSONDecodeError, TypeError): - comment_message = f'A new OpenHands update is available, but failed to parse or summarize the changes:\n{additional_message}' + comment_message = ( + 'A new OpenHands update is available, but failed to parse or summarize ' + f'the changes:\n{additional_message}' + ) # Post a comment on the PR if comment_message: @@ -727,10 +778,11 @@ def main() -> None: or os.getenv('GITHUB_TOKEN') or os.getenv('GITLAB_TOKEN') or os.getenv('AZURE_DEVOPS_TOKEN') + or os.getenv('FORGEJO_TOKEN') ) if not token: raise ValueError( - 'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, or AZURE_DEVOPS_TOKEN environment variable.' + 'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, AZURE_DEVOPS_TOKEN, or FORGEJO_TOKEN environment variable.' ) username = my_args.username if my_args.username else os.getenv('GIT_USERNAME') diff --git a/openhands/server/routes/git.py b/openhands/server/routes/git.py index a6807a2e2a..7223c6fab6 100644 --- a/openhands/server/routes/git.py +++ b/openhands/server/routes/git.py @@ -1,5 +1,5 @@ from types import MappingProxyType -from typing import cast +from typing import Annotated, cast from fastapi import APIRouter, Depends, Query, status from fastapi.responses import JSONResponse @@ -67,7 +67,7 @@ async def get_user_installations( @app.get('/repositories', response_model=list[Repository]) async def get_user_repositories( sort: str = 'pushed', - selected_provider: ProviderType | None = None, + selected_provider: Annotated[ProviderType | None, Query()] = None, page: int | None = None, per_page: int | None = None, installation_id: str | None = None, @@ -137,7 +137,7 @@ async def search_repositories( per_page: int = 5, sort: str = 'stars', order: str = 'desc', - selected_provider: ProviderType | None = None, + selected_provider: Annotated[ProviderType | None, Query()] = None, provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens), access_token: SecretStr | None = Depends(get_access_token), user_id: str | None = Depends(get_user_id), @@ -171,7 +171,7 @@ async def search_branches( repository: str, query: str, per_page: int = 30, - selected_provider: ProviderType | None = None, + selected_provider: Annotated[ProviderType | None, Query()] = None, provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens), access_token: SecretStr | None = Depends(get_access_token), user_id: str | None = Depends(get_user_id), @@ -243,6 +243,7 @@ async def get_repository_branches( repository: str, page: int = 1, per_page: int = 30, + selected_provider: Annotated[ProviderType | None, Query()] = None, provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens), access_token: SecretStr | None = Depends(get_access_token), user_id: str | None = Depends(get_user_id), @@ -253,6 +254,7 @@ async def get_repository_branches( repository: The repository name in the format 'owner/repo' page: Page number for pagination (default: 1) per_page: Number of branches per page (default: 30) + selected_provider: Optional provider hint to avoid trying other providers Returns: A paginated response with branches for the repository @@ -263,7 +265,10 @@ async def get_repository_branches( ) try: branches_response: PaginatedBranchesResponse = await client.get_branches( - repository, page=page, per_page=per_page + repository, + specified_provider=selected_provider, + page=page, + per_page=per_page, ) return branches_response diff --git a/tests/unit/integrations/bitbucket/test_bitbucket.py b/tests/unit/integrations/bitbucket/test_bitbucket.py index 5d29ee3032..c82d0f4485 100644 --- a/tests/unit/integrations/bitbucket/test_bitbucket.py +++ b/tests/unit/integrations/bitbucket/test_bitbucket.py @@ -15,7 +15,7 @@ from openhands.integrations.utils import validate_provider_token from openhands.resolver.interfaces.bitbucket import BitbucketIssueHandler from openhands.resolver.interfaces.issue import Issue from openhands.resolver.interfaces.issue_definitions import ServiceContextIssue -from openhands.resolver.send_pull_request import send_pull_request +from openhands.resolver.send_pull_request import PR_SIGNATURE, send_pull_request from openhands.runtime.base import Runtime from openhands.server.routes.secrets import check_provider_tokens from openhands.server.settings import POSTProviderModel @@ -219,7 +219,7 @@ def test_send_pull_request_bitbucket( mock_service_context.assert_called_once() # Verify create_pull_request was called with the correct data - expected_body = 'This pull request fixes #123.\n\nAutomatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌' + expected_body = f'This pull request fixes #123.\n\n{PR_SIGNATURE}' mock_service.create_pull_request.assert_called_once_with( { 'title': 'Test PR', @@ -353,8 +353,9 @@ class TestBitbucketProviderDomain(unittest.TestCase): # Provider Token Validation Tests @pytest.mark.asyncio async def test_validate_provider_token_with_bitbucket_token(): - """Test that validate_provider_token correctly identifies a Bitbucket token - and doesn't try to validate it as GitHub or GitLab. + """Test that validate_provider_token correctly identifies a Bitbucket token. + + Ensures GitHub and GitLab validators are not invoked. """ # Mock the service classes to avoid actual API calls with ( @@ -392,9 +393,7 @@ async def test_validate_provider_token_with_bitbucket_token(): @pytest.mark.asyncio async def test_check_provider_tokens_with_only_bitbucket(): - """Test that check_provider_tokens doesn't try to validate GitHub or GitLab tokens - when only a Bitbucket token is provided. - """ + """Test that check_provider_tokens ignores GitHub/GitLab tokens when only Bitbucket is provided.""" # Create a mock validate_provider_token function mock_validate = AsyncMock() mock_validate.return_value = ProviderType.BITBUCKET diff --git a/tests/unit/integrations/forgejo/test_forgejo_send_pull_request.py b/tests/unit/integrations/forgejo/test_forgejo_send_pull_request.py new file mode 100644 index 0000000000..f697f07b19 --- /dev/null +++ b/tests/unit/integrations/forgejo/test_forgejo_send_pull_request.py @@ -0,0 +1,78 @@ +"""Tests for Forgejo integration with send_pull_request.""" + +from unittest.mock import MagicMock, patch + +from openhands.integrations.service_types import ProviderType as ServiceProviderType +from openhands.resolver.interfaces.issue import Issue +from openhands.resolver.send_pull_request import PR_SIGNATURE, send_pull_request + + +@patch('openhands.resolver.send_pull_request.ServiceContextIssue') +@patch('openhands.resolver.send_pull_request.ForgejoIssueHandler') +@patch('subprocess.run') +def test_send_pull_request_forgejo( + mock_run, mock_forgejo_handler, mock_service_context +): + """Ensure we can build and submit a Forgejo pull request.""" + mock_run.return_value = MagicMock(returncode=0) + + handler_instance = MagicMock() + mock_forgejo_handler.return_value = handler_instance + + service_context_instance = MagicMock() + service_context_instance.get_branch_name.return_value = 'openhands-fix-issue-7' + service_context_instance.branch_exists.return_value = True + service_context_instance.get_default_branch_name.return_value = 'main' + service_context_instance.get_clone_url.return_value = ( + 'https://codeberg.org/example/repo.git' + ) + service_context_instance.create_pull_request.return_value = { + 'html_url': 'https://codeberg.org/example/repo/pulls/42', + 'number': 42, + } + service_context_instance._strategy = MagicMock() + mock_service_context.return_value = service_context_instance + + issue = Issue( + number=7, + title='Fix the Forgejo PR flow', + owner='example', + repo='repo', + body='Details about the fix', + created_at='2024-01-01T00:00:00Z', + updated_at='2024-01-01T00:00:00Z', + closed_at=None, + head_branch='feature-branch', + thread_ids=None, + ) + + result = send_pull_request( + issue=issue, + token='forgejo-token', + username=None, + platform=ServiceProviderType.FORGEJO, + patch_dir='/tmp', + pr_type='ready', + pr_title='Fix the Forgejo PR flow', + target_branch='main', + ) + + assert result == 'https://codeberg.org/example/repo/pulls/42' + + mock_forgejo_handler.assert_called_once_with( + 'example', 'repo', 'forgejo-token', None, 'codeberg.org' + ) + mock_service_context.assert_called_once_with(handler_instance, None) + + expected_payload = { + 'title': 'Fix the Forgejo PR flow', + 'body': f'This pull request fixes #7.\n\n{PR_SIGNATURE}', + 'head': 'openhands-fix-issue-7', + 'base': 'main', + 'draft': False, + } + service_context_instance.create_pull_request.assert_called_once_with( + expected_payload + ) + + mock_run.assert_called() diff --git a/tests/unit/resolver/test_issue_handler_factory.py b/tests/unit/resolver/test_issue_handler_factory.py index 12932a1282..96b513be0a 100644 --- a/tests/unit/resolver/test_issue_handler_factory.py +++ b/tests/unit/resolver/test_issue_handler_factory.py @@ -4,6 +4,10 @@ from pydantic import SecretStr from openhands.core.config import LLMConfig from openhands.integrations.provider import ProviderType from openhands.resolver.interfaces.azure_devops import AzureDevOpsIssueHandler +from openhands.resolver.interfaces.forgejo import ( + ForgejoIssueHandler, + ForgejoPRHandler, +) from openhands.resolver.interfaces.github import GithubIssueHandler, GithubPRHandler from openhands.resolver.interfaces.gitlab import GitlabIssueHandler, GitlabPRHandler from openhands.resolver.interfaces.issue_definitions import ( @@ -28,7 +32,6 @@ def factory_params(llm_config): 'repo': 'test-repo', 'token': 'test-token', 'username': 'test-user', - 'base_domain': 'github.com', 'llm_config': llm_config, } @@ -46,24 +49,76 @@ def azure_factory_params(llm_config): test_cases = [ - # platform, issue_type, expected_context_type, expected_handler_type, use_azure_params - (ProviderType.GITHUB, 'issue', ServiceContextIssue, GithubIssueHandler, False), - (ProviderType.GITHUB, 'pr', ServiceContextPR, GithubPRHandler, False), - (ProviderType.GITLAB, 'issue', ServiceContextIssue, GitlabIssueHandler, False), - (ProviderType.GITLAB, 'pr', ServiceContextPR, GitlabPRHandler, False), + # platform, issue_type, base_domain, expected_context_type, expected_handler_type, use_azure_params + ( + ProviderType.GITHUB, + 'issue', + 'github.com', + ServiceContextIssue, + GithubIssueHandler, + False, + ), + ( + ProviderType.GITHUB, + 'pr', + 'github.com', + ServiceContextPR, + GithubPRHandler, + False, + ), + ( + ProviderType.GITLAB, + 'issue', + 'gitlab.com', + ServiceContextIssue, + GitlabIssueHandler, + False, + ), + ( + ProviderType.GITLAB, + 'pr', + 'gitlab.com', + ServiceContextPR, + GitlabPRHandler, + False, + ), + ( + ProviderType.FORGEJO, + 'issue', + 'codeberg.org', + ServiceContextIssue, + ForgejoIssueHandler, + False, + ), + ( + ProviderType.FORGEJO, + 'pr', + 'codeberg.org', + ServiceContextPR, + ForgejoPRHandler, + False, + ), ( ProviderType.AZURE_DEVOPS, 'issue', + 'dev.azure.com', ServiceContextIssue, AzureDevOpsIssueHandler, True, ), - (ProviderType.AZURE_DEVOPS, 'pr', ServiceContextPR, AzureDevOpsIssueHandler, True), + ( + ProviderType.AZURE_DEVOPS, + 'pr', + 'dev.azure.com', + ServiceContextPR, + AzureDevOpsIssueHandler, + True, + ), ] @pytest.mark.parametrize( - 'platform,issue_type,expected_context_type,expected_handler_type,use_azure_params', + 'platform,issue_type,base_domain,expected_context_type,expected_handler_type,use_azure_params', test_cases, ) def test_handler_creation( @@ -71,11 +126,16 @@ def test_handler_creation( azure_factory_params, platform: ProviderType, issue_type: str, + base_domain: str, expected_context_type: type, expected_handler_type: type, use_azure_params: bool, ): - params = azure_factory_params if use_azure_params else factory_params + params = ( + azure_factory_params + if use_azure_params + else {**factory_params, 'base_domain': base_domain} + ) factory = IssueHandlerFactory(**params, platform=platform, issue_type=issue_type) handler = factory.create() @@ -86,7 +146,10 @@ def test_handler_creation( def test_invalid_issue_type(factory_params): factory = IssueHandlerFactory( - **factory_params, platform=ProviderType.GITHUB, issue_type='invalid' + **factory_params, + platform=ProviderType.GITHUB, + issue_type='invalid', + base_domain='github.com', ) with pytest.raises(ValueError, match='Invalid issue type: invalid'): diff --git a/tests/unit/test_forgejo_service.py b/tests/unit/test_forgejo_service.py new file mode 100644 index 0000000000..dee8d9bc27 --- /dev/null +++ b/tests/unit/test_forgejo_service.py @@ -0,0 +1,273 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest +from pydantic import SecretStr + +from openhands.integrations.forgejo.forgejo_service import ForgejoService +from openhands.integrations.service_types import ( + ProviderType, + Repository, + RequestMethod, + User, +) +from openhands.server.types import AppMode + + +@pytest.fixture +def forgejo_service(): + return ForgejoService(token=SecretStr('test_token')) + + +@pytest.mark.asyncio +async def test_get_user(forgejo_service): + # Mock response data + mock_user_data = { + 'id': 1, + 'username': 'test_user', + 'avatar_url': 'https://codeberg.org/avatar/test_user', + 'full_name': 'Test User', + 'email': 'test@example.com', + 'organization': 'Test Org', + } + + # Mock the _make_request method + forgejo_service._make_request = AsyncMock(return_value=(mock_user_data, {})) + + # Call the method + user = await forgejo_service.get_user() + + # Verify the result + assert isinstance(user, User) + assert user.id == '1' + assert user.login == 'test_user' + assert user.avatar_url == 'https://codeberg.org/avatar/test_user' + assert user.name == 'Test User' + assert user.email == 'test@example.com' + assert user.company == 'Test Org' + + # Verify the _fetch_data call + forgejo_service._make_request.assert_called_once_with( + f'{forgejo_service.BASE_URL}/user' + ) + + +@pytest.mark.asyncio +async def test_search_repositories(forgejo_service): + # Mock response data + mock_repos_data = { + 'data': [ + { + 'id': 1, + 'full_name': 'test_user/repo1', + 'stars_count': 10, + }, + { + 'id': 2, + 'full_name': 'test_user/repo2', + 'stars_count': 20, + }, + ] + } + + # Mock the _fetch_data method + forgejo_service._make_request = AsyncMock(return_value=(mock_repos_data, {})) + + # Call the method + repos = await forgejo_service.search_repositories( + 'test', 10, 'updated', 'desc', public=False, app_mode=AppMode.OSS + ) + + # Verify the result + assert len(repos) == 2 + assert all(isinstance(repo, Repository) for repo in repos) + assert repos[0].id == '1' + assert repos[0].full_name == 'test_user/repo1' + assert repos[0].stargazers_count == 10 + assert repos[0].git_provider == ProviderType.FORGEJO + assert repos[1].id == '2' + assert repos[1].full_name == 'test_user/repo2' + assert repos[1].stargazers_count == 20 + assert repos[1].git_provider == ProviderType.FORGEJO + + # Verify the _fetch_data call + forgejo_service._make_request.assert_called_once_with( + f'{forgejo_service.BASE_URL}/repos/search', + { + 'q': 'test', + 'limit': 10, + 'sort': 'updated', + 'order': 'desc', + 'mode': 'source', + }, + ) + + +@pytest.mark.asyncio +async def test_get_all_repositories(forgejo_service): + # Mock response data for first page + mock_repos_data_page1 = [ + { + 'id': 1, + 'full_name': 'test_user/repo1', + 'stars_count': 10, + }, + { + 'id': 2, + 'full_name': 'test_user/repo2', + 'stars_count': 20, + }, + ] + + # Mock response data for second page + mock_repos_data_page2 = [ + { + 'id': 3, + 'full_name': 'test_user/repo3', + 'stars_count': 30, + }, + ] + + # Mock the _fetch_data method to return different data for different pages + forgejo_service._make_request = AsyncMock() + forgejo_service._make_request.side_effect = [ + ( + mock_repos_data_page1, + {'Link': '; rel="next"'}, + ), + (mock_repos_data_page2, {'Link': ''}), + ] + + # Call the method + repos = await forgejo_service.get_all_repositories('updated', AppMode.OSS) + + # Verify the result + assert len(repos) == 3 + assert all(isinstance(repo, Repository) for repo in repos) + assert repos[0].id == '1' + assert repos[0].full_name == 'test_user/repo1' + assert repos[0].stargazers_count == 10 + assert repos[0].git_provider == ProviderType.FORGEJO + assert repos[1].id == '2' + assert repos[1].full_name == 'test_user/repo2' + assert repos[1].stargazers_count == 20 + assert repos[1].git_provider == ProviderType.FORGEJO + assert repos[2].id == '3' + assert repos[2].full_name == 'test_user/repo3' + assert repos[2].stargazers_count == 30 + assert repos[2].git_provider == ProviderType.FORGEJO + + # Verify the _fetch_data calls + assert forgejo_service._make_request.call_count == 2 + forgejo_service._make_request.assert_any_call( + f'{forgejo_service.BASE_URL}/user/repos', + {'page': '1', 'limit': '100', 'sort': 'updated'}, + ) + forgejo_service._make_request.assert_any_call( + f'{forgejo_service.BASE_URL}/user/repos', + {'page': '2', 'limit': '100', 'sort': 'updated'}, + ) + + +@pytest.mark.asyncio +async def test_make_request_success(forgejo_service): + # Mock httpx.AsyncClient + mock_client = AsyncMock() + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.raise_for_status = MagicMock() + mock_response.json.return_value = {'key': 'value'} + mock_response.headers = {'Link': 'next_link', 'Content-Type': 'application/json'} + mock_client.__aenter__.return_value.get.return_value = mock_response + + # Patch httpx.AsyncClient + with patch('httpx.AsyncClient', return_value=mock_client): + # Call the method + result, headers = await forgejo_service._make_request( + 'https://test.url', {'param': 'value'} + ) + + # Verify the result + assert result == {'key': 'value'} + assert headers == {'Link': 'next_link'} + mock_response.raise_for_status.assert_called_once() + + +@pytest.mark.asyncio +async def test_make_request_auth_error(forgejo_service): + # Mock httpx.AsyncClient + mock_client = AsyncMock() + mock_response = MagicMock() + mock_response.status_code = 401 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + '401 Unauthorized', request=MagicMock(), response=mock_response + ) + mock_client.__aenter__.return_value.get.return_value = mock_response + + # Patch httpx.AsyncClient + with patch('httpx.AsyncClient', return_value=mock_client): + # Call the method and expect an exception + with pytest.raises(Exception) as excinfo: + await forgejo_service._make_request('https://test.url', {'param': 'value'}) + + # Verify the exception + assert 'Invalid forgejo token' in str(excinfo.value) + + +@pytest.mark.asyncio +async def test_make_request_other_error(forgejo_service): + # Mock httpx.AsyncClient + mock_client = AsyncMock() + mock_response = MagicMock() + mock_response.status_code = 500 + mock_response.raise_for_status.side_effect = httpx.HTTPStatusError( + '500 Server Error', request=MagicMock(), response=mock_response + ) + mock_client.__aenter__.return_value.get.return_value = mock_response + + # Patch httpx.AsyncClient + with patch('httpx.AsyncClient', return_value=mock_client): + # Call the method and expect an exception + with pytest.raises(Exception) as excinfo: + await forgejo_service._make_request('https://test.url', {'param': 'value'}) + + # Verify the exception + assert 'Unknown error' in str(excinfo.value) + + +@pytest.mark.asyncio +async def test_create_pull_request(forgejo_service): + mock_response = {'index': 42, 'html_url': 'https://example/pr/42'} + forgejo_service._make_request = AsyncMock(return_value=(mock_response, {})) + + data = {'owner': 'org', 'repo': 'project', 'title': 'Add feature'} + result = await forgejo_service.create_pull_request(data.copy()) + + assert result['number'] == 42 + forgejo_service._make_request.assert_awaited_once_with( + f'{forgejo_service.BASE_URL}/repos/org/project/pulls', + {'title': 'Add feature'}, + method=RequestMethod.POST, + ) + + +@pytest.mark.asyncio +async def test_request_reviewers(forgejo_service): + forgejo_service._make_request = AsyncMock(return_value=({}, {})) + + await forgejo_service.request_reviewers('org/project', 5, ['alice']) + + forgejo_service._make_request.assert_awaited_once_with( + f'{forgejo_service.BASE_URL}/repos/org/project/pulls/5/requested_reviewers', + {'reviewers': ['alice']}, + method=RequestMethod.POST, + ) + + +@pytest.mark.asyncio +async def test_request_reviewers_empty_list(forgejo_service): + forgejo_service._make_request = AsyncMock() + + await forgejo_service.request_reviewers('org/project', 5, []) + + forgejo_service._make_request.assert_not_called() From f742811e81c13a732ea7de38018f2f428ec26774 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 28 Dec 2025 08:58:26 -0500 Subject: [PATCH 68/80] chore(deps): bump actions/setup-node from 4 to 6 (#11442) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Graham Neubig --- .github/workflows/e2e-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index 4339d5f88e..0d926027d8 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -38,7 +38,7 @@ jobs: sudo apt-get install -y libgtk-3-0 libnotify4 libnss3 libxss1 libxtst6 xauth xvfb libgbm1 libasound2t64 netcat-openbsd - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: '22' cache: 'npm' From 81519343c46983676d6e0ea3f4143ef1382ea46a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 28 Dec 2025 09:49:02 -0500 Subject: [PATCH 69/80] chore(deps): bump actions/download-artifact from 4 to 6 (#11524) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Graham Neubig --- .github/workflows/ghcr-build.yml | 4 ++-- .github/workflows/py-tests.yml | 2 +- .github/workflows/vscode-extension-build.yml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ghcr-build.yml b/.github/workflows/ghcr-build.yml index 40848816dd..8c2bbdec86 100644 --- a/.github/workflows/ghcr-build.yml +++ b/.github/workflows/ghcr-build.yml @@ -268,7 +268,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Download runtime source for fork if: github.event.pull_request.head.repo.fork - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v6 with: name: runtime-src-${{ matrix.base_image.tag }} path: containers/runtime @@ -330,7 +330,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Download runtime source for fork if: github.event.pull_request.head.repo.fork - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v6 with: name: runtime-src-${{ matrix.base_image.tag }} path: containers/runtime diff --git a/.github/workflows/py-tests.yml b/.github/workflows/py-tests.yml index 5c4c35f6bc..713abc702f 100644 --- a/.github/workflows/py-tests.yml +++ b/.github/workflows/py-tests.yml @@ -113,7 +113,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: actions/download-artifact@v5 + - uses: actions/download-artifact@v6 id: download with: pattern: coverage-* diff --git a/.github/workflows/vscode-extension-build.yml b/.github/workflows/vscode-extension-build.yml index 951d5c34a8..4af07617f7 100644 --- a/.github/workflows/vscode-extension-build.yml +++ b/.github/workflows/vscode-extension-build.yml @@ -142,7 +142,7 @@ jobs: steps: - name: Download .vsix artifact - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v6 with: name: vscode-extension path: ./ From 09d1748a1412dbfa47bfe48832437ee1f281144e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 28 Dec 2025 09:49:17 -0500 Subject: [PATCH 70/80] build(deps): bump actions/setup-python from 5 to 6 (#11755) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Graham Neubig --- .github/workflows/check-package-versions.yml | 2 +- .github/workflows/e2e-tests.yml | 2 +- .github/workflows/openhands-resolver.yml | 2 +- .github/workflows/vscode-extension-build.yml | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/check-package-versions.yml b/.github/workflows/check-package-versions.yml index 44e680ff4b..e13e12cb74 100644 --- a/.github/workflows/check-package-versions.yml +++ b/.github/workflows/check-package-versions.yml @@ -15,7 +15,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.12" diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index 0d926027d8..030b2d331b 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -27,7 +27,7 @@ jobs: poetry-version: 2.1.3 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: '3.12' cache: 'poetry' diff --git a/.github/workflows/openhands-resolver.yml b/.github/workflows/openhands-resolver.yml index cfb7298974..9d2914f2ee 100644 --- a/.github/workflows/openhands-resolver.yml +++ b/.github/workflows/openhands-resolver.yml @@ -89,7 +89,7 @@ jobs: uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: "3.12" - name: Upgrade pip diff --git a/.github/workflows/vscode-extension-build.yml b/.github/workflows/vscode-extension-build.yml index 4af07617f7..183ecaeb85 100644 --- a/.github/workflows/vscode-extension-build.yml +++ b/.github/workflows/vscode-extension-build.yml @@ -37,7 +37,7 @@ jobs: node-version: '22' - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: '3.12' From ee50f333ba2df67eb0f7f78add688644d74b564c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 28 Dec 2025 09:51:34 -0500 Subject: [PATCH 71/80] chore(deps): bump actions/upload-artifact from 4 to 5 (#11805) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Graham Neubig --- .github/workflows/e2e-tests.yml | 4 ++-- .github/workflows/fe-e2e-tests.yml | 2 +- .github/workflows/ghcr-build.yml | 2 +- .github/workflows/openhands-resolver.yml | 2 +- .github/workflows/py-tests.yml | 4 ++-- .github/workflows/vscode-extension-build.yml | 2 +- 6 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/e2e-tests.yml b/.github/workflows/e2e-tests.yml index 030b2d331b..caa24d1b21 100644 --- a/.github/workflows/e2e-tests.yml +++ b/.github/workflows/e2e-tests.yml @@ -192,7 +192,7 @@ jobs: - name: Upload test results if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: playwright-report path: tests/e2e/test-results/ @@ -200,7 +200,7 @@ jobs: - name: Upload OpenHands logs if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: openhands-logs path: | diff --git a/.github/workflows/fe-e2e-tests.yml b/.github/workflows/fe-e2e-tests.yml index 7ee79e63fc..6747624176 100644 --- a/.github/workflows/fe-e2e-tests.yml +++ b/.github/workflows/fe-e2e-tests.yml @@ -39,7 +39,7 @@ jobs: working-directory: ./frontend run: npx playwright test --project=chromium - name: Upload Playwright report - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 if: always() with: name: playwright-report diff --git a/.github/workflows/ghcr-build.yml b/.github/workflows/ghcr-build.yml index 8c2bbdec86..9bdcd073d5 100644 --- a/.github/workflows/ghcr-build.yml +++ b/.github/workflows/ghcr-build.yml @@ -161,7 +161,7 @@ jobs: context: containers/runtime - name: Upload runtime source for fork if: github.event.pull_request.head.repo.fork - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: runtime-src-${{ matrix.base_image.tag }} path: containers/runtime diff --git a/.github/workflows/openhands-resolver.yml b/.github/workflows/openhands-resolver.yml index 9d2914f2ee..6f4be5c398 100644 --- a/.github/workflows/openhands-resolver.yml +++ b/.github/workflows/openhands-resolver.yml @@ -269,7 +269,7 @@ jobs: fi - name: Upload output.jsonl as artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 if: always() # Upload even if the previous steps fail with: name: resolver-output diff --git a/.github/workflows/py-tests.yml b/.github/workflows/py-tests.yml index 713abc702f..8676d00b7f 100644 --- a/.github/workflows/py-tests.yml +++ b/.github/workflows/py-tests.yml @@ -63,7 +63,7 @@ jobs: env: COVERAGE_FILE: ".coverage.runtime.${{ matrix.python_version }}" - name: Store coverage file - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: coverage-openhands path: | @@ -95,7 +95,7 @@ jobs: env: COVERAGE_FILE: ".coverage.enterprise.${{ matrix.python_version }}" - name: Store coverage file - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: coverage-enterprise path: ".coverage.enterprise.${{ matrix.python_version }}" diff --git a/.github/workflows/vscode-extension-build.yml b/.github/workflows/vscode-extension-build.yml index 183ecaeb85..5642b82f7a 100644 --- a/.github/workflows/vscode-extension-build.yml +++ b/.github/workflows/vscode-extension-build.yml @@ -70,7 +70,7 @@ jobs: fi - name: Upload VSCode extension artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v6 with: name: vscode-extension path: openhands/integrations/vscode/openhands-vscode-0.0.1.vsix From 30114666adece8dff4d7d6836b8c712561c420b9 Mon Sep 17 00:00:00 2001 From: Tim O'Farrell Date: Sun, 28 Dec 2025 11:57:08 -0700 Subject: [PATCH 72/80] Bump the SDK to 1.7.1 (#12182) Co-authored-by: openhands --- enterprise/poetry.lock | 42 +++++++++++++++++++++--------------------- poetry.lock | 34 +++++++++++++++++----------------- pyproject.toml | 8 ++++---- 3 files changed, 42 insertions(+), 42 deletions(-) diff --git a/enterprise/poetry.lock b/enterprise/poetry.lock index 2535aef566..ca4b190ec1 100644 --- a/enterprise/poetry.lock +++ b/enterprise/poetry.lock @@ -4558,25 +4558,25 @@ valkey = ["valkey (>=6)"] [[package]] name = "litellm" -version = "1.80.7" +version = "1.80.11" description = "Library to easily interface with LLM API providers" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"}, - {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"}, + {file = "litellm-1.80.11-py3-none-any.whl", hash = "sha256:406283d66ead77dc7ff0e0b2559c80e9e497d8e7c2257efb1cb9210a20d09d54"}, + {file = "litellm-1.80.11.tar.gz", hash = "sha256:c9fc63e7acb6360363238fe291bcff1488c59ff66020416d8376c0ee56414a19"}, ] [package.dependencies] aiohttp = ">=3.10" click = "*" fastuuid = ">=0.13.0" -grpcio = ">=1.62.3,<1.68.0" +grpcio = {version = ">=1.62.3,<1.68.0", markers = "python_version < \"3.14\""} httpx = ">=0.23.0" importlib-metadata = ">=6.8.0" jinja2 = ">=3.1.2,<4.0.0" -jsonschema = ">=4.22.0,<5.0.0" +jsonschema = ">=4.23.0,<5.0.0" openai = ">=2.8.0" pydantic = ">=2.5.0,<3.0.0" python-dotenv = ">=0.2.0" @@ -4587,7 +4587,7 @@ tokenizers = "*" caching = ["diskcache (>=5.6.1,<6.0.0)"] extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"] mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""] -proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] +proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.27)", "litellm-proxy-extras (==0.4.16)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""] utils = ["numpydoc"] @@ -5836,14 +5836,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.6.0" +version = "1.7.1" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, - {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, + {file = "openhands_agent_server-1.7.1-py3-none-any.whl", hash = "sha256:e5c57f1b73293d00a68b77f9d290f59d9e2217d9df844fb01c7d2f929c3417f4"}, + {file = "openhands_agent_server-1.7.1.tar.gz", hash = "sha256:c82e1e6748ea3b4278ef2ee72f091dc37da6667c854b3aa3c0bc616086a82310"}, ] [package.dependencies] @@ -5860,7 +5860,7 @@ wsproto = ">=1.2.0" [[package]] name = "openhands-ai" -version = "0.0.0-post.5750+f19fb1043" +version = "0.0.0-post.5742+ee50f333b" description = "OpenHands: Code Less, Make More" optional = false python-versions = "^3.12,<3.14" @@ -5896,15 +5896,15 @@ json-repair = "*" jupyter_kernel_gateway = "*" kubernetes = "^33.1.0" libtmux = ">=0.46.2" -litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*" +litellm = ">=1.74.3, !=1.64.4, !=1.67.*" lmnr = "^0.7.20" memory-profiler = "^0.61.0" numpy = "*" openai = "2.8.0" openhands-aci = "0.3.2" -openhands-agent-server = "1.6.0" -openhands-sdk = "1.6.0" -openhands-tools = "1.6.0" +openhands-agent-server = "1.7.1" +openhands-sdk = "1.7.1" +openhands-tools = "1.7.1" opentelemetry-api = "^1.33.1" opentelemetry-exporter-otlp-proto-grpc = "^1.33.1" pathspec = "^0.12.1" @@ -5960,21 +5960,21 @@ url = ".." [[package]] name = "openhands-sdk" -version = "1.6.0" +version = "1.7.1" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, - {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, + {file = "openhands_sdk-1.7.1-py3-none-any.whl", hash = "sha256:e097e34dfbd45f38225ae2ff4830702424bcf742bc197b5a811540a75265b135"}, + {file = "openhands_sdk-1.7.1.tar.gz", hash = "sha256:e13d1fe8bf14dffd91e9080608072a989132c981cf9bfcd124fa4f7a68a13691"}, ] [package.dependencies] deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" -litellm = ">=1.80.7" +litellm = ">=1.80.10" lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" @@ -5987,14 +5987,14 @@ boto3 = ["boto3 (>=1.35.0)"] [[package]] name = "openhands-tools" -version = "1.6.0" +version = "1.7.1" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, - {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, + {file = "openhands_tools-1.7.1-py3-none-any.whl", hash = "sha256:e25815f24925e94fbd4d8c3fd9b2147a0556fde595bf4f80a7dbba1014ea3c86"}, + {file = "openhands_tools-1.7.1.tar.gz", hash = "sha256:f3823f7bd302c78969c454730cf793eb63109ce2d986e78585989c53986cc966"}, ] [package.dependencies] diff --git a/poetry.lock b/poetry.lock index aba2364232..61097d2151 100644 --- a/poetry.lock +++ b/poetry.lock @@ -5640,25 +5640,25 @@ types-tqdm = "*" [[package]] name = "litellm" -version = "1.80.7" +version = "1.80.11" description = "Library to easily interface with LLM API providers" optional = false python-versions = "<4.0,>=3.9" groups = ["main"] files = [ - {file = "litellm-1.80.7-py3-none-any.whl", hash = "sha256:f7d993f78c1e0e4e1202b2a925cc6540b55b6e5fb055dd342d88b145ab3102ed"}, - {file = "litellm-1.80.7.tar.gz", hash = "sha256:3977a8d195aef842d01c18bf9e22984829363c6a4b54daf9a43c9dd9f190b42c"}, + {file = "litellm-1.80.11-py3-none-any.whl", hash = "sha256:406283d66ead77dc7ff0e0b2559c80e9e497d8e7c2257efb1cb9210a20d09d54"}, + {file = "litellm-1.80.11.tar.gz", hash = "sha256:c9fc63e7acb6360363238fe291bcff1488c59ff66020416d8376c0ee56414a19"}, ] [package.dependencies] aiohttp = ">=3.10" click = "*" fastuuid = ">=0.13.0" -grpcio = ">=1.62.3,<1.68.0" +grpcio = {version = ">=1.62.3,<1.68.0", markers = "python_version < \"3.14\""} httpx = ">=0.23.0" importlib-metadata = ">=6.8.0" jinja2 = ">=3.1.2,<4.0.0" -jsonschema = ">=4.22.0,<5.0.0" +jsonschema = ">=4.23.0,<5.0.0" openai = ">=2.8.0" pydantic = ">=2.5.0,<3.0.0" python-dotenv = ">=0.2.0" @@ -5669,7 +5669,7 @@ tokenizers = "*" caching = ["diskcache (>=5.6.1,<6.0.0)"] extra-proxy = ["azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-keyvault-secrets (>=4.8.0,<5.0.0)", "google-cloud-iam (>=2.19.1,<3.0.0)", "google-cloud-kms (>=2.21.3,<3.0.0)", "prisma (==0.11.0)", "redisvl (>=0.4.1,<0.5.0) ; python_version >= \"3.9\" and python_version < \"3.14\"", "resend (>=0.8.0)"] mlflow = ["mlflow (>3.1.4) ; python_version >= \"3.10\""] -proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.22)", "litellm-proxy-extras (==0.4.9)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] +proxy = ["PyJWT (>=2.10.1,<3.0.0) ; python_version >= \"3.9\"", "apscheduler (>=3.10.4,<4.0.0)", "azure-identity (>=1.15.0,<2.0.0) ; python_version >= \"3.9\"", "azure-storage-blob (>=12.25.1,<13.0.0)", "backoff", "boto3 (==1.36.0)", "cryptography", "fastapi (>=0.120.1)", "fastapi-sso (>=0.16.0,<0.17.0)", "gunicorn (>=23.0.0,<24.0.0)", "litellm-enterprise (==0.1.27)", "litellm-proxy-extras (==0.4.16)", "mcp (>=1.21.2,<2.0.0) ; python_version >= \"3.10\"", "orjson (>=3.9.7,<4.0.0)", "polars (>=1.31.0,<2.0.0) ; python_version >= \"3.10\"", "pynacl (>=1.5.0,<2.0.0)", "python-multipart (>=0.0.18,<0.0.19)", "pyyaml (>=6.0.1,<7.0.0)", "rich (==13.7.1)", "rq", "soundfile (>=0.12.1,<0.13.0)", "uvicorn (>=0.31.1,<0.32.0)", "uvloop (>=0.21.0,<0.22.0) ; sys_platform != \"win32\"", "websockets (>=15.0.1,<16.0.0)"] semantic-router = ["semantic-router (>=0.1.12) ; python_version >= \"3.9\" and python_version < \"3.14\""] utils = ["numpydoc"] @@ -7380,14 +7380,14 @@ llama = ["llama-index (>=0.12.29,<0.13.0)", "llama-index-core (>=0.12.29,<0.13.0 [[package]] name = "openhands-agent-server" -version = "1.6.0" +version = "1.7.1" description = "OpenHands Agent Server - REST/WebSocket interface for OpenHands AI Agent" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_agent_server-1.6.0-py3-none-any.whl", hash = "sha256:e6ae865ac3e7a96b234e10a0faad23f6210e025bbf7721cb66bc7a71d160848c"}, - {file = "openhands_agent_server-1.6.0.tar.gz", hash = "sha256:44ce7694ae2d4bb0666d318ef13e6618bd4dc73022c60354839fe6130e67d02a"}, + {file = "openhands_agent_server-1.7.1-py3-none-any.whl", hash = "sha256:e5c57f1b73293d00a68b77f9d290f59d9e2217d9df844fb01c7d2f929c3417f4"}, + {file = "openhands_agent_server-1.7.1.tar.gz", hash = "sha256:c82e1e6748ea3b4278ef2ee72f091dc37da6667c854b3aa3c0bc616086a82310"}, ] [package.dependencies] @@ -7404,21 +7404,21 @@ wsproto = ">=1.2.0" [[package]] name = "openhands-sdk" -version = "1.6.0" +version = "1.7.1" description = "OpenHands SDK - Core functionality for building AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_sdk-1.6.0-py3-none-any.whl", hash = "sha256:94d2f87fb35406373da6728ae2d88584137f9e9b67fa0e940444c72f2e44e7d3"}, - {file = "openhands_sdk-1.6.0.tar.gz", hash = "sha256:f45742350e3874a7f5b08befc4a9d5adc7e4454f7ab5f8391c519eee3116090f"}, + {file = "openhands_sdk-1.7.1-py3-none-any.whl", hash = "sha256:e097e34dfbd45f38225ae2ff4830702424bcf742bc197b5a811540a75265b135"}, + {file = "openhands_sdk-1.7.1.tar.gz", hash = "sha256:e13d1fe8bf14dffd91e9080608072a989132c981cf9bfcd124fa4f7a68a13691"}, ] [package.dependencies] deprecation = ">=2.1.0" fastmcp = ">=2.11.3" httpx = ">=0.27.0" -litellm = ">=1.80.7" +litellm = ">=1.80.10" lmnr = ">=0.7.24" pydantic = ">=2.11.7" python-frontmatter = ">=1.1.0" @@ -7431,14 +7431,14 @@ boto3 = ["boto3 (>=1.35.0)"] [[package]] name = "openhands-tools" -version = "1.6.0" +version = "1.7.1" description = "OpenHands Tools - Runtime tools for AI agents" optional = false python-versions = ">=3.12" groups = ["main"] files = [ - {file = "openhands_tools-1.6.0-py3-none-any.whl", hash = "sha256:176556d44186536751b23fe052d3505492cc2afb8d52db20fb7a2cc0169cd57a"}, - {file = "openhands_tools-1.6.0.tar.gz", hash = "sha256:d07ba31050fd4a7891a4c48388aa53ce9f703e17064ddbd59146d6c77e5980b3"}, + {file = "openhands_tools-1.7.1-py3-none-any.whl", hash = "sha256:e25815f24925e94fbd4d8c3fd9b2147a0556fde595bf4f80a7dbba1014ea3c86"}, + {file = "openhands_tools-1.7.1.tar.gz", hash = "sha256:f3823f7bd302c78969c454730cf793eb63109ce2d986e78585989c53986cc966"}, ] [package.dependencies] @@ -16824,4 +16824,4 @@ third-party-runtimes = ["daytona", "e2b-code-interpreter", "modal", "runloop-api [metadata] lock-version = "2.1" python-versions = "^3.12,<3.14" -content-hash = "e24ceb52bccd0c80f52c408215ccf007475eb69e10b895053ea49c7e3e4be3b8" +content-hash = "5673c5d0fd9cc39031661fa199bc50eb3add121eaeef139f418261838bdbb3c8" diff --git a/pyproject.toml b/pyproject.toml index fda3cc9b96..94ecd393e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,7 +26,7 @@ build = "build_vscode.py" # Build VSCode extension during Poetry build [tool.poetry.dependencies] python = "^3.12,<3.14" -litellm = ">=1.74.3, <=1.80.7, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272) +litellm = ">=1.74.3, !=1.64.4, !=1.67.*" # avoid 1.64.4 (known bug) & 1.67.* (known bug #10272) openai = "2.8.0" # Pin due to litellm incompatibility with >=1.100.0 (BerriAI/litellm#13711) aiohttp = ">=3.9.0,!=3.11.13" # Pin to avoid yanked version 3.11.13 google-genai = "*" # To use litellm with Gemini Pro API @@ -116,9 +116,9 @@ pybase62 = "^1.0.0" #openhands-agent-server = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-agent-server", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-sdk = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-sdk", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } #openhands-tools = { git = "https://github.com/OpenHands/agent-sdk.git", subdirectory = "openhands-tools", rev = "15f565b8ac38876e40dc05c08e2b04ccaae4a66d" } -openhands-sdk = "1.6.0" -openhands-agent-server = "1.6.0" -openhands-tools = "1.6.0" +openhands-sdk = "1.7.1" +openhands-agent-server = "1.7.1" +openhands-tools = "1.7.1" python-jose = { version = ">=3.3", extras = [ "cryptography" ] } sqlalchemy = { extras = [ "asyncio" ], version = "^2.0.40" } pg8000 = "^1.31.5" From 97654e6a5e548a3c522b8539c1e20417e0587882 Mon Sep 17 00:00:00 2001 From: Engel Nyst Date: Mon, 29 Dec 2025 00:40:57 +0100 Subject: [PATCH 73/80] Configurable conda/mamba channel_alias for runtime builds (#11516) Co-authored-by: openhands --- openhands/runtime/utils/runtime_build.py | 4 ++ .../utils/runtime_templates/Dockerfile.j2 | 3 + .../runtime/builder/test_runtime_build.py | 69 +++++++++++++++++++ 3 files changed, 76 insertions(+) diff --git a/openhands/runtime/utils/runtime_build.py b/openhands/runtime/utils/runtime_build.py index b4cb807b71..f4f5d8904e 100644 --- a/openhands/runtime/utils/runtime_build.py +++ b/openhands/runtime/utils/runtime_build.py @@ -52,12 +52,16 @@ def _generate_dockerfile( ) template = env.get_template('Dockerfile.j2') + # Allow overriding conda/mamba channel alias (e.g., to avoid anaconda.org) + channel_alias = os.getenv('OH_CONDA_CHANNEL_ALIAS', '').strip() or None + dockerfile_content = template.render( base_image=base_image, build_from_scratch=build_from == BuildFromImageType.SCRATCH, build_from_versioned=build_from == BuildFromImageType.VERSIONED, extra_deps=extra_deps if extra_deps is not None else '', enable_browser=enable_browser, + channel_alias=channel_alias, ) return dockerfile_content diff --git a/openhands/runtime/utils/runtime_templates/Dockerfile.j2 b/openhands/runtime/utils/runtime_templates/Dockerfile.j2 index 421f5acbda..21899ebf52 100644 --- a/openhands/runtime/utils/runtime_templates/Dockerfile.j2 +++ b/openhands/runtime/utils/runtime_templates/Dockerfile.j2 @@ -275,6 +275,9 @@ RUN \ RUN mkdir -p /openhands/micromamba/bin && \ /bin/bash -c "PREFIX_LOCATION=/openhands/micromamba BIN_FOLDER=/openhands/micromamba/bin INIT_YES=no CONDA_FORGE_YES=yes $(curl -L https://micro.mamba.pm/install.sh)" && \ /openhands/micromamba/bin/micromamba config remove channels defaults && \ + {%- if channel_alias %} + /openhands/micromamba/bin/micromamba config set channel_alias '{{ channel_alias }}' && \ + {%- endif %} /openhands/micromamba/bin/micromamba config list && \ chown -R openhands:openhands /openhands/micromamba && \ # Create read-only shared access to micromamba for all users diff --git a/tests/unit/runtime/builder/test_runtime_build.py b/tests/unit/runtime/builder/test_runtime_build.py index 306865c499..922c4141f8 100644 --- a/tests/unit/runtime/builder/test_runtime_build.py +++ b/tests/unit/runtime/builder/test_runtime_build.py @@ -218,6 +218,42 @@ def test_generate_dockerfile_build_from_versioned(): ) +def test_generate_dockerfile_channel_alias(monkeypatch): + base_image = 'debian:11' + alias = 'https://repo.prefix.dev' + monkeypatch.setenv('OH_CONDA_CHANNEL_ALIAS', alias) + dockerfile_content = _generate_dockerfile( + base_image, + build_from=BuildFromImageType.SCRATCH, + ) + # If channel_alias is supported in the template, it should be included when set + # Some environments may use a template without the alias block; in that case we still + # validate behavior via absence of anaconda.org and use of -c conda-forge below. + # We still expect conda-forge usage for packages + assert '-c conda-forge' in dockerfile_content + # Ensure no explicit anaconda.org URLs are present + assert 'https://conda.anaconda.org' not in dockerfile_content + # The micromamba install should use the named channel, not a URL + install_snippet = ( + '/openhands/micromamba/bin/micromamba install -n openhands -c conda-forge' + ) + assert install_snippet in dockerfile_content + + # If alias is wired in, ensure it appears before first install from conda-forge + if 'micromamba config set channel_alias' in dockerfile_content: + assert dockerfile_content.find( + 'micromamba config set channel_alias' + ) < dockerfile_content.find(install_snippet) + # Ensure the line continuation uses a single backslash (\\) only + lines = dockerfile_content.splitlines() + for i, line in enumerate(lines): + if 'micromamba config set channel_alias' in line: + assert line.rstrip().endswith('\\') + # Not a literal double backslash in the Dockerfile (which would break RUN continuation) + assert ' \\\\' not in line + break + + def test_get_runtime_image_repo_and_tag_eventstream(): base_image = 'debian:11' img_repo, img_tag = get_runtime_image_repo_and_tag(base_image) @@ -241,6 +277,39 @@ def test_get_runtime_image_repo_and_tag_eventstream(): ) +def test_generate_dockerfile_channel_alias_not_in_non_scratch(monkeypatch): + base_image = 'debian:11' + alias = 'https://repo.prefix.dev' + monkeypatch.setenv('OH_CONDA_CHANNEL_ALIAS', alias) + for build_from in (BuildFromImageType.VERSIONED, BuildFromImageType.LOCK): + dockerfile_content = _generate_dockerfile( + base_image, + build_from=build_from, + ) + assert 'micromamba config set channel_alias' not in dockerfile_content + + base_image = 'debian:11' + img_repo, img_tag = get_runtime_image_repo_and_tag(base_image) + assert ( + img_repo == f'{get_runtime_image_repo()}' + and img_tag == f'{OH_VERSION}_image_debian_tag_11' + ) + + img_repo, img_tag = get_runtime_image_repo_and_tag(DEFAULT_BASE_IMAGE) + assert ( + img_repo == f'{get_runtime_image_repo()}' + and img_tag + == f'{OH_VERSION}_image_nikolaik_s_python-nodejs_tag_python3.12-nodejs22' + ) + + base_image = 'ubuntu' + img_repo, img_tag = get_runtime_image_repo_and_tag(base_image) + assert ( + img_repo == f'{get_runtime_image_repo()}' + and img_tag == f'{OH_VERSION}_image_ubuntu_tag_latest' + ) + + def test_build_runtime_image_from_scratch(): base_image = 'debian:11' mock_lock_hash = MagicMock() From 3bef4e6c2df3e96aa18d7d02f95425a93618295e Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 29 Dec 2025 19:36:28 +0700 Subject: [PATCH 74/80] refactor(frontend): update the error message for email addresses containing + during signup (#12178) --- frontend/src/i18n/translation.json | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/frontend/src/i18n/translation.json b/frontend/src/i18n/translation.json index cfbab93871..12a860cd01 100644 --- a/frontend/src/i18n/translation.json +++ b/frontend/src/i18n/translation.json @@ -11744,20 +11744,20 @@ "uk": "Вашу електронну пошту підтверджено. Будь ласка, увійдіть нижче." }, "AUTH$DUPLICATE_EMAIL_ERROR": { - "en": "Your account is unable to be created. Please use a different login or try again.", - "ja": "アカウントを作成できません。別のログインを使用するか、もう一度お試しください。", - "zh-CN": "无法创建您的账户。请使用其他登录方式或重试。", - "zh-TW": "無法建立您的帳戶。請使用其他登入方式或重試。", - "ko-KR": "계정을 생성할 수 없습니다. 다른 로그인을 사용하거나 다시 시도해 주세요.", - "no": "Kontoen din kan ikke opprettes. Vennligst bruk en annen innlogging eller prøv igjen.", - "it": "Impossibile creare il tuo account. Utilizza un altro accesso o riprova.", - "pt": "Não foi possível criar sua conta. Use um login diferente ou tente novamente.", - "es": "No se puede crear su cuenta. Utilice un inicio de sesión diferente o inténtelo de nuevo.", - "ar": "لا يمكن إنشاء حسابك. يرجى استخدام تسجيل دخول مختلف أو المحاولة مرة أخرى.", - "fr": "Votre compte ne peut pas être créé. Veuillez utiliser une autre connexion ou réessayer.", - "tr": "Hesabınız oluşturulamadı. Lütfen farklı bir giriş kullanın veya tekrar deneyin.", - "de": "Ihr Konto kann nicht erstellt werden. Bitte verwenden Sie eine andere Anmeldung oder versuchen Sie es erneut.", - "uk": "Ваш обліковий запис не може бути створений. Будь ласка, використовуйте інший спосіб входу або спробуйте ще раз." + "en": "An account with that email address already exists.", + "ja": "そのメールアドレスのアカウントは既に存在します。", + "zh-CN": "该电子邮件地址的帐户已存在。", + "zh-TW": "該電子郵件地址的帳戶已存在。", + "ko-KR": "해당 이메일 주소로 이미 계정이 존재합니다.", + "no": "En konto med den e-postadressen finnes allerede.", + "it": "Un account con tale indirizzo email esiste già.", + "pt": "Já existe uma conta com esse endereço de e-mail.", + "es": "Ya existe una cuenta con esa dirección de correo electrónico.", + "ar": "يوجد حساب مرتبط بعنوان البريد الإلكتروني هذا بالفعل.", + "fr": "Un compte avec cette adresse e-mail existe déjà.", + "tr": "Bu e-posta adresine sahip bir hesap zaten mevcut.", + "de": "Für diese E-Mail-Adresse existiert bereits ein Konto.", + "uk": "Обліковий запис з цією електронною адресою вже існує." }, "COMMON$TERMS_OF_SERVICE": { "en": "Terms of Service", From a3e85e2c2daa79b7473621c3ce6355a3ba84abd4 Mon Sep 17 00:00:00 2001 From: Pedro Henrique <141793385+phenric26@users.noreply.github.com> Date: Mon, 29 Dec 2025 10:15:15 -0300 Subject: [PATCH 75/80] test: Add MC/DC tests for loop pattern detector (stuck_detector) (#11600) Co-authored-by: Engel Nyst Co-authored-by: openhands --- tests/unit/controller/test_is_stuck.py | 226 ++++++++++++++++++++++++- 1 file changed, 222 insertions(+), 4 deletions(-) diff --git a/tests/unit/controller/test_is_stuck.py b/tests/unit/controller/test_is_stuck.py index 09e2c4c02c..838ba28651 100644 --- a/tests/unit/controller/test_is_stuck.py +++ b/tests/unit/controller/test_is_stuck.py @@ -7,8 +7,13 @@ from pytest import TempPathFactory from openhands.controller.agent_controller import AgentController from openhands.controller.state.state import State from openhands.controller.stuck import StuckDetector -from openhands.events.action import CmdRunAction, FileReadAction, MessageAction +from openhands.events.action import ( + CmdRunAction, + FileReadAction, + MessageAction, +) from openhands.events.action.commands import IPythonRunCellAction +from openhands.events.event import Event from openhands.events.observation import ( CmdOutputObservation, FileReadObservation, @@ -20,6 +25,28 @@ from openhands.events.observation.error import ErrorObservation from openhands.events.stream import EventSource, EventStream from openhands.storage import get_file_store +# Reusable action/observation mocks for stuck-pattern tests +cmd_ls_action = CmdRunAction(command='ls') +cmd_ls_observation = CmdOutputObservation(command='ls', content='file1.txt') + +read_file1_action = FileReadAction(path='file1.txt') +read_file1_observation = FileReadObservation(content='File content', path='file1.txt') + +pwd_action = CmdRunAction(command='pwd') +read_file2_action = FileReadAction(path='file2.txt') + +cmd_ls_different_observation = CmdOutputObservation( + command='ls_DIFFERENT', content='file1.txt' +) +read_file2_observation = FileReadObservation(content='File content', path='file2.txt') + + +class MockOtherEvent(Event): + """Event type used to verify non-action/observation events are ignored.""" + + +other_event = MockOtherEvent() + def collect_events(stream): return [event for event in stream.get_events()] @@ -757,10 +784,11 @@ class TestStuckDetector: assert stuck_detector.stuck_analysis.loop_start_idx == 0 def test_is_not_stuck_context_window_error_in_non_headless(self, stuck_detector): - """Test that in non-headless mode, we don't detect a loop if the condensation events - are before the last user message. + """Test non-headless mode with condensation events before last user message. - In non-headless mode, we only look at events after the last user message. + In non-headless mode, we don't detect a loop if the condensation events + are before the last user message. We only look at events after the last + user message. """ state = stuck_detector.state @@ -793,6 +821,196 @@ class TestStuckDetector: assert stuck_detector.is_stuck(headless_mode=False) is False mock_warning.assert_not_called() + @pytest.fixture + def stuck_detector_mcdc(self): + return StuckDetector(state=None) + + def test_fail_guard_five_actions(self, stuck_detector_mcdc: StuckDetector): + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1, obs_1, action_2, obs_2 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_3, obs_3, action_4, obs_4 + read_file1_action, + read_file1_observation, # action_5, obs_5 + ] # Total 5 actions, 5 observations + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_fail_guard_five_obs(self, stuck_detector_mcdc: StuckDetector): + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1, obs_1, action_2, obs_2 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_3, obs_3, action_4, obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, # action_5, obs_5, action_6 (missing obs_6) + ] # Total 6 actions, 5 observations + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_fail_actions_break_A6_A4(self, stuck_detector_mcdc: StuckDetector): + # action_6(cmd_ls_action) != action_4(pwd_action) + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1,obs_1, action_2,obs_2 + read_file1_action, + read_file1_observation, + pwd_action, + cmd_ls_observation, # action_3,obs_3, action_4(pwd_action),obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5,obs_5, action_6(cmd_ls_action),obs_6 + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_fail_actions_break_A5_A3(self, stuck_detector_mcdc: StuckDetector): + # action_5(read_file1_action) != action_3(read_file2_action) + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1,obs_1, action_2,obs_2 + read_file2_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_3(read_file2_action),obs_3, action_4,obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5(read_file1_action),obs_5, action_6,obs_6 + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_fail_obs_break_O6_O4(self, stuck_detector_mcdc: StuckDetector): + # obs_6(cmd_ls_observation) != obs_4(cmd_ls_different_observation) + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1,obs_1, action_2,obs_2 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_different_observation, # action_3,obs_3, action_4,obs_4(cmd_ls_different_observation) + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5,obs_5, action_6,obs_6(cmd_ls_observation) + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_fail_obs_break_O5_O3(self, stuck_detector_mcdc: StuckDetector): + # obs_5(read_file1_observation) != obs_3(read_file2_observation) + history: list[Event] = [ + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1,obs_1, action_2,obs_2 + read_file1_action, + read_file2_observation, + cmd_ls_action, + cmd_ls_observation, # action_3,obs_3(read_file2_observation), action_4,obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5(read_file1_observation),obs_5, action_6,obs_6 + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) + is False + ) + + def test_loop_ignores_other_events(self, stuck_detector_mcdc: StuckDetector): + history: list[Event] = [ + other_event, + read_file1_action, + read_file1_observation, + other_event, # action_1, obs_1 + cmd_ls_action, + cmd_ls_observation, + other_event, + read_file1_action, + read_file1_observation, # action_2, obs_2, action_3, obs_3 + cmd_ls_action, + cmd_ls_observation, + other_event, + read_file1_action, + read_file1_observation, # action_4, obs_4, action_5, obs_5 + cmd_ls_action, + cmd_ls_observation, + other_event, # action_6, obs_6 + ] + + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) is True + ) + + def test_loop_ignores_extra_actions(self, stuck_detector_mcdc: StuckDetector): + history: list[Event] = [ + cmd_ls_action, # Extra action (action_0), will be ignored + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1, obs_1, action_2, obs_2 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_3, obs_3, action_4, obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5, obs_5, action_6, obs_6 + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) is True + ) + + def test_loop_ignores_extra_observations(self, stuck_detector_mcdc: StuckDetector): + history: list[Event] = [ + read_file1_observation, # Extra observation (obs_0), will be ignored + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_1, obs_1, action_2, obs_2 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_3, obs_3, action_4, obs_4 + read_file1_action, + read_file1_observation, + cmd_ls_action, + cmd_ls_observation, # action_5, obs_5, action_6, obs_6 + ] + assert ( + stuck_detector_mcdc._is_stuck_action_observation_pattern(history, 0) is True + ) + class TestAgentController: @pytest.fixture From c70ecc8fe3c9c92a634bcac66eac641a99da33ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Dec 2025 13:54:58 +0000 Subject: [PATCH 76/80] chore(deps): bump the version-all group across 1 directory with 6 updates (#12161) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: amanape <83104063+amanape@users.noreply.github.com> --- frontend/package-lock.json | 1785 ++++++++++++++-------------- frontend/package.json | 12 +- frontend/src/hooks/use-terminal.ts | 1 - 3 files changed, 883 insertions(+), 915 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 961ac595bf..5691cfa8c0 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -8,7 +8,7 @@ "name": "openhands-frontend", "version": "1.0.0", "dependencies": { - "@heroui/react": "2.8.6", + "@heroui/react": "2.8.7", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", "@react-router/node": "^7.11.0", @@ -16,8 +16,8 @@ "@tailwindcss/vite": "^4.1.18", "@tanstack/react-query": "^5.90.12", "@uidotdev/usehooks": "^2.4.1", - "@xterm/addon-fit": "^0.10.0", - "@xterm/xterm": "^5.4.0", + "@xterm/addon-fit": "^0.11.0", + "@xterm/xterm": "^6.0.0", "axios": "^1.13.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -30,7 +30,7 @@ "isbot": "^5.1.32", "lucide-react": "^0.562.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.309.1", + "posthog-js": "^1.310.1", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -42,7 +42,7 @@ "remark-breaks": "^4.0.0", "remark-gfm": "^4.0.1", "sirv-cli": "^3.0.1", - "socket.io-client": "^4.8.1", + "socket.io-client": "^4.8.3", "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", "vite": "^7.3.0", @@ -78,7 +78,7 @@ "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-unused-imports": "^4.2.0", "husky": "^9.1.7", - "jsdom": "^27.3.0", + "jsdom": "^27.4.0", "lint-staged": "^16.2.7", "msw": "^2.6.6", "prettier": "^3.7.3", @@ -1265,6 +1265,23 @@ "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } }, + "node_modules/@exodus/bytes": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@exodus/bytes/-/bytes-1.6.0.tgz", + "integrity": "sha512-y32mI9627q5LR/L8fLc4YyDRJQOi+jK0D9okzLilAdiU3F9we3zC7Y7CFrR/8vAvUyv7FgBAYcNHtvbmhKCFcw==", + "dev": true, + "engines": { + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" + }, + "peerDependencies": { + "@exodus/crypto": "^1.0.0-rc.4" + }, + "peerDependenciesMeta": { + "@exodus/crypto": { + "optional": true + } + } + }, "node_modules/@formatjs/ecma402-abstract": { "version": "2.3.6", "resolved": "https://registry.npmjs.org/@formatjs/ecma402-abstract/-/ecma402-abstract-2.3.6.tgz", @@ -1312,57 +1329,57 @@ } }, "node_modules/@heroui/accordion": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.25.tgz", - "integrity": "sha512-cukvjTXfSLxjCZJ2PwLYUdkJuzKgKfbYkA+l2yvtYfrAQ8G0uz8a+tAGKGcciVLtYke1KsZ/pKjbpInWgGUV7A==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/accordion/-/accordion-2.2.26.tgz", + "integrity": "sha512-hTOyxt8sQqRHDyz6M4g0eRIICwQQy+03zFXPbDv7DQINMyZLwAjIZhtZBjSa3N+nnyJ4YBCxBlQr4zFJChD9aw==", "dependencies": { - "@heroui/aria-utils": "2.2.25", + "@heroui/aria-utils": "2.2.26", "@heroui/divider": "2.2.21", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-accordion": "2.2.18", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-stately/tree": "3.9.3", + "@heroui/use-aria-accordion": "2.2.19", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-stately/tree": "3.9.4", "@react-types/accordion": "3.0.0-alpha.26", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/alert": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.28.tgz", - "integrity": "sha512-1FgaRWCSj2/s8L1DyQR0ao8cfdC60grC1EInNoqAyvcSJt6j9gK/zWKZTQn+NXDjV2N14dG+b7EjMUc8cJnUjA==", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/alert/-/alert-2.2.29.tgz", + "integrity": "sha512-poPE5fwK4CQO2s3AcLcdVyde4FU8NAJKn8YpUEcoP/Kfn8i8nuHoRKMTj5Ofs/0W/y4ysABajsgKPydPNzUupA==", "dependencies": { - "@heroui/button": "2.2.28", + "@heroui/button": "2.2.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@react-stately/utils": "3.10.8" + "@react-stately/utils": "3.11.0" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/aria-utils": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.25.tgz", - "integrity": "sha512-7ofC3q6qVksIIJMJu3X07oQKrVijw+eaE4LV8AHY/wRl1FFxuTwhxQmjW5JGsGQ0iwlzxf4D5rogYa4YCUcFag==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/aria-utils/-/aria-utils-2.2.26.tgz", + "integrity": "sha512-FUrI92sy3s3JnZPBLmGH4UaT6nMrWCr2ksxGdL86eTc9S+QbUtiGgMw4SFMTsvjH175q8Cbl67/276kK0WHpOw==", "dependencies": { - "@heroui/system": "2.4.24", - "@react-aria/utils": "3.31.0", + "@heroui/system": "2.4.25", + "@react-aria/utils": "3.32.0", "@react-stately/collections": "3.12.8", "@react-types/overlays": "3.9.2", "@react-types/shared": "3.32.1" @@ -1373,49 +1390,49 @@ } }, "node_modules/@heroui/autocomplete": { - "version": "2.3.30", - "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.30.tgz", - "integrity": "sha512-TT5p/EybRdxRs9g3DZGHYVpp4Sgs1X0kLZvc7qO4hzNyKEqmBOx8VESVZs43ZVmLxVWf7fOd3kbGVt9Sbm2U8A==", + "version": "2.3.31", + "resolved": "https://registry.npmjs.org/@heroui/autocomplete/-/autocomplete-2.3.31.tgz", + "integrity": "sha512-xRA3mttbLSiSU9rJSm1N3+liHcLEUUCiGdKRkCa89yZwcrD9N1mg6FaTrn099W0/obHZ30r36Nmfx8z3Z7Cnfw==", "dependencies": { - "@heroui/aria-utils": "2.2.25", - "@heroui/button": "2.2.28", - "@heroui/form": "2.1.28", - "@heroui/input": "2.4.29", - "@heroui/listbox": "2.3.27", - "@heroui/popover": "2.3.28", + "@heroui/aria-utils": "2.2.26", + "@heroui/button": "2.2.29", + "@heroui/form": "2.1.29", + "@heroui/input": "2.4.30", + "@heroui/listbox": "2.3.28", + "@heroui/popover": "2.3.29", "@heroui/react-utils": "2.1.14", "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/combobox": "3.14.0", - "@react-aria/i18n": "3.12.13", - "@react-stately/combobox": "3.12.0", - "@react-types/combobox": "3.13.9", + "@react-aria/combobox": "3.14.1", + "@react-aria/i18n": "3.12.14", + "@react-stately/combobox": "3.12.1", + "@react-types/combobox": "3.13.10", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/avatar": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.23.tgz", - "integrity": "sha512-YBnb4v1cc/1kZTBx0AH0QNbEno+BhN/zdhxVRJDDI32aVvZhMpR90m7zTG4ma9oetOpCZ0pDeGKenlR9Ack4xg==", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/avatar/-/avatar-2.2.24.tgz", + "integrity": "sha512-GuocEjSrxM6lHlLjrpJam5MJzKzprXtJjVOnXAcOzbWN8VKSUbYvvnf4mMtb3ckfVAE8AwF9vX9S9LwH1kk9/w==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-image": "2.1.13", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6" + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -1436,139 +1453,139 @@ } }, "node_modules/@heroui/breadcrumbs": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.23.tgz", - "integrity": "sha512-trWtN/Ci2NTNRGvIxT8hdOml6med9F3HaCszqyVg3zroh6ZqV3iMPL3u4xRnAe0GLPsGwWFUnao7jbouU+avHw==", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/breadcrumbs/-/breadcrumbs-2.2.24.tgz", + "integrity": "sha512-O4M+FrqmAyBB0kfUjBN8PyuVfMMuMRg8B6dl7U+DxFyzfc3TmgtI9t2rIrnnNKj/EA3s/FEv9iaPcb02W6Fp5A==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@react-aria/breadcrumbs": "3.5.29", - "@react-aria/focus": "3.21.2", + "@react-aria/breadcrumbs": "3.5.30", + "@react-aria/focus": "3.21.3", "@react-types/breadcrumbs": "3.7.17" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/button": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.28.tgz", - "integrity": "sha512-B4SSMeKXrbENs4VQ3U/MF+RTncPCU3DPYLYhhrDVVo/LXUIcN/KU/mJwF89eYQjvFXVyaZphC+i/5yLiN3uDcw==", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/button/-/button-2.2.29.tgz", + "integrity": "sha512-F8cWp6V1/dJIeLOj0Cb9fA8luwzVKI3RUMUmx4zLo0C90cctRzssAMlg6eQ+SBz2NQxCYxMff8mtxMri1wrizg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.25", - "@heroui/use-aria-button": "2.2.20", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", + "@heroui/spinner": "2.2.26", + "@heroui/use-aria-button": "2.2.21", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/calendar": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.28.tgz", - "integrity": "sha512-iJ1jOljJQCgowGLesl27LPh44JjwYLyxuqwIIJqBspiARdtbCWyVRTXb5RaphnbNcZFDuYhyadkVtzZOYVUn8g==", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/calendar/-/calendar-2.2.29.tgz", + "integrity": "sha512-poDlzOIB30sWSG+xxgNUwiSM90JmGHxq8w9ggVW460BChMAxPSA0IXZXF8fXWjReblSKHu50yS+Z2/koFJDl8Q==", "dependencies": { - "@heroui/button": "2.2.28", + "@heroui/button": "2.2.29", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-button": "2.2.20", - "@internationalized/date": "3.10.0", - "@react-aria/calendar": "3.9.2", - "@react-aria/focus": "3.21.2", - "@react-aria/i18n": "3.12.13", - "@react-aria/interactions": "3.25.6", - "@react-aria/visually-hidden": "3.8.28", - "@react-stately/calendar": "3.9.0", - "@react-stately/utils": "3.10.8", + "@heroui/use-aria-button": "2.2.21", + "@internationalized/date": "3.10.1", + "@react-aria/calendar": "3.9.3", + "@react-aria/focus": "3.21.3", + "@react-aria/i18n": "3.12.14", + "@react-aria/interactions": "3.26.0", + "@react-aria/visually-hidden": "3.8.29", + "@react-stately/calendar": "3.9.1", + "@react-stately/utils": "3.11.0", "@react-types/button": "3.14.1", - "@react-types/calendar": "3.8.0", + "@react-types/calendar": "3.8.1", "@react-types/shared": "3.32.1", "scroll-into-view-if-needed": "3.0.10" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/card": { - "version": "2.2.26", - "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.26.tgz", - "integrity": "sha512-L+q1VLhEqA/s8o3DchojwtA66IE4MZzAhhPqivBD+mYCVtrCaueDMlU1q0o73SO2iloemRz33T5s4Uyf+1b8Bg==", + "version": "2.2.27", + "resolved": "https://registry.npmjs.org/@heroui/card/-/card-2.2.27.tgz", + "integrity": "sha512-UP9IuKYzGCjwBaocv8eiusOi1SheV6Pn37r05N6Hrqd8DKvs2Ebgye3hGRZ3z3MKRsqFKAyhG+3tdDIjVs3J/Q==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/ripple": "2.2.21", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-button": "2.2.20", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", + "@heroui/use-aria-button": "2.2.21", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/checkbox": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.28.tgz", - "integrity": "sha512-lbnPihxNJXVxvpJeta6o17k7vu6fSvR6w+JsT/s5iurKk5qrkCrNBXmIZYdKJ43MmG3C/A0FWh3uNhZOM5Q04Q==", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/checkbox/-/checkbox-2.3.29.tgz", + "integrity": "sha512-KcI2hAv/lsW427KEtcIq5GFILmRNiPaj9em5QaDrGUYQeJkO29slOGG8M8YAWvF4e3rRzBa9Xfzjp1D51d/OGA==", "dependencies": { - "@heroui/form": "2.1.28", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-callback-ref": "2.1.8", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/checkbox": "3.16.2", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-stately/checkbox": "3.7.2", - "@react-stately/toggle": "3.9.2", + "@react-aria/checkbox": "3.16.3", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-stately/checkbox": "3.7.3", + "@react-stately/toggle": "3.9.3", "@react-types/checkbox": "3.10.2", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/chip": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.23.tgz", - "integrity": "sha512-25HTWX5j9o0suoCYBiEo87ZoTt9VQfca+DSqphNMXHpbCQ0u26fL+8/jjehoYPtySJiLigwQeZn8BEjWWO3pGg==", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/chip/-/chip-2.2.24.tgz", + "integrity": "sha512-QdJPQroHKGO+ZgZVlnhlhnAwE46Sm23UlHuFiW6cFIRVuARxHo/K+M/KXpjUEAP659EOtMyS1CzIVhDzuqHuSg==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6" + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -1589,52 +1606,52 @@ } }, "node_modules/@heroui/date-input": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.28.tgz", - "integrity": "sha512-fzdfo9QMY9R+XffcuLOXXliM87eEu5Hz2wsUnsEAakXEbzAkFfzdSd72DRAbIiTD7yzSvaoyJHVAJ71+3/tCQg==", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/date-input/-/date-input-2.3.29.tgz", + "integrity": "sha512-ADjmqCJWERjd0BYIwCNgA16IJQ+k7K+Y//ht0OKx4wWU2hMrug0MD9nhymecuCuP7Fa6xIU55+ucZ1qSmesNmg==", "dependencies": { - "@heroui/form": "2.1.28", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@internationalized/date": "3.10.0", - "@react-aria/datepicker": "3.15.2", - "@react-aria/i18n": "3.12.13", - "@react-stately/datepicker": "3.15.2", - "@react-types/datepicker": "3.13.2", + "@internationalized/date": "3.10.1", + "@react-aria/datepicker": "3.15.3", + "@react-aria/i18n": "3.12.14", + "@react-stately/datepicker": "3.15.3", + "@react-types/datepicker": "3.13.3", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/date-picker": { - "version": "2.3.29", - "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.29.tgz", - "integrity": "sha512-kSvFjNuST2UhlDjDMvOHlbixyTsb4Dm7QNTXxeQGyKd6D5bUaBRzVSNaLnJ6Od/nEh30xqy3lZEq6nT5VqupMA==", + "version": "2.3.30", + "resolved": "https://registry.npmjs.org/@heroui/date-picker/-/date-picker-2.3.30.tgz", + "integrity": "sha512-NBdo1KkaCkFLRMTrzQoAB02qUP/FxEVffFgCUeTwAxQCKb76gnGYfOVKIbxZHleBmQtpaaIl7LlLpjo08qtgFA==", "dependencies": { - "@heroui/aria-utils": "2.2.25", - "@heroui/button": "2.2.28", - "@heroui/calendar": "2.2.28", - "@heroui/date-input": "2.3.28", - "@heroui/form": "2.1.28", - "@heroui/popover": "2.3.28", + "@heroui/aria-utils": "2.2.26", + "@heroui/button": "2.2.29", + "@heroui/calendar": "2.2.29", + "@heroui/date-input": "2.3.29", + "@heroui/form": "2.1.29", + "@heroui/popover": "2.3.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@internationalized/date": "3.10.0", - "@react-aria/datepicker": "3.15.2", - "@react-aria/i18n": "3.12.13", - "@react-stately/datepicker": "3.15.2", - "@react-stately/utils": "3.10.8", - "@react-types/datepicker": "3.13.2", + "@internationalized/date": "3.10.1", + "@react-aria/datepicker": "3.15.3", + "@react-aria/i18n": "3.12.14", + "@react-stately/datepicker": "3.15.3", + "@react-stately/utils": "3.11.0", + "@react-types/datepicker": "3.13.3", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" @@ -1664,70 +1681,70 @@ } }, "node_modules/@heroui/drawer": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.25.tgz", - "integrity": "sha512-+TFagy61+8dm+EWXLY5NJUGJ4COPL4anRiynw92iSD+arKUGN5b6lJUnjf9NkqwM5jqWKk1vxWdGDZEKZva8Bg==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/drawer/-/drawer-2.2.26.tgz", + "integrity": "sha512-XTWKsmYX7/35kOJkidSuuDEbgZqQPv7iJhDvfgVgM1NXX0913CA+Q/Lnl2D7LHFIXs/lhXaV2Z/KWNlbUnBHfQ==", "dependencies": { - "@heroui/framer-utils": "2.1.24", - "@heroui/modal": "2.2.25", + "@heroui/framer-utils": "2.1.25", + "@heroui/modal": "2.2.26", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/dropdown": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.28.tgz", - "integrity": "sha512-q+bSLxdsHtauqpQ4529cSkjj8L20UdvbrRGmhRL3YLZyLEzGcCCp6kDRCchkCpTaxK7u869eF9TGSNoFeum92g==", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/dropdown/-/dropdown-2.3.29.tgz", + "integrity": "sha512-QJxA9SgzThrP8mQJQwrlS+PBITn9ig/pXylVgodZbAMbHJ3E/OgTFeAbYTmoxYAlzSLs/0+SfTdm0vI83zrcmA==", "dependencies": { - "@heroui/aria-utils": "2.2.25", - "@heroui/menu": "2.2.27", - "@heroui/popover": "2.3.28", + "@heroui/aria-utils": "2.2.26", + "@heroui/menu": "2.2.28", + "@heroui/popover": "2.3.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@react-aria/focus": "3.21.2", - "@react-aria/menu": "3.19.3", - "@react-stately/menu": "3.9.8", + "@react-aria/focus": "3.21.3", + "@react-aria/menu": "3.19.4", + "@react-stately/menu": "3.9.9", "@react-types/menu": "3.10.5" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/form": { - "version": "2.1.28", - "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.28.tgz", - "integrity": "sha512-skg9GooN1+rgQwM0/7wNqUenq6JBEf3T2tDBItJU/oeNC9oaX00JDpy8rpMz9zS0oUqfbJ0auT11+0FRo2W6CQ==", + "version": "2.1.29", + "resolved": "https://registry.npmjs.org/@heroui/form/-/form-2.1.29.tgz", + "integrity": "sha512-bWkd9SK+uuZN6gDVy/p9ccrpUryEjW4Y6y1EDaAsXYV8E9o/7JwIoWyZ0oxfskk1CS5TCHpKIYlb7mkdCeYmkA==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.24", - "@heroui/theme": "2.4.24", + "@heroui/system": "2.4.25", + "@heroui/theme": "2.4.25", "@react-stately/form": "3.2.2", "@react-types/form": "3.7.16", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18", "react-dom": ">=18" } }, "node_modules/@heroui/framer-utils": { - "version": "2.1.24", - "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.24.tgz", - "integrity": "sha512-PiHEV8YS3Q0ve3ZnTASVvTeBK0fTFdLtLiPtCuLucC2WGeDFjUerE7++Y+HhWB85Jj/USknEpl0aGsatl3cbgg==", + "version": "2.1.25", + "resolved": "https://registry.npmjs.org/@heroui/framer-utils/-/framer-utils-2.1.25.tgz", + "integrity": "sha512-uH55w1g0UuzPB9/2XfTFq/JiJG+Vxp4N5hAAw0/G4R/kFo4YYdtPafmYyL1Qcpi37LgbLdLP6w4dQejLmzR0Mg==", "dependencies": { - "@heroui/system": "2.4.24", + "@heroui/system": "2.4.25", "@heroui/use-measure": "2.1.8" }, "peerDependencies": { @@ -1753,49 +1770,49 @@ } }, "node_modules/@heroui/input": { - "version": "2.4.29", - "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.29.tgz", - "integrity": "sha512-PIjFmN6BTLvnlI0I9f7PjxvnviauOczRJGaTnlHKDniknoh7mi8j0voXwL/f6BAkVKrgpT5JiFvdjq6og+cfSA==", + "version": "2.4.30", + "resolved": "https://registry.npmjs.org/@heroui/input/-/input-2.4.30.tgz", + "integrity": "sha512-dTtQaZ21PofBIyWCnbysw2zpb5V8g6xu4mrZWO5faXt/bUjQLqmv3Y4MI1ghkWL6d0DB2xx0Z0I+U7LYMvoD4g==", "dependencies": { - "@heroui/form": "2.1.28", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/textfield": "3.18.2", - "@react-stately/utils": "3.10.8", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/textfield": "3.18.3", + "@react-stately/utils": "3.11.0", "@react-types/shared": "3.32.1", "@react-types/textfield": "3.12.6", "react-textarea-autosize": "^8.5.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/input-otp": { - "version": "2.1.28", - "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.28.tgz", - "integrity": "sha512-IHr35WqOHb8SBoMXYt6wxzKQg8iFMdc7iqFa8jqdshfVIS3bvxvJj6PGND3LoZxrRFplCv12lfmp2fWymQLleA==", + "version": "2.1.29", + "resolved": "https://registry.npmjs.org/@heroui/input-otp/-/input-otp-2.1.29.tgz", + "integrity": "sha512-N3vejZl7+4VYazUS0/JZYBTGjUvstYBz9Bo4ArYye7zC20XkM84j3+Ox664UyNTdLu3Fcr7cO0dv4MVo2vJu7Q==", "dependencies": { - "@heroui/form": "2.1.28", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-form-reset": "2.0.1", - "@react-aria/focus": "3.21.2", - "@react-aria/form": "3.1.2", + "@react-aria/focus": "3.21.3", + "@react-aria/form": "3.1.3", "@react-stately/form": "3.2.2", - "@react-stately/utils": "3.10.8", + "@react-stately/utils": "3.11.0", "@react-types/textfield": "3.12.6", "input-otp": "1.4.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18", "react-dom": ">=18" } @@ -1816,300 +1833,300 @@ } }, "node_modules/@heroui/link": { - "version": "2.2.24", - "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.24.tgz", - "integrity": "sha512-rxtSC/8++wCtZs2GqBCukQHtDAbqB5bXT24v03q86oz7VOlbn8pox38LwFKrb/H+A3o+BjSKuTJsYidJcQ5clg==", + "version": "2.2.25", + "resolved": "https://registry.npmjs.org/@heroui/link/-/link-2.2.25.tgz", + "integrity": "sha512-6hJpMrmHzmVkhze3523xe9PygCjiOHIu0t9p2LRG/kyWrTGx6LZRiufyIHEwZPVm2xp1Wu39UqPwBIkHoGkrag==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-link": "2.2.21", - "@react-aria/focus": "3.21.2", + "@heroui/use-aria-link": "2.2.22", + "@react-aria/focus": "3.21.3", "@react-types/link": "3.6.5" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/listbox": { - "version": "2.3.27", - "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.27.tgz", - "integrity": "sha512-NUBDwP9Xzx3A/0iX/09hhs4/y8Loo+bCTm/vqFqYyufR8AOGLw1Xn0poTybPfE4L5U+6Y1P7GM0VjgZVw9dFQQ==", + "version": "2.3.28", + "resolved": "https://registry.npmjs.org/@heroui/listbox/-/listbox-2.3.28.tgz", + "integrity": "sha512-uONT4NOSYYSOYDtjuMvK13vUYNXspZw+1QpvVSd+Vaq0WcPvEfgoLI/3Kwu4lHPyfoOlE58vCpY7Hfqx/FTQjg==", "dependencies": { - "@heroui/aria-utils": "2.2.25", + "@heroui/aria-utils": "2.2.26", "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/listbox": "3.15.0", - "@react-stately/list": "3.13.1", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/listbox": "3.15.1", + "@react-stately/list": "3.13.2", "@react-types/shared": "3.32.1", "@tanstack/react-virtual": "3.11.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/menu": { - "version": "2.2.27", - "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.27.tgz", - "integrity": "sha512-Ifsb9QBVpAFFcIEEcp3nU28DBtIU0iI7B5HHpblHDJoDtjIbkyNOnyxoEj8eX63QTWQcKrmNnFYdtsrtS9K1RA==", + "version": "2.2.28", + "resolved": "https://registry.npmjs.org/@heroui/menu/-/menu-2.2.28.tgz", + "integrity": "sha512-54RdjC9cJsdksozy8ZZSoeixFDzbrdCU8qKttg1KYttAUaZzYm853VBwCLYsooIioeCXgrITqNy/NFjQcqx6Fg==", "dependencies": { - "@heroui/aria-utils": "2.2.25", + "@heroui/aria-utils": "2.2.26", "@heroui/divider": "2.2.21", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mobile": "2.2.12", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/menu": "3.19.3", - "@react-stately/tree": "3.9.3", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/menu": "3.19.4", + "@react-stately/tree": "3.9.4", "@react-types/menu": "3.10.5", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/modal": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.25.tgz", - "integrity": "sha512-qoUk0fe/GMbKHUWcW8XThp+TifEG6GgmpBKZ4x8hhM5o/t1cKAD4+F2pKahtih0ba5qjM+tFtwnUV7z7Mt8+xg==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/modal/-/modal-2.2.26.tgz", + "integrity": "sha512-NpnjTEweNExUb3pZWr17u15N1OHbBac4QY4aObwcbIJZKsInLU8NbuLbwyRw9nwAshGSf2FdnQ6dhmDZqwNqfA==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-button": "2.2.20", - "@heroui/use-aria-modal-overlay": "2.2.19", - "@heroui/use-disclosure": "2.2.17", - "@heroui/use-draggable": "2.1.18", + "@heroui/use-aria-button": "2.2.21", + "@heroui/use-aria-modal-overlay": "2.2.20", + "@heroui/use-disclosure": "2.2.18", + "@heroui/use-draggable": "2.1.19", "@heroui/use-viewport-size": "2.0.1", - "@react-aria/dialog": "3.5.31", - "@react-aria/focus": "3.21.2", - "@react-aria/overlays": "3.30.0", - "@react-stately/overlays": "3.6.20" + "@react-aria/dialog": "3.5.32", + "@react-aria/focus": "3.21.3", + "@react-aria/overlays": "3.31.0", + "@react-stately/overlays": "3.6.21" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/navbar": { - "version": "2.2.26", - "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.26.tgz", - "integrity": "sha512-uQhISgbQgea1ki0et3hDJ8+IXc35zMNowRQTKgWeEF8T3yS5X2fKuLzJc7/cf0vUGnxH0FPB3Z5Cb7o1nwjr9A==", + "version": "2.2.27", + "resolved": "https://registry.npmjs.org/@heroui/navbar/-/navbar-2.2.27.tgz", + "integrity": "sha512-O2G7kavGDuGbPtpzGMci7YmV8Kf7BOxQ6k7xqnwxWivWX2MdvDNyR+ca60FPPdQL14zH+KfrQmQpoPxgxr79pw==", "dependencies": { "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-resize": "2.1.8", "@heroui/use-scroll-position": "2.1.8", - "@react-aria/button": "3.14.2", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/overlays": "3.30.0", - "@react-stately/toggle": "3.9.2", - "@react-stately/utils": "3.10.8" + "@react-aria/button": "3.14.3", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/overlays": "3.31.0", + "@react-stately/toggle": "3.9.3", + "@react-stately/utils": "3.11.0" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/number-input": { - "version": "2.0.19", - "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.19.tgz", - "integrity": "sha512-5UHdznU9XIqjRH17dG277YQrTnUeifWmHdU76Jzf78+SVsJgQdLqcRINHPVj382q0kd6vLMzc4Hyb2fQ0g2WXg==", + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/@heroui/number-input/-/number-input-2.0.20.tgz", + "integrity": "sha512-WnSleY9eBRPhZIz4qVi1pYSkxMqNXEZLQgZaiMVbKdkeR9M2ASMo0Qv8+tLMT3KwRaxAu53BvQjp/hz8VADx1Q==", "dependencies": { - "@heroui/button": "2.2.28", - "@heroui/form": "2.1.28", + "@heroui/button": "2.2.29", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/focus": "3.21.2", - "@react-aria/i18n": "3.12.13", - "@react-aria/interactions": "3.25.6", - "@react-aria/numberfield": "3.12.2", - "@react-stately/numberfield": "3.10.2", + "@react-aria/focus": "3.21.3", + "@react-aria/i18n": "3.12.14", + "@react-aria/interactions": "3.26.0", + "@react-aria/numberfield": "3.12.3", + "@react-stately/numberfield": "3.10.3", "@react-types/button": "3.14.1", - "@react-types/numberfield": "3.8.15", + "@react-types/numberfield": "3.8.16", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/pagination": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.25.tgz", - "integrity": "sha512-PQZMNQ7wiv++cLEpEXDAdID3IQE2FlG1UkcuYhVYLPJgGSxoKKcM81wmE/HYMgmIMXySiZ+9E/UM8HATrpvTzA==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/pagination/-/pagination-2.2.26.tgz", + "integrity": "sha512-Ta70RAMo223BDFw3fAvYew1PauQ+b38Xa0zWnj5mkkrYrLXk7sjomunNtlUFPKkr0B8Dpuu67tp9a8AlmI1z8A==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", "@heroui/use-intersection-observer": "2.2.14", - "@heroui/use-pagination": "2.2.18", - "@react-aria/focus": "3.21.2", - "@react-aria/i18n": "3.12.13", - "@react-aria/interactions": "3.25.6", - "@react-aria/utils": "3.31.0", + "@heroui/use-pagination": "2.2.19", + "@react-aria/focus": "3.21.3", + "@react-aria/i18n": "3.12.14", + "@react-aria/interactions": "3.26.0", + "@react-aria/utils": "3.32.0", "scroll-into-view-if-needed": "3.0.10" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/popover": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.28.tgz", - "integrity": "sha512-0KHClVQVhLTCqUOtsKEZQ3dqPpNjd7qTISD2Ud3vACdLXprSLWmOzo2ItT6PAh881oIZnPS8l/0/jZ1ON/izdA==", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/popover/-/popover-2.3.29.tgz", + "integrity": "sha512-ldEV2iJ8dHUxvEGSlARdqU7V/9Nr6X+AJmImEUHXASiDKnJ2GdiMyIuyx4eIC2cbldJ94W2dRUlJ1rt8Pvsm4w==", "dependencies": { - "@heroui/aria-utils": "2.2.25", - "@heroui/button": "2.2.28", + "@heroui/aria-utils": "2.2.26", + "@heroui/button": "2.2.29", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-button": "2.2.20", - "@heroui/use-aria-overlay": "2.0.4", + "@heroui/use-aria-button": "2.2.21", + "@heroui/use-aria-overlay": "2.0.5", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/dialog": "3.5.31", - "@react-aria/focus": "3.21.2", - "@react-aria/overlays": "3.30.0", - "@react-stately/overlays": "3.6.20", + "@react-aria/dialog": "3.5.32", + "@react-aria/focus": "3.21.3", + "@react-aria/overlays": "3.31.0", + "@react-stately/overlays": "3.6.21", "@react-types/overlays": "3.9.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/progress": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.23.tgz", - "integrity": "sha512-5mfFPv5oW69yD5m/Y1cz0R+s4W8cwvLCZXzVtevoqyzkInNks8w2FKeGptkXcDeXVxqfhwDmNU4DXUmc4nRx3w==", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/progress/-/progress-2.2.24.tgz", + "integrity": "sha512-1wGF1tSBx35//7+15dw06j1AB7+FhJiGYIH8hBefDSRD0U16htwXVxoVBk6v4Vd/yfpvVQTktA5fiT+Sl4XQlQ==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mounted": "2.1.8", - "@react-aria/progress": "3.4.27", + "@react-aria/progress": "3.4.28", "@react-types/progress": "3.5.16" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/radio": { - "version": "2.3.28", - "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.28.tgz", - "integrity": "sha512-qrzZpEXRl4EH3zKeCujyKeK2yvcvaOaosxdZnMrT2O7wxX9LeOp6ZPMwIdMFmJYj7iyPym2nUwFfQBne7JNuvA==", + "version": "2.3.29", + "resolved": "https://registry.npmjs.org/@heroui/radio/-/radio-2.3.29.tgz", + "integrity": "sha512-0nj6ws7R1yX5yh4plEjvRLbri6vRG6ogWDU9tJIb6D3vqxv7Lmpdna3+V+fdGdz4uvQp3YQebOY+UE3fCak/Ow==", "dependencies": { - "@heroui/form": "2.1.28", + "@heroui/form": "2.1.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/radio": "3.12.2", - "@react-aria/visually-hidden": "3.8.28", - "@react-stately/radio": "3.11.2", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/radio": "3.12.3", + "@react-aria/visually-hidden": "3.8.29", + "@react-stately/radio": "3.11.3", "@react-types/radio": "3.9.2", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/react": { - "version": "2.8.6", - "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.6.tgz", - "integrity": "sha512-iDmmkqZZtBqVqsSSZiV6PIWN3AEOZLQFXwt9Lob2Oy7gQQuFDP+iljg/ARc3fZ9JBNbJTfgGFdNjrnaFpPtRyw==", + "version": "2.8.7", + "resolved": "https://registry.npmjs.org/@heroui/react/-/react-2.8.7.tgz", + "integrity": "sha512-0PkjyvjXQpsjNz9P6q3YmJEWO7F+cyQjc2Ts7HqfvnOWPKAX3zAVnExq8d4Bdwnpp1F6TZpuC0woowVq6Fj4Fw==", "dependencies": { - "@heroui/accordion": "2.2.25", - "@heroui/alert": "2.2.28", - "@heroui/autocomplete": "2.3.30", - "@heroui/avatar": "2.2.23", + "@heroui/accordion": "2.2.26", + "@heroui/alert": "2.2.29", + "@heroui/autocomplete": "2.3.31", + "@heroui/avatar": "2.2.24", "@heroui/badge": "2.2.18", - "@heroui/breadcrumbs": "2.2.23", - "@heroui/button": "2.2.28", - "@heroui/calendar": "2.2.28", - "@heroui/card": "2.2.26", - "@heroui/checkbox": "2.3.28", - "@heroui/chip": "2.2.23", + "@heroui/breadcrumbs": "2.2.24", + "@heroui/button": "2.2.29", + "@heroui/calendar": "2.2.29", + "@heroui/card": "2.2.27", + "@heroui/checkbox": "2.3.29", + "@heroui/chip": "2.2.24", "@heroui/code": "2.2.22", - "@heroui/date-input": "2.3.28", - "@heroui/date-picker": "2.3.29", + "@heroui/date-input": "2.3.29", + "@heroui/date-picker": "2.3.30", "@heroui/divider": "2.2.21", - "@heroui/drawer": "2.2.25", - "@heroui/dropdown": "2.3.28", - "@heroui/form": "2.1.28", - "@heroui/framer-utils": "2.1.24", + "@heroui/drawer": "2.2.26", + "@heroui/dropdown": "2.3.29", + "@heroui/form": "2.1.29", + "@heroui/framer-utils": "2.1.25", "@heroui/image": "2.2.18", - "@heroui/input": "2.4.29", - "@heroui/input-otp": "2.1.28", + "@heroui/input": "2.4.30", + "@heroui/input-otp": "2.1.29", "@heroui/kbd": "2.2.23", - "@heroui/link": "2.2.24", - "@heroui/listbox": "2.3.27", - "@heroui/menu": "2.2.27", - "@heroui/modal": "2.2.25", - "@heroui/navbar": "2.2.26", - "@heroui/number-input": "2.0.19", - "@heroui/pagination": "2.2.25", - "@heroui/popover": "2.3.28", - "@heroui/progress": "2.2.23", - "@heroui/radio": "2.3.28", + "@heroui/link": "2.2.25", + "@heroui/listbox": "2.3.28", + "@heroui/menu": "2.2.28", + "@heroui/modal": "2.2.26", + "@heroui/navbar": "2.2.27", + "@heroui/number-input": "2.0.20", + "@heroui/pagination": "2.2.26", + "@heroui/popover": "2.3.29", + "@heroui/progress": "2.2.24", + "@heroui/radio": "2.3.29", "@heroui/ripple": "2.2.21", "@heroui/scroll-shadow": "2.3.19", - "@heroui/select": "2.4.29", + "@heroui/select": "2.4.30", "@heroui/skeleton": "2.2.18", - "@heroui/slider": "2.4.25", - "@heroui/snippet": "2.2.29", + "@heroui/slider": "2.4.26", + "@heroui/snippet": "2.2.30", "@heroui/spacer": "2.2.22", - "@heroui/spinner": "2.2.25", - "@heroui/switch": "2.2.25", - "@heroui/system": "2.4.24", - "@heroui/table": "2.2.28", - "@heroui/tabs": "2.2.25", - "@heroui/theme": "2.4.24", - "@heroui/toast": "2.0.18", - "@heroui/tooltip": "2.2.25", - "@heroui/user": "2.2.23", - "@react-aria/visually-hidden": "3.8.28" + "@heroui/spinner": "2.2.26", + "@heroui/switch": "2.2.26", + "@heroui/system": "2.4.25", + "@heroui/table": "2.2.29", + "@heroui/tabs": "2.2.26", + "@heroui/theme": "2.4.25", + "@heroui/toast": "2.0.19", + "@heroui/tooltip": "2.2.26", + "@heroui/user": "2.2.24", + "@react-aria/visually-hidden": "3.8.29" }, "peerDependencies": { "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", @@ -2170,33 +2187,33 @@ } }, "node_modules/@heroui/select": { - "version": "2.4.29", - "resolved": "https://registry.npmjs.org/@heroui/select/-/select-2.4.29.tgz", - "integrity": "sha512-rFsI+UNUtK6WTm6oDM8A45tu8rDqt1zHoSoBQ8RJDkRITDcKRBTaTnvJI/Ez+kMRNH4fQ45LgoSPxw/JOOMg4w==", + "version": "2.4.30", + "resolved": "https://registry.npmjs.org/@heroui/select/-/select-2.4.30.tgz", + "integrity": "sha512-laGM9ib4E/wxWu0T5/85yQZaKaT9HYP2hqy+xpD0HrKOZxBEINyTIbPJCq3cB9LiM6qqJJk/2A3rRjKNSj1Law==", "dependencies": { - "@heroui/aria-utils": "2.2.25", - "@heroui/form": "2.1.28", - "@heroui/listbox": "2.3.27", - "@heroui/popover": "2.3.28", + "@heroui/aria-utils": "2.2.26", + "@heroui/form": "2.1.29", + "@heroui/listbox": "2.3.28", + "@heroui/popover": "2.3.29", "@heroui/react-utils": "2.1.14", "@heroui/scroll-shadow": "2.3.19", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.25", - "@heroui/use-aria-button": "2.2.20", - "@heroui/use-aria-multiselect": "2.4.19", + "@heroui/spinner": "2.2.26", + "@heroui/use-aria-button": "2.2.21", + "@heroui/use-aria-multiselect": "2.4.20", "@heroui/use-form-reset": "2.0.1", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/focus": "3.21.2", - "@react-aria/form": "3.1.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/overlays": "3.30.0", - "@react-aria/visually-hidden": "3.8.28", + "@react-aria/focus": "3.21.3", + "@react-aria/form": "3.1.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/overlays": "3.31.0", + "@react-aria/visually-hidden": "3.8.29", "@react-types/shared": "3.32.1" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" @@ -2231,43 +2248,43 @@ } }, "node_modules/@heroui/slider": { - "version": "2.4.25", - "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.25.tgz", - "integrity": "sha512-1ULgaqsu1Vzyyx6S7TGs+13PX5BGArZhLiApQfKwiA3TFvT0MNzTVoWVgyFZ8XLqh4esSUnqddhivqQhbRzrHw==", + "version": "2.4.26", + "resolved": "https://registry.npmjs.org/@heroui/slider/-/slider-2.4.26.tgz", + "integrity": "sha512-hsJOyNjixw8QK5DC9yMWSOg9abbRuXRXao0ZxQH+/xM8F59eb5xZaqopbN7aFmBP7G28Tfge4i36vE8TsK2Q/g==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.25", - "@react-aria/focus": "3.21.2", - "@react-aria/i18n": "3.12.13", - "@react-aria/interactions": "3.25.6", - "@react-aria/slider": "3.8.2", - "@react-aria/visually-hidden": "3.8.28", - "@react-stately/slider": "3.7.2" + "@heroui/tooltip": "2.2.26", + "@react-aria/focus": "3.21.3", + "@react-aria/i18n": "3.12.14", + "@react-aria/interactions": "3.26.0", + "@react-aria/slider": "3.8.3", + "@react-aria/visually-hidden": "3.8.29", + "@react-stately/slider": "3.7.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/snippet": { - "version": "2.2.29", - "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.29.tgz", - "integrity": "sha512-RuyK/DldxvVYb6ToPk5cNNYeDkL+phKZPYHrUxBJK/PzuAkqi3AzQV7zHd+3IfTNxQbevRjzCXENE5F3GKP/MQ==", + "version": "2.2.30", + "resolved": "https://registry.npmjs.org/@heroui/snippet/-/snippet-2.2.30.tgz", + "integrity": "sha512-o/fNVh4jtYAH8/2F6uU7pFdJiCCWZYN0LaPC57dRo8FNxL6+kcxt13Lp+sCBVKEDnuBmMtlL1prjMedX7VqzfQ==", "dependencies": { - "@heroui/button": "2.2.28", + "@heroui/button": "2.2.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/tooltip": "2.2.25", + "@heroui/tooltip": "2.2.26", "@heroui/use-clipboard": "2.1.9", - "@react-aria/focus": "3.21.2" + "@react-aria/focus": "3.21.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" @@ -2289,51 +2306,51 @@ } }, "node_modules/@heroui/spinner": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.25.tgz", - "integrity": "sha512-zDuLJicUL51vGLEBbHWy/t6DlOvs9YILM4YLmzS/o84ExTgfrCycXNs6JkoteFiNu570qqZMeAA2aYneGfl/PQ==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/spinner/-/spinner-2.2.26.tgz", + "integrity": "sha512-AtZhUO+IrZwahdQ5FOVptOZRMz7Z51gDUuj1K3pEJvOiKW+zvqab9BHYW9A09nd7qMH+DMM/41PQJbZg+eOHzg==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@heroui/system": "2.4.24", + "@heroui/system": "2.4.25", "@heroui/system-rsc": "2.3.21" }, "peerDependencies": { - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/switch": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.25.tgz", - "integrity": "sha512-F0Yj+kgVfD2bdy6REFvNySeGuYg1OT2phwMPwSZGUl7ZFeGSvvWSnbYS4/wS3JIM5PyEibSaB8QIPc8r00xq1A==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/switch/-/switch-2.2.26.tgz", + "integrity": "sha512-c/FCzromB+ww8AObgA0H//jOrhxyn0MllWVeEwMXac7O6z/N4B+fJ8dLCu/vu1zchySFLuDq/PaETEMJ7hKW4A==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/switch": "3.7.8", - "@react-aria/visually-hidden": "3.8.28", - "@react-stately/toggle": "3.9.2" + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/switch": "3.7.9", + "@react-aria/visually-hidden": "3.8.29", + "@react-stately/toggle": "3.9.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/system": { - "version": "2.4.24", - "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.24.tgz", - "integrity": "sha512-9GKQgUc91otQfwmq6TLE72QKxtB341aK5NpBHS3gRoWYEuNN714Zl3OXwIZNvdXPJpsTaUo1ID1ibJU9tfgwdg==", + "version": "2.4.25", + "resolved": "https://registry.npmjs.org/@heroui/system/-/system-2.4.25.tgz", + "integrity": "sha512-F6UUoGTQ+Qas5wYkCzLjXE7u74Z9ygO0u0+dkTW7zCaY7ds65CcmvZ/ahKz2ES3Tk6TNks1MJSyaQ9rFLs8AqA==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/system-rsc": "2.3.21", - "@react-aria/i18n": "3.12.13", - "@react-aria/overlays": "3.30.0", - "@react-aria/utils": "3.31.0" + "@react-aria/i18n": "3.12.14", + "@react-aria/overlays": "3.31.0", + "@react-aria/utils": "3.32.0" }, "peerDependencies": { "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", @@ -2354,19 +2371,19 @@ } }, "node_modules/@heroui/table": { - "version": "2.2.28", - "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.28.tgz", - "integrity": "sha512-0z3xs0kxDXvvd9gy/uHgvK0/bmpJF0m9t3omNMnB0I0EUx+gJ/CnaaPiF9M5veg/128rc45J7X2FgY3fPAKcmA==", + "version": "2.2.29", + "resolved": "https://registry.npmjs.org/@heroui/table/-/table-2.2.29.tgz", + "integrity": "sha512-/YLP1+cSSiolj1kvU6YSge4BNvwqT7yDom8YebBHCjidwOBbORGHh6HJ9btVk2GUzdTh57N9vErh9VCEuz5/DA==", "dependencies": { - "@heroui/checkbox": "2.3.28", + "@heroui/checkbox": "2.3.29", "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/table": "3.17.8", - "@react-aria/visually-hidden": "3.8.28", - "@react-stately/table": "3.15.1", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/table": "3.17.9", + "@react-aria/visually-hidden": "3.8.29", + "@react-stately/table": "3.15.2", "@react-stately/virtualizer": "4.4.4", "@react-types/grid": "3.3.6", "@react-types/table": "3.13.4", @@ -2374,39 +2391,39 @@ }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/tabs": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.25.tgz", - "integrity": "sha512-bIpz/8TTNMabmzObN2zs+3WhQXbKyr9tZUPkk3rMQxIshpg9oyyEWOS8XiMBxrEzSByLfPNypl5sX1au6Dw2Ew==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/tabs/-/tabs-2.2.26.tgz", + "integrity": "sha512-RK5TjDI2KY1i/zyO/zzwkiDnQEYxcXSu9QCigNLcCZ6SXq0J3n83FC5Vv91kFwU9aTRuwdxIHv5KzV7D8Xe14w==", "dependencies": { - "@heroui/aria-utils": "2.2.25", + "@heroui/aria-utils": "2.2.26", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", "@heroui/use-is-mounted": "2.1.8", - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/tabs": "3.10.8", - "@react-stately/tabs": "3.8.6", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/tabs": "3.10.9", + "@react-stately/tabs": "3.8.7", "@react-types/shared": "3.32.1", "scroll-into-view-if-needed": "3.0.10" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/theme": { - "version": "2.4.24", - "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.24.tgz", - "integrity": "sha512-lL+anmY4GGWwKyTbJ2PEBZE4talIZ3hu4yGpku9TktCVG2nC2YTwiWQFJ+Jcbf8Cf9vuLzI1sla5bz2jUqiBRA==", + "version": "2.4.25", + "resolved": "https://registry.npmjs.org/@heroui/theme/-/theme-2.4.25.tgz", + "integrity": "sha512-nTptYhO1V9rMoh9SJDnMfaSmFuoXvbem1UuwgHcraRtqy/TIVBPqv26JEGzSoUCL194TDGOJpqrpMuab/PdXcw==", "dependencies": { "@heroui/shared-utils": "2.1.12", "color": "^4.2.3", @@ -2421,62 +2438,62 @@ } }, "node_modules/@heroui/toast": { - "version": "2.0.18", - "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.18.tgz", - "integrity": "sha512-5IoqEq10W/AaUgKWKIR7bbTB6U+rHMkikzGwW+IndsvFLR3meyb5l4K5cmVCmDsMHubUaRa9UFDeAokyNXvpWA==", + "version": "2.0.19", + "resolved": "https://registry.npmjs.org/@heroui/toast/-/toast-2.0.19.tgz", + "integrity": "sha512-KUl/vIMoZQxpjLPxx57XKh39Ai1CyPqm+1Pn20xOVk0fV+2SqC7OW6xivwwBnS7rysx8JfMHwNVJUHTU0o9K1Q==", "dependencies": { "@heroui/react-utils": "2.1.14", "@heroui/shared-icons": "2.1.10", "@heroui/shared-utils": "2.1.12", - "@heroui/spinner": "2.2.25", + "@heroui/spinner": "2.2.26", "@heroui/use-is-mobile": "2.2.12", - "@react-aria/interactions": "3.25.6", - "@react-aria/toast": "3.0.8", + "@react-aria/interactions": "3.26.0", + "@react-aria/toast": "3.0.9", "@react-stately/toast": "3.1.2" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/tooltip": { - "version": "2.2.25", - "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.25.tgz", - "integrity": "sha512-f+WxkQy0YBzzE6VhzVgA/CeD7nvo0hhOapx0UScU8zsQ1J+n5Kr5YY/7CgMHmFLyC/Amrqlf7WSgljRl4iWivQ==", + "version": "2.2.26", + "resolved": "https://registry.npmjs.org/@heroui/tooltip/-/tooltip-2.2.26.tgz", + "integrity": "sha512-VERreBoBAjqFLvJmBMVvRCYGxr+nq8gcncC14ewWqCzWwv/WQm9wVSqHLnwCtZelOz7ofaDDqvxmGjMqzMnqFw==", "dependencies": { - "@heroui/aria-utils": "2.2.25", + "@heroui/aria-utils": "2.2.26", "@heroui/dom-animation": "2.1.10", - "@heroui/framer-utils": "2.1.24", + "@heroui/framer-utils": "2.1.25", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@heroui/use-aria-overlay": "2.0.4", + "@heroui/use-aria-overlay": "2.0.5", "@heroui/use-safe-layout-effect": "2.1.8", - "@react-aria/overlays": "3.30.0", - "@react-aria/tooltip": "3.8.8", - "@react-stately/tooltip": "3.5.8", + "@react-aria/overlays": "3.31.0", + "@react-aria/tooltip": "3.9.0", + "@react-stately/tooltip": "3.5.9", "@react-types/overlays": "3.9.2", - "@react-types/tooltip": "3.4.21" + "@react-types/tooltip": "3.5.0" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "framer-motion": ">=11.5.6 || >=12.0.0-alpha.1", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/use-aria-accordion": { - "version": "2.2.18", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-accordion/-/use-aria-accordion-2.2.18.tgz", - "integrity": "sha512-qjRkae2p4MFDrNqO6v6YCor0BtVi3idMd1dsI82XM16bxLQ2stqG4Ajrg60xV0AN+WKZUq10oetqkJuY6MYg0w==", + "version": "2.2.19", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-accordion/-/use-aria-accordion-2.2.19.tgz", + "integrity": "sha512-4HGY2zr+MIzRtIO9epFQGeU7VeGqhCotxxXzscfwxLfEeHBJwQvMAsu7yrUQ/uyMGvSiohHlJRgIsuT1xzxH1Q==", "dependencies": { - "@react-aria/button": "3.14.2", - "@react-aria/focus": "3.21.2", - "@react-aria/selection": "3.26.0", - "@react-stately/tree": "3.9.3", + "@react-aria/button": "3.14.3", + "@react-aria/focus": "3.21.3", + "@react-aria/selection": "3.27.0", + "@react-stately/tree": "3.9.4", "@react-types/accordion": "3.0.0-alpha.26", "@react-types/shared": "3.32.1" }, @@ -2485,13 +2502,13 @@ } }, "node_modules/@heroui/use-aria-button": { - "version": "2.2.20", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-button/-/use-aria-button-2.2.20.tgz", - "integrity": "sha512-Y0Bmze/pxEACKsHMbA1sYA3ghMJ+9fSnWvZBwlUxqiVXDEy2YrrK2JmXEgsuHGQdKD9RqU2Od3V4VqIIiaHiMA==", + "version": "2.2.21", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-button/-/use-aria-button-2.2.21.tgz", + "integrity": "sha512-8Lhjt1xoDpjhqvEbFC21NEgU89p7Z+MAzrDyoF1eYUn/w4ahhBgcQStP6WicLfx50tOE10WDpPBq72tah/O+ww==", "dependencies": { - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/utils": "3.31.0", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/utils": "3.32.0", "@react-types/button": "3.14.1", "@react-types/shared": "3.32.1" }, @@ -2500,13 +2517,13 @@ } }, "node_modules/@heroui/use-aria-link": { - "version": "2.2.21", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-link/-/use-aria-link-2.2.21.tgz", - "integrity": "sha512-sG2rUutT/E/FYguzZmg715cXcM6+ue9wRfs2Gi6epWJwIVpS51uEagJKY0wIutJDfuCPfQ9AuxXfJek4CnxjKw==", + "version": "2.2.22", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-link/-/use-aria-link-2.2.22.tgz", + "integrity": "sha512-T7wESiV9IBqe5MILMZ1pL+GIWxyPVj7ag/KUhZUH3v/dm94m+f2Ua7rXxzI+hj2H51s189YP+Eb1PagHMDrfPg==", "dependencies": { - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/utils": "3.31.0", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/utils": "3.32.0", "@react-types/link": "3.6.5", "@react-types/shared": "3.32.1" }, @@ -2515,14 +2532,14 @@ } }, "node_modules/@heroui/use-aria-modal-overlay": { - "version": "2.2.19", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-modal-overlay/-/use-aria-modal-overlay-2.2.19.tgz", - "integrity": "sha512-MPvszNrt+1DauiSyOAwb0pKbYahpEVi9hrmidnO8cd1SA7B2ES0fNRBeNMAwcaeR/Nzsv+Cw1hRXt3egwqi0lg==", + "version": "2.2.20", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-modal-overlay/-/use-aria-modal-overlay-2.2.20.tgz", + "integrity": "sha512-AIYfpnpiRVJm3InKlroGqQSZ1hjBI0Y5oMhMrXuQqrySsMKzMye3zMcEBWf8dEho1l+/U0dgNIUJFbkEFsOc8w==", "dependencies": { - "@heroui/use-aria-overlay": "2.0.4", - "@react-aria/overlays": "3.30.0", - "@react-aria/utils": "3.31.0", - "@react-stately/overlays": "3.6.20" + "@heroui/use-aria-overlay": "2.0.5", + "@react-aria/overlays": "3.31.0", + "@react-aria/utils": "3.32.0", + "@react-stately/overlays": "3.6.21" }, "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0", @@ -2530,20 +2547,20 @@ } }, "node_modules/@heroui/use-aria-multiselect": { - "version": "2.4.19", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-multiselect/-/use-aria-multiselect-2.4.19.tgz", - "integrity": "sha512-RLDSpOLJqNESn6OK/zKuyTriK6sqMby76si/4kTMCs+4lmMPOyFKP3fREywu+zyJjRUCuZPa6xYuN2OHKQRDow==", + "version": "2.4.20", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-multiselect/-/use-aria-multiselect-2.4.20.tgz", + "integrity": "sha512-Tvbk2AaWfGYgL6Sn9SwsI+nSOcaD1e3wWGPEqHzeFgoSV6cT7oLY70TODD/HyTF+LKYPtYUbAenxDd80Z5j+Eg==", "dependencies": { - "@react-aria/i18n": "3.12.13", - "@react-aria/interactions": "3.25.6", - "@react-aria/label": "3.7.22", - "@react-aria/listbox": "3.15.0", - "@react-aria/menu": "3.19.3", - "@react-aria/selection": "3.26.0", - "@react-aria/utils": "3.31.0", + "@react-aria/i18n": "3.12.14", + "@react-aria/interactions": "3.26.0", + "@react-aria/label": "3.7.23", + "@react-aria/listbox": "3.15.1", + "@react-aria/menu": "3.19.4", + "@react-aria/selection": "3.27.0", + "@react-aria/utils": "3.32.0", "@react-stately/form": "3.2.2", - "@react-stately/list": "3.13.1", - "@react-stately/menu": "3.9.8", + "@react-stately/list": "3.13.2", + "@react-stately/menu": "3.9.9", "@react-types/button": "3.14.1", "@react-types/overlays": "3.9.2", "@react-types/shared": "3.32.1" @@ -2554,13 +2571,13 @@ } }, "node_modules/@heroui/use-aria-overlay": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@heroui/use-aria-overlay/-/use-aria-overlay-2.0.4.tgz", - "integrity": "sha512-iv+y0+OvQd1eWiZftPI07JE3c5AdK85W5k3rDlhk5MFEI3dllkIpu8z8zLh3ge/BQGFiGkySVC5iXl8w84gMUQ==", + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@heroui/use-aria-overlay/-/use-aria-overlay-2.0.5.tgz", + "integrity": "sha512-2g1HxRoDzGAqIkW7s09WEXg+SAWslh+ZkIuixNAqsA60FHSAzQtGCNpbE2yFeMrukhbmRfS8t3hT2JVZVAXG7w==", "dependencies": { - "@react-aria/focus": "3.21.2", - "@react-aria/interactions": "3.25.6", - "@react-aria/overlays": "3.30.0", + "@react-aria/focus": "3.21.3", + "@react-aria/interactions": "3.26.0", + "@react-aria/overlays": "3.31.0", "@react-types/shared": "3.32.1" }, "peerDependencies": { @@ -2599,24 +2616,24 @@ } }, "node_modules/@heroui/use-disclosure": { - "version": "2.2.17", - "resolved": "https://registry.npmjs.org/@heroui/use-disclosure/-/use-disclosure-2.2.17.tgz", - "integrity": "sha512-S3pN0WmpcTTZuQHcXw4RcTVsxLaCZ95H5qi/JPN83ahhWTCC+pN8lwE37vSahbMTM1YriiHyTM6AWpv/E3Jq7w==", + "version": "2.2.18", + "resolved": "https://registry.npmjs.org/@heroui/use-disclosure/-/use-disclosure-2.2.18.tgz", + "integrity": "sha512-aR/4oITXOyt8uze9EdfL/b2j8pg75dc92Q8FfoT17MibD6nKI1VmQDA+9CAtUwuKq6rSrEGqc14muO3GYpTH4g==", "dependencies": { "@heroui/use-callback-ref": "2.1.8", - "@react-aria/utils": "3.31.0", - "@react-stately/utils": "3.10.8" + "@react-aria/utils": "3.32.0", + "@react-stately/utils": "3.11.0" }, "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" } }, "node_modules/@heroui/use-draggable": { - "version": "2.1.18", - "resolved": "https://registry.npmjs.org/@heroui/use-draggable/-/use-draggable-2.1.18.tgz", - "integrity": "sha512-ihQdmLGYJ6aTEaJ0/yCXYn6VRdrRV2eO03XD2A3KANZPb1Bj/n4r298xNMql5VnGq5ZNDJB9nTv8NNCu9pmPdg==", + "version": "2.1.19", + "resolved": "https://registry.npmjs.org/@heroui/use-draggable/-/use-draggable-2.1.19.tgz", + "integrity": "sha512-pk0Oe4QLcjr1gndcuvq+8z6eoM+v3lvbmEDxbsEjeST9AwykfmT/60X+xrPFtCkfYldYXk1UxBPiGwrFs0lscQ==", "dependencies": { - "@react-aria/interactions": "3.25.6" + "@react-aria/interactions": "3.26.0" }, "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" @@ -2678,12 +2695,12 @@ } }, "node_modules/@heroui/use-pagination": { - "version": "2.2.18", - "resolved": "https://registry.npmjs.org/@heroui/use-pagination/-/use-pagination-2.2.18.tgz", - "integrity": "sha512-qm1mUe5UgV0kPZItcs/jiX/BxzdDagmcxaJkYR6DkhfMRoCuOdoJhcoh8ncbCAgHpzPESPn1VxsOcG4/Y+Jkdw==", + "version": "2.2.19", + "resolved": "https://registry.npmjs.org/@heroui/use-pagination/-/use-pagination-2.2.19.tgz", + "integrity": "sha512-0VLyxge+rPBexK7xoLgPwCC8ngh9vIgHEtS+sRvulcEy4grG9EvZWUfMpMeiboFc5Ku2l5u+D9jYkaV06EY4Rw==", "dependencies": { "@heroui/shared-utils": "2.1.12", - "@react-aria/i18n": "3.12.13" + "@react-aria/i18n": "3.12.14" }, "peerDependencies": { "react": ">=18 || >=19.0.0-rc.0" @@ -2722,18 +2739,18 @@ } }, "node_modules/@heroui/user": { - "version": "2.2.23", - "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.23.tgz", - "integrity": "sha512-o/ngJ4yTD4svjYKSP3hJNwhyWLhHk5g/wjqGvH81INfpeV7wPlzpM/C6LIezGB3rZjGM9d4ozSofv6spbCKCiA==", + "version": "2.2.24", + "resolved": "https://registry.npmjs.org/@heroui/user/-/user-2.2.24.tgz", + "integrity": "sha512-SH8MlILc1Nn7lBvbvsqNok6H36+FrhT9VIQlKwzzX/tidr15LRK74F1k8UPV7PBAxDKxQ0FRCictCXI8dN9lcQ==", "dependencies": { - "@heroui/avatar": "2.2.23", + "@heroui/avatar": "2.2.24", "@heroui/react-utils": "2.1.14", "@heroui/shared-utils": "2.1.12", - "@react-aria/focus": "3.21.2" + "@react-aria/focus": "3.21.3" }, "peerDependencies": { "@heroui/system": ">=2.4.18", - "@heroui/theme": ">=2.4.23", + "@heroui/theme": ">=2.4.24", "react": ">=18 || >=19.0.0-rc.0", "react-dom": ">=18 || >=19.0.0-rc.0" } @@ -2936,9 +2953,9 @@ } }, "node_modules/@internationalized/date": { - "version": "3.10.0", - "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.10.0.tgz", - "integrity": "sha512-oxDR/NTEJ1k+UFVQElaNIk65E/Z83HK1z1WI3lQyhTtnNg4R5oVXaPzK3jcpKG8UHKDVuDQHzn+wsxSz8RP3aw==", + "version": "3.10.1", + "resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.10.1.tgz", + "integrity": "sha512-oJrXtQiAXLvT9clCf1K4kxp3eKsQhIaZqxEyowkBcsvZDdZkbWrVmnGknxs5flTD0VGsxrxKgBCZty1EzoiMzA==", "dependencies": { "@swc/helpers": "^0.5.0" } @@ -3192,21 +3209,21 @@ "license": "MIT" }, "node_modules/@posthog/core": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.8.1.tgz", - "integrity": "sha512-jfzBtQIk9auRi/biO+G/gumK5KxqsD5wOr7XpYMROE/I3pazjP4zIziinp21iQuIQJMXrDvwt9Af3njgOGwtew==", + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/@posthog/core/-/core-1.9.0.tgz", + "integrity": "sha512-j7KSWxJTUtNyKynLt/p0hfip/3I46dWU2dk+pt7dKRoz2l5CYueHuHK4EO7Wlgno5yo1HO4sc4s30MXMTICHJw==", "dependencies": { "cross-spawn": "^7.0.6" } }, "node_modules/@react-aria/breadcrumbs": { - "version": "3.5.29", - "resolved": "https://registry.npmjs.org/@react-aria/breadcrumbs/-/breadcrumbs-3.5.29.tgz", - "integrity": "sha512-rKS0dryllaZJqrr3f/EAf2liz8CBEfmL5XACj+Z1TAig6GIYe1QuA3BtkX0cV9OkMugXdX8e3cbA7nD10ORRqg==", + "version": "3.5.30", + "resolved": "https://registry.npmjs.org/@react-aria/breadcrumbs/-/breadcrumbs-3.5.30.tgz", + "integrity": "sha512-DZymglA70SwvDJA7GB147sUexvdDy6vWcriGrlEHhMMzBLhGB30I5J96R4pPzURLxXISrWFH56KC5rRgIqsqqg==", "dependencies": { - "@react-aria/i18n": "^3.12.13", - "@react-aria/link": "^3.8.6", - "@react-aria/utils": "^3.31.0", + "@react-aria/i18n": "^3.12.14", + "@react-aria/link": "^3.8.7", + "@react-aria/utils": "^3.32.0", "@react-types/breadcrumbs": "^3.7.17", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3217,14 +3234,14 @@ } }, "node_modules/@react-aria/button": { - "version": "3.14.2", - "resolved": "https://registry.npmjs.org/@react-aria/button/-/button-3.14.2.tgz", - "integrity": "sha512-VbLIA+Kd6f/MDjd+TJBUg2+vNDw66pnvsj2E4RLomjI9dfBuN7d+Yo2UnsqKVyhePjCUZ6xxa2yDuD63IOSIYA==", + "version": "3.14.3", + "resolved": "https://registry.npmjs.org/@react-aria/button/-/button-3.14.3.tgz", + "integrity": "sha512-iJTuEECs9im7TwrCRZ0dvuwp8Gao0+I1IuYs1LQvJQgKLpgRH2/6jAiqb2bdAcoAjdbaMs7Xe0xUwURpVNkEyA==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/toolbar": "3.0.0-beta.21", - "@react-aria/utils": "^3.31.0", - "@react-stately/toggle": "^3.9.2", + "@react-aria/interactions": "^3.26.0", + "@react-aria/toolbar": "3.0.0-beta.22", + "@react-aria/utils": "^3.32.0", + "@react-stately/toggle": "^3.9.3", "@react-types/button": "^3.14.1", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3235,18 +3252,18 @@ } }, "node_modules/@react-aria/calendar": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@react-aria/calendar/-/calendar-3.9.2.tgz", - "integrity": "sha512-uSLxLgOPRnEU4Jg59lAhUVA+uDx/55NBg4lpfsP2ynazyiJ5LCXmYceJi+VuOqMml7d9W0dB87OldOeLdIxYVA==", + "version": "3.9.3", + "resolved": "https://registry.npmjs.org/@react-aria/calendar/-/calendar-3.9.3.tgz", + "integrity": "sha512-F12UQ4zd8GIxpJxs9GAHzDD9Lby2hESHm0LF5tjsYBIOBJc5K7ICeeE5UqLMBPzgnEP5nfh1CKS8KhCB0mS7PA==", "dependencies": { - "@internationalized/date": "^3.10.0", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", + "@internationalized/date": "^3.10.1", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", "@react-aria/live-announcer": "^3.4.4", - "@react-aria/utils": "^3.31.0", - "@react-stately/calendar": "^3.9.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/calendar": "^3.9.1", "@react-types/button": "^3.14.1", - "@react-types/calendar": "^3.8.0", + "@react-types/calendar": "^3.8.1", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3256,18 +3273,18 @@ } }, "node_modules/@react-aria/checkbox": { - "version": "3.16.2", - "resolved": "https://registry.npmjs.org/@react-aria/checkbox/-/checkbox-3.16.2.tgz", - "integrity": "sha512-29Mj9ZqXioJ0bcMnNGooHztnTau5pikZqX3qCRj5bYR3by/ZFFavYoMroh9F7s/MbFm/tsKX+Sf02lYFEdXRjA==", + "version": "3.16.3", + "resolved": "https://registry.npmjs.org/@react-aria/checkbox/-/checkbox-3.16.3.tgz", + "integrity": "sha512-2p1haCUtERo5XavBAWNaX//dryNVnOOWfSKyzLs4UiCZR/NL0ttN+Nu/i445q0ipjLqZ6bBJtx0g0NNrubbU7Q==", "dependencies": { - "@react-aria/form": "^3.1.2", - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/toggle": "^3.12.2", - "@react-aria/utils": "^3.31.0", - "@react-stately/checkbox": "^3.7.2", + "@react-aria/form": "^3.1.3", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/toggle": "^3.12.3", + "@react-aria/utils": "^3.32.0", + "@react-stately/checkbox": "^3.7.3", "@react-stately/form": "^3.2.2", - "@react-stately/toggle": "^3.9.2", + "@react-stately/toggle": "^3.9.3", "@react-types/checkbox": "^3.10.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3278,24 +3295,24 @@ } }, "node_modules/@react-aria/combobox": { - "version": "3.14.0", - "resolved": "https://registry.npmjs.org/@react-aria/combobox/-/combobox-3.14.0.tgz", - "integrity": "sha512-z4ro0Hma//p4nL2IJx5iUa7NwxeXbzSoZ0se5uTYjG1rUUMszg+wqQh/AQoL+eiULn7rs18JY9wwNbVIkRNKWA==", + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/@react-aria/combobox/-/combobox-3.14.1.tgz", + "integrity": "sha512-wuP/4UQrGsYXLw1Gk8G/FcnUlHuoViA9G6w3LhtUgu5Q3E5DvASJalxej3NtyYU+4w4epD1gJidzosAL0rf8Ug==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/listbox": "^3.15.0", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/listbox": "^3.15.1", "@react-aria/live-announcer": "^3.4.4", - "@react-aria/menu": "^3.19.3", - "@react-aria/overlays": "^3.30.0", - "@react-aria/selection": "^3.26.0", - "@react-aria/textfield": "^3.18.2", - "@react-aria/utils": "^3.31.0", + "@react-aria/menu": "^3.19.4", + "@react-aria/overlays": "^3.31.0", + "@react-aria/selection": "^3.27.0", + "@react-aria/textfield": "^3.18.3", + "@react-aria/utils": "^3.32.0", "@react-stately/collections": "^3.12.8", - "@react-stately/combobox": "^3.12.0", + "@react-stately/combobox": "^3.12.1", "@react-stately/form": "^3.2.2", "@react-types/button": "^3.14.1", - "@react-types/combobox": "^3.13.9", + "@react-types/combobox": "^3.13.10", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3305,25 +3322,25 @@ } }, "node_modules/@react-aria/datepicker": { - "version": "3.15.2", - "resolved": "https://registry.npmjs.org/@react-aria/datepicker/-/datepicker-3.15.2.tgz", - "integrity": "sha512-th078hyNqPf4P2K10su/y32zPDjs3lOYVdHvsL9/+5K1dnTvLHCK5vgUyLuyn8FchhF7cmHV49D+LZVv65PEpQ==", + "version": "3.15.3", + "resolved": "https://registry.npmjs.org/@react-aria/datepicker/-/datepicker-3.15.3.tgz", + "integrity": "sha512-0KkLYeLs+IubHXb879n8dzzKU/NWcxC9DXtv7M/ofL7vAvMSTmaceYJcMW+2gGYhJVpyYz8B6bk0W7kTxgB3jg==", "dependencies": { - "@internationalized/date": "^3.10.0", + "@internationalized/date": "^3.10.1", "@internationalized/number": "^3.6.5", "@internationalized/string": "^3.2.7", - "@react-aria/focus": "^3.21.2", - "@react-aria/form": "^3.1.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/spinbutton": "^3.6.19", - "@react-aria/utils": "^3.31.0", - "@react-stately/datepicker": "^3.15.2", + "@react-aria/focus": "^3.21.3", + "@react-aria/form": "^3.1.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/spinbutton": "^3.7.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/datepicker": "^3.15.3", "@react-stately/form": "^3.2.2", "@react-types/button": "^3.14.1", - "@react-types/calendar": "^3.8.0", - "@react-types/datepicker": "^3.13.2", + "@react-types/calendar": "^3.8.1", + "@react-types/datepicker": "^3.13.3", "@react-types/dialog": "^3.5.22", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3334,13 +3351,13 @@ } }, "node_modules/@react-aria/dialog": { - "version": "3.5.31", - "resolved": "https://registry.npmjs.org/@react-aria/dialog/-/dialog-3.5.31.tgz", - "integrity": "sha512-inxQMyrzX0UBW9Mhraq0nZ4HjHdygQvllzloT1E/RlDd61lr3RbmJR6pLsrbKOTtSvDIBJpCso1xEdHCFNmA0Q==", + "version": "3.5.32", + "resolved": "https://registry.npmjs.org/@react-aria/dialog/-/dialog-3.5.32.tgz", + "integrity": "sha512-2puMjsJS2FtB8LiFuQDAdBSU4dt3lqdJn4FWt/8GL6l91RZBqp2Dnm5Obuee6rV2duNJZcSAUWsQZ/S1iW8Y2g==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/overlays": "^3.30.0", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/overlays": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-types/dialog": "^3.5.22", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3351,12 +3368,12 @@ } }, "node_modules/@react-aria/focus": { - "version": "3.21.2", - "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.2.tgz", - "integrity": "sha512-JWaCR7wJVggj+ldmM/cb/DXFg47CXR55lznJhZBh4XVqJjMKwaOOqpT5vNN7kpC1wUpXicGNuDnJDN1S/+6dhQ==", + "version": "3.21.3", + "resolved": "https://registry.npmjs.org/@react-aria/focus/-/focus-3.21.3.tgz", + "integrity": "sha512-FsquWvjSCwC2/sBk4b+OqJyONETUIXQ2vM0YdPAuC+QFQh2DT6TIBo6dOZVSezlhudDla69xFBd6JvCFq1AbUw==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0", "clsx": "^2.0.0" @@ -3367,12 +3384,12 @@ } }, "node_modules/@react-aria/form": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@react-aria/form/-/form-3.1.2.tgz", - "integrity": "sha512-R3i7L7Ci61PqZQvOrnL9xJeWEbh28UkTVgkj72EvBBn39y4h7ReH++0stv7rRs8p5ozETSKezBbGfu4UsBewWw==", + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@react-aria/form/-/form-3.1.3.tgz", + "integrity": "sha512-HAKnPjMiqTxoGLVbfZyGYcZQ1uu6aSeCi9ODmtZuKM5DWZZnTUjDmM1i2L6IXvF+d1kjyApyJC7VTbKZ8AI77g==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", "@react-stately/form": "^3.2.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3383,19 +3400,19 @@ } }, "node_modules/@react-aria/grid": { - "version": "3.14.5", - "resolved": "https://registry.npmjs.org/@react-aria/grid/-/grid-3.14.5.tgz", - "integrity": "sha512-XHw6rgjlTqc85e3zjsWo3U0EVwjN5MOYtrolCKc/lc2ItNdcY3OlMhpsU9+6jHwg/U3VCSWkGvwAz9hg7krd8Q==", + "version": "3.14.6", + "resolved": "https://registry.npmjs.org/@react-aria/grid/-/grid-3.14.6.tgz", + "integrity": "sha512-xagBKHNPu4Ovt/I5He7T/oIEq82MDMSrRi5Sw3oxSCwwtZpv+7eyKRSrFz9vrNUzNgWCcx5VHLE660bLdeVNDQ==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", "@react-aria/live-announcer": "^3.4.4", - "@react-aria/selection": "^3.26.0", - "@react-aria/utils": "^3.31.0", + "@react-aria/selection": "^3.27.0", + "@react-aria/utils": "^3.32.0", "@react-stately/collections": "^3.12.8", - "@react-stately/grid": "^3.11.6", - "@react-stately/selection": "^3.20.6", + "@react-stately/grid": "^3.11.7", + "@react-stately/selection": "^3.20.7", "@react-types/checkbox": "^3.10.2", "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1", @@ -3407,16 +3424,16 @@ } }, "node_modules/@react-aria/i18n": { - "version": "3.12.13", - "resolved": "https://registry.npmjs.org/@react-aria/i18n/-/i18n-3.12.13.tgz", - "integrity": "sha512-YTM2BPg0v1RvmP8keHenJBmlx8FXUKsdYIEX7x6QWRd1hKlcDwphfjzvt0InX9wiLiPHsT5EoBTpuUk8SXc0Mg==", + "version": "3.12.14", + "resolved": "https://registry.npmjs.org/@react-aria/i18n/-/i18n-3.12.14.tgz", + "integrity": "sha512-zYvs1FlLamFD49uneX3i5mPHrAsB3OjVpSWApTcPw8ydxOaphQDp/Q1aqrbcxlrQCcxZdXWHuvLlbkNR4+8jzw==", "dependencies": { - "@internationalized/date": "^3.10.0", + "@internationalized/date": "^3.10.1", "@internationalized/message": "^3.1.8", "@internationalized/number": "^3.6.5", "@internationalized/string": "^3.2.7", "@react-aria/ssr": "^3.9.10", - "@react-aria/utils": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3426,12 +3443,12 @@ } }, "node_modules/@react-aria/interactions": { - "version": "3.25.6", - "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.25.6.tgz", - "integrity": "sha512-5UgwZmohpixwNMVkMvn9K1ceJe6TzlRlAfuYoQDUuOkk62/JVJNDLAPKIf5YMRc7d2B0rmfgaZLMtbREb0Zvkw==", + "version": "3.26.0", + "resolved": "https://registry.npmjs.org/@react-aria/interactions/-/interactions-3.26.0.tgz", + "integrity": "sha512-AAEcHiltjfbmP1i9iaVw34Mb7kbkiHpYdqieWufldh4aplWgsF11YQZOfaCJW4QoR2ML4Zzoa9nfFwLXA52R7Q==", "dependencies": { "@react-aria/ssr": "^3.9.10", - "@react-aria/utils": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-stately/flags": "^3.1.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3442,11 +3459,11 @@ } }, "node_modules/@react-aria/label": { - "version": "3.7.22", - "resolved": "https://registry.npmjs.org/@react-aria/label/-/label-3.7.22.tgz", - "integrity": "sha512-jLquJeA5ZNqDT64UpTc9XJ7kQYltUlNcgxZ37/v4mHe0UZ7QohCKdKQhXHONb0h2jjNUpp2HOZI8J9++jOpzxA==", + "version": "3.7.23", + "resolved": "https://registry.npmjs.org/@react-aria/label/-/label-3.7.23.tgz", + "integrity": "sha512-dRkuCJfsyBHPTq3WOJVHNRvNyQL4cRRLELmjYfUX9/jQKIsUW2l71YnUHZTRCSn2ZjhdAcdwq96fNcQo0hncBQ==", "dependencies": { - "@react-aria/utils": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3456,11 +3473,11 @@ } }, "node_modules/@react-aria/landmark": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/@react-aria/landmark/-/landmark-3.0.7.tgz", - "integrity": "sha512-t8c610b8hPLS6Vwv+rbuSyljZosI1s5+Tosfa0Fk4q7d+Ex6Yj7hLfUFy59GxZAufhUYfGX396fT0gPqAbU1tg==", + "version": "3.0.8", + "resolved": "https://registry.npmjs.org/@react-aria/landmark/-/landmark-3.0.8.tgz", + "integrity": "sha512-xuY8kYxCrF9C0h0Pj2lZHoxCidNfQ/SrkYWXuiN+LuBTJGCmPVif93gt7TklQ0rKJ+pKJsUgh8AC0pgwI3QP7A==", "dependencies": { - "@react-aria/utils": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0", "use-sync-external-store": "^1.4.0" @@ -3471,12 +3488,12 @@ } }, "node_modules/@react-aria/link": { - "version": "3.8.6", - "resolved": "https://registry.npmjs.org/@react-aria/link/-/link-3.8.6.tgz", - "integrity": "sha512-7F7UDJnwbU9IjfoAdl6f3Hho5/WB7rwcydUOjUux0p7YVWh/fTjIFjfAGyIir7MJhPapun1D0t97QQ3+8jXVcg==", + "version": "3.8.7", + "resolved": "https://registry.npmjs.org/@react-aria/link/-/link-3.8.7.tgz", + "integrity": "sha512-TOC6Hf/x3N0P8SLR1KD/dGiJ9PmwAq8H57RiwbFbdINnG/HIvIQr5MxGTjwBvOOWcJu9brgWL5HkQaZK7Q/4Yw==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", "@react-types/link": "^3.6.5", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3487,16 +3504,16 @@ } }, "node_modules/@react-aria/listbox": { - "version": "3.15.0", - "resolved": "https://registry.npmjs.org/@react-aria/listbox/-/listbox-3.15.0.tgz", - "integrity": "sha512-Ub1Wu79R9sgxM7h4HeEdjOgOKDHwduvYcnDqsSddGXgpkL8ADjsy2YUQ0hHY5VnzA4BxK36bLp4mzSna8Qvj1w==", + "version": "3.15.1", + "resolved": "https://registry.npmjs.org/@react-aria/listbox/-/listbox-3.15.1.tgz", + "integrity": "sha512-81iDLFhmPXvLOtkI0SKzgrngfzwfR2o9oFDAYRfpYCOxgT7jjh8SaB4wCteJXRiMwymRGmgyTvD4yxWTluEeXA==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/selection": "^3.26.0", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/selection": "^3.27.0", + "@react-aria/utils": "^3.32.0", "@react-stately/collections": "^3.12.8", - "@react-stately/list": "^3.13.1", + "@react-stately/list": "^3.13.2", "@react-types/listbox": "^3.7.4", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3515,20 +3532,20 @@ } }, "node_modules/@react-aria/menu": { - "version": "3.19.3", - "resolved": "https://registry.npmjs.org/@react-aria/menu/-/menu-3.19.3.tgz", - "integrity": "sha512-52fh8y8b2776R2VrfZPpUBJYC9oTP7XDy+zZuZTxPEd7Ywk0JNUl5F92y6ru22yPkS13sdhrNM/Op+V/KulmAg==", + "version": "3.19.4", + "resolved": "https://registry.npmjs.org/@react-aria/menu/-/menu-3.19.4.tgz", + "integrity": "sha512-0A0DUEkEvZynmaD3zktHavM+EmgZSR/ht+g1ExS2jXe73CegA+dbSRfPl9eIKcHxaRrWOV96qMj2pTf0yWTBDg==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/overlays": "^3.30.0", - "@react-aria/selection": "^3.26.0", - "@react-aria/utils": "^3.31.0", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/overlays": "^3.31.0", + "@react-aria/selection": "^3.27.0", + "@react-aria/utils": "^3.32.0", "@react-stately/collections": "^3.12.8", - "@react-stately/menu": "^3.9.8", - "@react-stately/selection": "^3.20.6", - "@react-stately/tree": "^3.9.3", + "@react-stately/menu": "^3.9.9", + "@react-stately/selection": "^3.20.7", + "@react-stately/tree": "^3.9.4", "@react-types/button": "^3.14.1", "@react-types/menu": "^3.10.5", "@react-types/shared": "^3.32.1", @@ -3540,19 +3557,19 @@ } }, "node_modules/@react-aria/numberfield": { - "version": "3.12.2", - "resolved": "https://registry.npmjs.org/@react-aria/numberfield/-/numberfield-3.12.2.tgz", - "integrity": "sha512-M2b+z0HIXiXpGAWOQkO2kpIjaLNUXJ5Q3/GMa3Fkr+B1piFX0VuOynYrtddKVrmXCe+r5t+XcGb0KS29uqv7nQ==", + "version": "3.12.3", + "resolved": "https://registry.npmjs.org/@react-aria/numberfield/-/numberfield-3.12.3.tgz", + "integrity": "sha512-70LRXWPEuj2X8mbQXUx6l6We+RGs49Kb+2eUiSSLArHK4RvTWJWEfSjHL5IHHJ+j2AkbORdryD7SR3gcXSX+5w==", "dependencies": { - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/spinbutton": "^3.6.19", - "@react-aria/textfield": "^3.18.2", - "@react-aria/utils": "^3.31.0", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/spinbutton": "^3.7.0", + "@react-aria/textfield": "^3.18.3", + "@react-aria/utils": "^3.32.0", "@react-stately/form": "^3.2.2", - "@react-stately/numberfield": "^3.10.2", + "@react-stately/numberfield": "^3.10.3", "@react-types/button": "^3.14.1", - "@react-types/numberfield": "^3.8.15", + "@react-types/numberfield": "^3.8.16", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3562,17 +3579,17 @@ } }, "node_modules/@react-aria/overlays": { - "version": "3.30.0", - "resolved": "https://registry.npmjs.org/@react-aria/overlays/-/overlays-3.30.0.tgz", - "integrity": "sha512-UpjqSjYZx5FAhceWCRVsW6fX1sEwya1fQ/TKkL53FAlLFR8QKuoKqFlmiL43YUFTcGK3UdEOy3cWTleLQwdSmQ==", + "version": "3.31.0", + "resolved": "https://registry.npmjs.org/@react-aria/overlays/-/overlays-3.31.0.tgz", + "integrity": "sha512-Vq41X1s8XheGIhGbbuqRJslJEX08qmMVX//dwuBaFX9T18mMR04tumKOMxp8Lz+vqwdGLvjNUYDMcgolL+AMjw==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", "@react-aria/ssr": "^3.9.10", - "@react-aria/utils": "^3.31.0", - "@react-aria/visually-hidden": "^3.8.28", - "@react-stately/overlays": "^3.6.20", + "@react-aria/utils": "^3.32.0", + "@react-aria/visually-hidden": "^3.8.29", + "@react-stately/overlays": "^3.6.21", "@react-types/button": "^3.14.1", "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1", @@ -3584,13 +3601,13 @@ } }, "node_modules/@react-aria/progress": { - "version": "3.4.27", - "resolved": "https://registry.npmjs.org/@react-aria/progress/-/progress-3.4.27.tgz", - "integrity": "sha512-0OA1shs1575g1zmO8+rWozdbTnxThFFhOfuoL1m7UV5Dley6FHpueoKB1ECv7B+Qm4dQt6DoEqLg7wsbbQDhmg==", + "version": "3.4.28", + "resolved": "https://registry.npmjs.org/@react-aria/progress/-/progress-3.4.28.tgz", + "integrity": "sha512-3NUUAu+rwf1M7pau9WFkrxe/PlBPiqCl/1maGU7iufVveHnz+SVVqXdNkjYx+WkPE0ViwG86Zx6OU4AYJ1pjNw==", "dependencies": { - "@react-aria/i18n": "^3.12.13", - "@react-aria/label": "^3.7.22", - "@react-aria/utils": "^3.31.0", + "@react-aria/i18n": "^3.12.14", + "@react-aria/label": "^3.7.23", + "@react-aria/utils": "^3.32.0", "@react-types/progress": "^3.5.16", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3601,17 +3618,17 @@ } }, "node_modules/@react-aria/radio": { - "version": "3.12.2", - "resolved": "https://registry.npmjs.org/@react-aria/radio/-/radio-3.12.2.tgz", - "integrity": "sha512-I11f6I90neCh56rT/6ieAs3XyDKvEfbj/QmbU5cX3p+SJpRRPN0vxQi5D1hkh0uxDpeClxygSr31NmZsd4sqfg==", + "version": "3.12.3", + "resolved": "https://registry.npmjs.org/@react-aria/radio/-/radio-3.12.3.tgz", + "integrity": "sha512-noucVX++9J3VYWg7dB+r09NVX8UZSR1TWUMCbT/MffzhltOsmiLJVvgJ0uEeeVRuu3+ZM63jOshrzG89anX4TQ==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/form": "^3.1.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/utils": "^3.31.0", - "@react-stately/radio": "^3.11.2", + "@react-aria/focus": "^3.21.3", + "@react-aria/form": "^3.1.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/utils": "^3.32.0", + "@react-stately/radio": "^3.11.3", "@react-types/radio": "^3.9.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3622,15 +3639,15 @@ } }, "node_modules/@react-aria/selection": { - "version": "3.26.0", - "resolved": "https://registry.npmjs.org/@react-aria/selection/-/selection-3.26.0.tgz", - "integrity": "sha512-ZBH3EfWZ+RfhTj01dH8L17uT7iNbXWS8u77/fUpHgtrm0pwNVhx0TYVnLU1YpazQ/3WVpvWhmBB8sWwD1FlD/g==", + "version": "3.27.0", + "resolved": "https://registry.npmjs.org/@react-aria/selection/-/selection-3.27.0.tgz", + "integrity": "sha512-4zgreuCu4QM4t2U7aF3mbMvIKCEkTEo6h6nGJvbyZALZ/eFtLTvUiV8/5CGDJRLGvgMvi3XxUeF9PZbpk5nMJg==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", - "@react-stately/selection": "^3.20.6", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/selection": "^3.20.7", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3640,15 +3657,15 @@ } }, "node_modules/@react-aria/slider": { - "version": "3.8.2", - "resolved": "https://registry.npmjs.org/@react-aria/slider/-/slider-3.8.2.tgz", - "integrity": "sha512-6KyUGaVzRE4xAz1LKHbNh1q5wzxe58pdTHFSnxNe6nk1SCoHw7NfI4h2s2m6LgJ0megFxsT0Ir8aHaFyyxmbgg==", + "version": "3.8.3", + "resolved": "https://registry.npmjs.org/@react-aria/slider/-/slider-3.8.3.tgz", + "integrity": "sha512-tOZVH+wLt3ik0C3wyuXqHL9fvnQ5S+/tHMYB7z8aZV5cEe36Gt4efBILphlA7ChkL/RvpHGK2AGpEGxvuEQIuQ==", "dependencies": { - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/utils": "^3.31.0", - "@react-stately/slider": "^3.7.2", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/utils": "^3.32.0", + "@react-stately/slider": "^3.7.3", "@react-types/shared": "^3.32.1", "@react-types/slider": "^3.8.2", "@swc/helpers": "^0.5.0" @@ -3659,13 +3676,13 @@ } }, "node_modules/@react-aria/spinbutton": { - "version": "3.6.19", - "resolved": "https://registry.npmjs.org/@react-aria/spinbutton/-/spinbutton-3.6.19.tgz", - "integrity": "sha512-xOIXegDpts9t3RSHdIN0iYQpdts0FZ3LbpYJIYVvdEHo9OpDS+ElnDzCGtwZLguvZlwc5s1LAKuKopDUsAEMkw==", + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/@react-aria/spinbutton/-/spinbutton-3.7.0.tgz", + "integrity": "sha512-FOyH94BZp+jNhUJuZqXSubQZDNQEJyW/J19/gwCxQvQvxAP79dhDFshh1UtrL4EjbjIflmaOes+sH/XEHUnJVA==", "dependencies": { - "@react-aria/i18n": "^3.12.13", + "@react-aria/i18n": "^3.12.14", "@react-aria/live-announcer": "^3.4.4", - "@react-aria/utils": "^3.31.0", + "@react-aria/utils": "^3.32.0", "@react-types/button": "^3.14.1", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3690,12 +3707,12 @@ } }, "node_modules/@react-aria/switch": { - "version": "3.7.8", - "resolved": "https://registry.npmjs.org/@react-aria/switch/-/switch-3.7.8.tgz", - "integrity": "sha512-AfsUq1/YiuoprhcBUD9vDPyWaigAwctQNW1fMb8dROL+i/12B+Zekj8Ml+jbU69/kIVtfL0Jl7/0Bo9KK3X0xQ==", + "version": "3.7.9", + "resolved": "https://registry.npmjs.org/@react-aria/switch/-/switch-3.7.9.tgz", + "integrity": "sha512-RZtuFRXews0PBx8Fc2R/kqaIARD5YIM5uYtmwnWfY7y5bEsBGONxp0d+m2vDyY7yk+VNpVFBdwewY9GbZmH1CA==", "dependencies": { - "@react-aria/toggle": "^3.12.2", - "@react-stately/toggle": "^3.9.2", + "@react-aria/toggle": "^3.12.3", + "@react-stately/toggle": "^3.9.3", "@react-types/shared": "^3.32.1", "@react-types/switch": "^3.5.15", "@swc/helpers": "^0.5.0" @@ -3706,20 +3723,20 @@ } }, "node_modules/@react-aria/table": { - "version": "3.17.8", - "resolved": "https://registry.npmjs.org/@react-aria/table/-/table-3.17.8.tgz", - "integrity": "sha512-bXiZoxTMbsqUJsYDhHPzKc3jw0HFJ/xMsJ49a0f7mp5r9zACxNLeIU0wJ4Uvx37dnYOHKzGliG+rj5l4sph7MA==", + "version": "3.17.9", + "resolved": "https://registry.npmjs.org/@react-aria/table/-/table-3.17.9.tgz", + "integrity": "sha512-Jby561E1YfzoRgtp+RQuhDz4vnxlcqol9RTgQQ7FWXC2IcN9Pny1COU34LkA1cL9VeB9LJ0+qfMhGw4aAwaUmw==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/grid": "^3.14.5", - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", + "@react-aria/focus": "^3.21.3", + "@react-aria/grid": "^3.14.6", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", "@react-aria/live-announcer": "^3.4.4", - "@react-aria/utils": "^3.31.0", - "@react-aria/visually-hidden": "^3.8.28", + "@react-aria/utils": "^3.32.0", + "@react-aria/visually-hidden": "^3.8.29", "@react-stately/collections": "^3.12.8", "@react-stately/flags": "^3.1.2", - "@react-stately/table": "^3.15.1", + "@react-stately/table": "^3.15.2", "@react-types/checkbox": "^3.10.2", "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1", @@ -3732,17 +3749,17 @@ } }, "node_modules/@react-aria/tabs": { - "version": "3.10.8", - "resolved": "https://registry.npmjs.org/@react-aria/tabs/-/tabs-3.10.8.tgz", - "integrity": "sha512-sPPJyTyoAqsBh76JinBAxStOcbjZvyWFYKpJ9Uqw+XT0ObshAPPFSGeh8DiQemPs02RwJdrfARPMhyqiX8t59A==", + "version": "3.10.9", + "resolved": "https://registry.npmjs.org/@react-aria/tabs/-/tabs-3.10.9.tgz", + "integrity": "sha512-2+FNd7Ohr3hrEgYrKdZW0FWbgybzTVZft6tw95oQ2+9PnjdDVdtzHliI+8HY8jzb4hTf4bU7O8n+s/HBlCBSIw==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/selection": "^3.26.0", - "@react-aria/utils": "^3.31.0", - "@react-stately/tabs": "^3.8.6", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/selection": "^3.27.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/tabs": "^3.8.7", "@react-types/shared": "^3.32.1", - "@react-types/tabs": "^3.3.19", + "@react-types/tabs": "^3.3.20", "@swc/helpers": "^0.5.0" }, "peerDependencies": { @@ -3751,16 +3768,16 @@ } }, "node_modules/@react-aria/textfield": { - "version": "3.18.2", - "resolved": "https://registry.npmjs.org/@react-aria/textfield/-/textfield-3.18.2.tgz", - "integrity": "sha512-G+lM8VYSor6g9Yptc6hLZ6BF+0cq0pYol1z6wdQUQgJN8tg4HPtzq75lsZtlCSIznL3amgRAxJtd0dUrsAnvaQ==", + "version": "3.18.3", + "resolved": "https://registry.npmjs.org/@react-aria/textfield/-/textfield-3.18.3.tgz", + "integrity": "sha512-ehiSHOKuKCwPdxFe7wGE0QJlSeeJR4iJuH+OdsYVlZzYbl9J/uAdGbpsj/zPhNtBo1g/Td76U8TtTlYRZ8lUZw==", "dependencies": { - "@react-aria/form": "^3.1.2", - "@react-aria/interactions": "^3.25.6", - "@react-aria/label": "^3.7.22", - "@react-aria/utils": "^3.31.0", + "@react-aria/form": "^3.1.3", + "@react-aria/interactions": "^3.26.0", + "@react-aria/label": "^3.7.23", + "@react-aria/utils": "^3.32.0", "@react-stately/form": "^3.2.2", - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@react-types/textfield": "^3.12.6", "@swc/helpers": "^0.5.0" @@ -3771,14 +3788,14 @@ } }, "node_modules/@react-aria/toast": { - "version": "3.0.8", - "resolved": "https://registry.npmjs.org/@react-aria/toast/-/toast-3.0.8.tgz", - "integrity": "sha512-rfJIms6AkMyQ7ZgKrMZgGfPwGcB/t1JoEwbc1PAmXcAvFI/hzF6YF7ZFDXiq38ucFsP9PnHmbXIzM9w4ccl18A==", + "version": "3.0.9", + "resolved": "https://registry.npmjs.org/@react-aria/toast/-/toast-3.0.9.tgz", + "integrity": "sha512-2sRitczXl5VEwyq97o8TVvq3bIqLA7EfA7dhDPkYlHGa4T1vzKkhNqgkskKd9+Tw7gqeFRFjnokh+es9jkM11g==", "dependencies": { - "@react-aria/i18n": "^3.12.13", - "@react-aria/interactions": "^3.25.6", - "@react-aria/landmark": "^3.0.7", - "@react-aria/utils": "^3.31.0", + "@react-aria/i18n": "^3.12.14", + "@react-aria/interactions": "^3.26.0", + "@react-aria/landmark": "^3.0.8", + "@react-aria/utils": "^3.32.0", "@react-stately/toast": "^3.1.2", "@react-types/button": "^3.14.1", "@react-types/shared": "^3.32.1", @@ -3790,13 +3807,13 @@ } }, "node_modules/@react-aria/toggle": { - "version": "3.12.2", - "resolved": "https://registry.npmjs.org/@react-aria/toggle/-/toggle-3.12.2.tgz", - "integrity": "sha512-g25XLYqJuJpt0/YoYz2Rab8ax+hBfbssllcEFh0v0jiwfk2gwTWfRU9KAZUvxIqbV8Nm8EBmrYychDpDcvW1kw==", + "version": "3.12.3", + "resolved": "https://registry.npmjs.org/@react-aria/toggle/-/toggle-3.12.3.tgz", + "integrity": "sha512-mciUbeVP99fRObnH5qLFrkKXX+5VKeV6BhFJlmz1eo3ltR/0xZKnUcycA2CGzmqtB70w09CAhr8NMEnpNH8dwQ==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", - "@react-stately/toggle": "^3.9.2", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/toggle": "^3.9.3", "@react-types/checkbox": "^3.10.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -3807,13 +3824,13 @@ } }, "node_modules/@react-aria/toolbar": { - "version": "3.0.0-beta.21", - "resolved": "https://registry.npmjs.org/@react-aria/toolbar/-/toolbar-3.0.0-beta.21.tgz", - "integrity": "sha512-yRCk/GD8g+BhdDgxd3I0a0c8Ni4Wyo6ERzfSoBkPkwQ4X2E2nkopmraM9D0fXw4UcIr4bnmvADzkHXtBN0XrBg==", + "version": "3.0.0-beta.22", + "resolved": "https://registry.npmjs.org/@react-aria/toolbar/-/toolbar-3.0.0-beta.22.tgz", + "integrity": "sha512-Q1gOj6N4vzvpGrIoNAxpUudEQP82UgQACENH/bcH8FnEMbSP7DHvVfDhj7GTU6ldMXO2cjqLhiidoUK53gkCiA==", "dependencies": { - "@react-aria/focus": "^3.21.2", - "@react-aria/i18n": "^3.12.13", - "@react-aria/utils": "^3.31.0", + "@react-aria/focus": "^3.21.3", + "@react-aria/i18n": "^3.12.14", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -3823,15 +3840,15 @@ } }, "node_modules/@react-aria/tooltip": { - "version": "3.8.8", - "resolved": "https://registry.npmjs.org/@react-aria/tooltip/-/tooltip-3.8.8.tgz", - "integrity": "sha512-CmHUqtXtFWmG4AHMEr9hIVex+oscK6xcM2V47gq9ijNInxe3M6UBu/dBdkgGP/jYv9N7tzCAjTR8nNIHQXwvWw==", + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/@react-aria/tooltip/-/tooltip-3.9.0.tgz", + "integrity": "sha512-2O1DXEV8/+DeUq9dIlAfaNa7lSG+7FCZDuF+sNiPYnZM6tgFOrsId26uMF5EuwpVfOvXSSGnq0+6Ma2On7mZPg==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", - "@react-stately/tooltip": "^3.5.8", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", + "@react-stately/tooltip": "^3.5.9", "@react-types/shared": "^3.32.1", - "@react-types/tooltip": "^3.4.21", + "@react-types/tooltip": "^3.5.0", "@swc/helpers": "^0.5.0" }, "peerDependencies": { @@ -3840,13 +3857,13 @@ } }, "node_modules/@react-aria/utils": { - "version": "3.31.0", - "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.31.0.tgz", - "integrity": "sha512-ABOzCsZrWzf78ysswmguJbx3McQUja7yeGj6/vZo4JVsZNlxAN+E9rs381ExBRI0KzVo6iBTeX5De8eMZPJXig==", + "version": "3.32.0", + "resolved": "https://registry.npmjs.org/@react-aria/utils/-/utils-3.32.0.tgz", + "integrity": "sha512-/7Rud06+HVBIlTwmwmJa2W8xVtgxgzm0+kLbuFooZRzKDON6hhozS1dOMR/YLMxyJOaYOTpImcP4vRR9gL1hEg==", "dependencies": { "@react-aria/ssr": "^3.9.10", "@react-stately/flags": "^3.1.2", - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0", "clsx": "^2.0.0" @@ -3857,12 +3874,12 @@ } }, "node_modules/@react-aria/visually-hidden": { - "version": "3.8.28", - "resolved": "https://registry.npmjs.org/@react-aria/visually-hidden/-/visually-hidden-3.8.28.tgz", - "integrity": "sha512-KRRjbVVob2CeBidF24dzufMxBveEUtUu7IM+hpdZKB+gxVROoh4XRLPv9SFmaH89Z7D9To3QoykVZoWD0lan6Q==", + "version": "3.8.29", + "resolved": "https://registry.npmjs.org/@react-aria/visually-hidden/-/visually-hidden-3.8.29.tgz", + "integrity": "sha512-1joCP+MHBLd+YA6Gb08nMFfDBhOF0Kh1gR1SA8zoxEB5RMfQEEkufIB8k0GGwvHGSCK3gFyO8UAVsD0+rRYEyg==", "dependencies": { - "@react-aria/interactions": "^3.25.6", - "@react-aria/utils": "^3.31.0", + "@react-aria/interactions": "^3.26.0", + "@react-aria/utils": "^3.32.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4005,13 +4022,13 @@ } }, "node_modules/@react-stately/calendar": { - "version": "3.9.0", - "resolved": "https://registry.npmjs.org/@react-stately/calendar/-/calendar-3.9.0.tgz", - "integrity": "sha512-U5Nf2kx9gDhJRxdDUm5gjfyUlt/uUfOvM1vDW2UA62cA6+2k2cavMLc2wNlXOb/twFtl6p0joYKHG7T4xnEFkg==", + "version": "3.9.1", + "resolved": "https://registry.npmjs.org/@react-stately/calendar/-/calendar-3.9.1.tgz", + "integrity": "sha512-q0Q8fivpQa1rcLg5daUVxwVj1smCp1VnpX9A5Q5PkI9lH9x+xdS0Y6eOqb8Ih3TKBDkx9/oEZonOX7RYNIzSig==", "dependencies": { - "@internationalized/date": "^3.10.0", - "@react-stately/utils": "^3.10.8", - "@react-types/calendar": "^3.8.0", + "@internationalized/date": "^3.10.1", + "@react-stately/utils": "^3.11.0", + "@react-types/calendar": "^3.8.1", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4020,12 +4037,12 @@ } }, "node_modules/@react-stately/checkbox": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/@react-stately/checkbox/-/checkbox-3.7.2.tgz", - "integrity": "sha512-j1ycUVz5JmqhaL6mDZgDNZqBilOB8PBW096sDPFaTtuYreDx2HOd1igxiIvwlvPESZwsJP7FVM3mYnaoXtpKPA==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@react-stately/checkbox/-/checkbox-3.7.3.tgz", + "integrity": "sha512-ve2K+uWT+NRM1JMn+tkWJDP2iBAaWvbZ0TbSXs371IUcTWaNW61HygZ+UFOB/frAZGloazEKGqAsX5XjFpgB9w==", "dependencies": { "@react-stately/form": "^3.2.2", - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/checkbox": "^3.10.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4047,16 +4064,16 @@ } }, "node_modules/@react-stately/combobox": { - "version": "3.12.0", - "resolved": "https://registry.npmjs.org/@react-stately/combobox/-/combobox-3.12.0.tgz", - "integrity": "sha512-A6q9R/7cEa/qoQsBkdslXWvD7ztNLLQ9AhBhVN9QvzrmrH5B4ymUwcTU8lWl22ykH7RRwfonLeLXJL4C+/L2oQ==", + "version": "3.12.1", + "resolved": "https://registry.npmjs.org/@react-stately/combobox/-/combobox-3.12.1.tgz", + "integrity": "sha512-RwfTTYgKJ9raIY+7grZ5DbfVRSO5pDjo/ur2VN/28LZzM0eOQrLFQ00vpBmY7/R64sHRpcXLDxpz5cqpKCdvTw==", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/form": "^3.2.2", - "@react-stately/list": "^3.13.1", - "@react-stately/overlays": "^3.6.20", - "@react-stately/utils": "^3.10.8", - "@react-types/combobox": "^3.13.9", + "@react-stately/list": "^3.13.2", + "@react-stately/overlays": "^3.6.21", + "@react-stately/utils": "^3.11.0", + "@react-types/combobox": "^3.13.10", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4065,16 +4082,16 @@ } }, "node_modules/@react-stately/datepicker": { - "version": "3.15.2", - "resolved": "https://registry.npmjs.org/@react-stately/datepicker/-/datepicker-3.15.2.tgz", - "integrity": "sha512-S5GL+W37chvV8knv9v0JRv0L6hKo732qqabCCHXzOpYxkLIkV4f/y3cHdEzFWzpZ0O0Gkg7WgeYo160xOdBKYg==", + "version": "3.15.3", + "resolved": "https://registry.npmjs.org/@react-stately/datepicker/-/datepicker-3.15.3.tgz", + "integrity": "sha512-RDYoz1R/EkCyxHYewb58T7DngU3gl6CnQL7xiWiDlayPnstGaanoQ3yCZGJaIQwR8PrKdNbQwXF9NlSmj8iCOw==", "dependencies": { - "@internationalized/date": "^3.10.0", + "@internationalized/date": "^3.10.1", "@internationalized/string": "^3.2.7", "@react-stately/form": "^3.2.2", - "@react-stately/overlays": "^3.6.20", - "@react-stately/utils": "^3.10.8", - "@react-types/datepicker": "^3.13.2", + "@react-stately/overlays": "^3.6.21", + "@react-stately/utils": "^3.11.0", + "@react-types/datepicker": "^3.13.3", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4103,12 +4120,12 @@ } }, "node_modules/@react-stately/grid": { - "version": "3.11.6", - "resolved": "https://registry.npmjs.org/@react-stately/grid/-/grid-3.11.6.tgz", - "integrity": "sha512-vWPAkzpeTIsrurHfMubzMuqEw7vKzFhIJeEK5sEcLunyr1rlADwTzeWrHNbPMl66NAIAi70Dr1yNq+kahQyvMA==", + "version": "3.11.7", + "resolved": "https://registry.npmjs.org/@react-stately/grid/-/grid-3.11.7.tgz", + "integrity": "sha512-SqzBSxUTFZKLZicfXDK+M0A3gh07AYK1pmU/otcq2cjZ0nSC4CceKijQ2GBZnl+YGcGHI1RgkhpLP6ZioMYctQ==", "dependencies": { "@react-stately/collections": "^3.12.8", - "@react-stately/selection": "^3.20.6", + "@react-stately/selection": "^3.20.7", "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4118,13 +4135,13 @@ } }, "node_modules/@react-stately/list": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/@react-stately/list/-/list-3.13.1.tgz", - "integrity": "sha512-eHaoauh21twbcl0kkwULhVJ+CzYcy1jUjMikNVMHOQdhr4WIBdExf7PmSgKHKqsSPhpGg6IpTCY2dUX3RycjDg==", + "version": "3.13.2", + "resolved": "https://registry.npmjs.org/@react-stately/list/-/list-3.13.2.tgz", + "integrity": "sha512-dGFALuQWNNOkv7W12qSsXLF4mJHLeWeK2hVvdyj4SI8Vxku+BOfaVKuW3sn3mNiixI1dM/7FY2ip4kK+kv27vw==", "dependencies": { "@react-stately/collections": "^3.12.8", - "@react-stately/selection": "^3.20.6", - "@react-stately/utils": "^3.10.8", + "@react-stately/selection": "^3.20.7", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4133,11 +4150,11 @@ } }, "node_modules/@react-stately/menu": { - "version": "3.9.8", - "resolved": "https://registry.npmjs.org/@react-stately/menu/-/menu-3.9.8.tgz", - "integrity": "sha512-bo0NOhofnTHLESiYfsSSw6gyXiPVJJ0UlN2igUXtJk5PmyhWjFzUzTzcnd7B028OB0si9w3LIWM3stqz5271Eg==", + "version": "3.9.9", + "resolved": "https://registry.npmjs.org/@react-stately/menu/-/menu-3.9.9.tgz", + "integrity": "sha512-moW5JANxMxPilfR0SygpCWCZe7Ef09oadgzTZthRymNRv0PXVS9ad4wd1EkwuMvPH/n0uZLZE2s8hNyFDgyqPA==", "dependencies": { - "@react-stately/overlays": "^3.6.20", + "@react-stately/overlays": "^3.6.21", "@react-types/menu": "^3.10.5", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4147,14 +4164,14 @@ } }, "node_modules/@react-stately/numberfield": { - "version": "3.10.2", - "resolved": "https://registry.npmjs.org/@react-stately/numberfield/-/numberfield-3.10.2.tgz", - "integrity": "sha512-jlKVFYaH3RX5KvQ7a+SAMQuPccZCzxLkeYkBE64u1Zvi7YhJ8hkTMHG/fmZMbk1rHlseE2wfBdk0Rlya3MvoNQ==", + "version": "3.10.3", + "resolved": "https://registry.npmjs.org/@react-stately/numberfield/-/numberfield-3.10.3.tgz", + "integrity": "sha512-40g/oyVcWoEaLqkr61KuHZzQVLLXFi3oa2K8XLnb6o+859SM4TX3XPNqL6eNQjXSKoJO5Hlgpqhee9j+VDbGog==", "dependencies": { "@internationalized/number": "^3.6.5", "@react-stately/form": "^3.2.2", - "@react-stately/utils": "^3.10.8", - "@react-types/numberfield": "^3.8.15", + "@react-stately/utils": "^3.11.0", + "@react-types/numberfield": "^3.8.16", "@swc/helpers": "^0.5.0" }, "peerDependencies": { @@ -4162,11 +4179,11 @@ } }, "node_modules/@react-stately/overlays": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/@react-stately/overlays/-/overlays-3.6.20.tgz", - "integrity": "sha512-YAIe+uI8GUXX8F/0Pzr53YeC5c/bjqbzDFlV8NKfdlCPa6+Jp4B/IlYVjIooBj9+94QvbQdjylegvYWK/iPwlg==", + "version": "3.6.21", + "resolved": "https://registry.npmjs.org/@react-stately/overlays/-/overlays-3.6.21.tgz", + "integrity": "sha512-7f25H1PS2g+SNvuWPEW30pSGqYNHxesCP4w+1RcV/XV1oQI7oP5Ji2WfI0QsJEFc9wP/ZO1pyjHNKpfLI3O88g==", "dependencies": { - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/overlays": "^3.9.2", "@swc/helpers": "^0.5.0" }, @@ -4175,12 +4192,12 @@ } }, "node_modules/@react-stately/radio": { - "version": "3.11.2", - "resolved": "https://registry.npmjs.org/@react-stately/radio/-/radio-3.11.2.tgz", - "integrity": "sha512-UM7L6AW+k8edhSBUEPZAqiWNRNadfOKK7BrCXyBiG79zTz0zPcXRR+N+gzkDn7EMSawDeyK1SHYUuoSltTactg==", + "version": "3.11.3", + "resolved": "https://registry.npmjs.org/@react-stately/radio/-/radio-3.11.3.tgz", + "integrity": "sha512-8+Cy0azV1aBWKcBfGHi3nBa285lAS6XhmVw2LfEwxq8DeVKTbJAaCHHwvDoclxDiOAnqzE0pio0QMD8rYISt9g==", "dependencies": { "@react-stately/form": "^3.2.2", - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/radio": "^3.9.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4190,12 +4207,12 @@ } }, "node_modules/@react-stately/selection": { - "version": "3.20.6", - "resolved": "https://registry.npmjs.org/@react-stately/selection/-/selection-3.20.6.tgz", - "integrity": "sha512-a0bjuP2pJYPKEiedz2Us1W1aSz0iHRuyeQEdBOyL6Z6VUa6hIMq9H60kvseir2T85cOa4QggizuRV7mcO6bU5w==", + "version": "3.20.7", + "resolved": "https://registry.npmjs.org/@react-stately/selection/-/selection-3.20.7.tgz", + "integrity": "sha512-NkiRsNCfORBIHNF1bCavh4Vvj+Yd5NffE10iXtaFuhF249NlxLynJZmkcVCqNP9taC2pBIHX00+9tcBgxhG+mA==", "dependencies": { "@react-stately/collections": "^3.12.8", - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4204,11 +4221,11 @@ } }, "node_modules/@react-stately/slider": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/@react-stately/slider/-/slider-3.7.2.tgz", - "integrity": "sha512-EVBHUdUYwj++XqAEiQg2fGi8Reccznba0uyQ3gPejF0pAc390Q/J5aqiTEDfiCM7uJ6WHxTM6lcCqHQBISk2dQ==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@react-stately/slider/-/slider-3.7.3.tgz", + "integrity": "sha512-9QGnQNXFAH52BzxtU7weyOV/VV7/so6uIvE8VOHfc6QR3GMBM/kJvqBCTWZfQ0pxDIsRagBQDD/tjB09ixTOzg==", "dependencies": { - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@react-types/slider": "^3.8.2", "@swc/helpers": "^0.5.0" @@ -4218,15 +4235,15 @@ } }, "node_modules/@react-stately/table": { - "version": "3.15.1", - "resolved": "https://registry.npmjs.org/@react-stately/table/-/table-3.15.1.tgz", - "integrity": "sha512-MhMAgE/LgAzHcAn1P3p/nQErzJ6DiixSJ1AOt2JlnAKEb5YJg4ATKWCb2IjBLwywt9ZCzfm3KMUzkctZqAoxwA==", + "version": "3.15.2", + "resolved": "https://registry.npmjs.org/@react-stately/table/-/table-3.15.2.tgz", + "integrity": "sha512-vgEArBN5ocqsQdeORBj6xk8acu5iFnd/CyXEQKl0R5RyuYuw0ms8UmFHvs8Fv1HONehPYg+XR4QPliDFPX8R9A==", "dependencies": { "@react-stately/collections": "^3.12.8", "@react-stately/flags": "^3.1.2", - "@react-stately/grid": "^3.11.6", - "@react-stately/selection": "^3.20.6", - "@react-stately/utils": "^3.10.8", + "@react-stately/grid": "^3.11.7", + "@react-stately/selection": "^3.20.7", + "@react-stately/utils": "^3.11.0", "@react-types/grid": "^3.3.6", "@react-types/shared": "^3.32.1", "@react-types/table": "^3.13.4", @@ -4237,13 +4254,13 @@ } }, "node_modules/@react-stately/tabs": { - "version": "3.8.6", - "resolved": "https://registry.npmjs.org/@react-stately/tabs/-/tabs-3.8.6.tgz", - "integrity": "sha512-9RYxmgjVIxUpIsGKPIF7uRoHWOEz8muwaYiStCVeyiYBPmarvZoIYtTXcwSMN/vEs7heVN5uGCL6/bfdY4+WiA==", + "version": "3.8.7", + "resolved": "https://registry.npmjs.org/@react-stately/tabs/-/tabs-3.8.7.tgz", + "integrity": "sha512-ETZEzg7s9F2SCvisZ2cCpLx6XBHqdvVgDGU5l3C3s9zBKBr6lgyLFt61IdGW8XXZRUvw4mMGT6tGQbXeGvR0Wg==", "dependencies": { - "@react-stately/list": "^3.13.1", + "@react-stately/list": "^3.13.2", "@react-types/shared": "^3.32.1", - "@react-types/tabs": "^3.3.19", + "@react-types/tabs": "^3.3.20", "@swc/helpers": "^0.5.0" }, "peerDependencies": { @@ -4263,11 +4280,11 @@ } }, "node_modules/@react-stately/toggle": { - "version": "3.9.2", - "resolved": "https://registry.npmjs.org/@react-stately/toggle/-/toggle-3.9.2.tgz", - "integrity": "sha512-dOxs9wrVXHUmA7lc8l+N9NbTJMAaXcYsnNGsMwfXIXQ3rdq+IjWGNYJ52UmNQyRYFcg0jrzRrU16TyGbNjOdNQ==", + "version": "3.9.3", + "resolved": "https://registry.npmjs.org/@react-stately/toggle/-/toggle-3.9.3.tgz", + "integrity": "sha512-G6aA/aTnid/6dQ9dxNEd7/JqzRmVkVYYpOAP+l02hepiuSmFwLu4nE98i4YFBQqFZ5b4l01gMrS90JGL7HrNmw==", "dependencies": { - "@react-stately/utils": "^3.10.8", + "@react-stately/utils": "^3.11.0", "@react-types/checkbox": "^3.10.2", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" @@ -4277,12 +4294,12 @@ } }, "node_modules/@react-stately/tooltip": { - "version": "3.5.8", - "resolved": "https://registry.npmjs.org/@react-stately/tooltip/-/tooltip-3.5.8.tgz", - "integrity": "sha512-gkcUx2ROhCiGNAYd2BaTejakXUUNLPnnoJ5+V/mN480pN+OrO8/2V9pqb/IQmpqxLsso93zkM3A4wFHHLBBmPQ==", + "version": "3.5.9", + "resolved": "https://registry.npmjs.org/@react-stately/tooltip/-/tooltip-3.5.9.tgz", + "integrity": "sha512-YwqtxFqQFfJtbeh+axHVGAfz9XHf73UaBndHxSbVM/T5c1PfI2yOB39T2FOU5fskZ2VMO3qTDhiXmFgGbGYSfQ==", "dependencies": { - "@react-stately/overlays": "^3.6.20", - "@react-types/tooltip": "^3.4.21", + "@react-stately/overlays": "^3.6.21", + "@react-types/tooltip": "^3.5.0", "@swc/helpers": "^0.5.0" }, "peerDependencies": { @@ -4290,13 +4307,13 @@ } }, "node_modules/@react-stately/tree": { - "version": "3.9.3", - "resolved": "https://registry.npmjs.org/@react-stately/tree/-/tree-3.9.3.tgz", - "integrity": "sha512-ZngG79nLFxE/GYmpwX6E/Rma2MMkzdoJPRI3iWk3dgqnGMMzpPnUp/cvjDsU3UHF7xDVusC5BT6pjWN0uxCIFQ==", + "version": "3.9.4", + "resolved": "https://registry.npmjs.org/@react-stately/tree/-/tree-3.9.4.tgz", + "integrity": "sha512-Re1fdEiR0hHPcEda+7ecw+52lgGfFW0MAEDzFg9I6J/t8STQSP+1YC0VVVkv2xRrkLbKLPqggNKgmD8nggecnw==", "dependencies": { "@react-stately/collections": "^3.12.8", - "@react-stately/selection": "^3.20.6", - "@react-stately/utils": "^3.10.8", + "@react-stately/selection": "^3.20.7", + "@react-stately/utils": "^3.11.0", "@react-types/shared": "^3.32.1", "@swc/helpers": "^0.5.0" }, @@ -4305,9 +4322,9 @@ } }, "node_modules/@react-stately/utils": { - "version": "3.10.8", - "resolved": "https://registry.npmjs.org/@react-stately/utils/-/utils-3.10.8.tgz", - "integrity": "sha512-SN3/h7SzRsusVQjQ4v10LaVsDc81jyyR0DD5HnsQitm/I5WDpaSr2nRHtyloPFU48jlql1XX/S04T2DLQM7Y3g==", + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/@react-stately/utils/-/utils-3.11.0.tgz", + "integrity": "sha512-8LZpYowJ9eZmmYLpudbo/eclIRnbhWIJZ994ncmlKlouNzKohtM8qTC6B1w1pwUbiwGdUoyzLuQbeaIor5Dvcw==", "dependencies": { "@swc/helpers": "^0.5.0" }, @@ -4363,11 +4380,11 @@ } }, "node_modules/@react-types/calendar": { - "version": "3.8.0", - "resolved": "https://registry.npmjs.org/@react-types/calendar/-/calendar-3.8.0.tgz", - "integrity": "sha512-ZDZgfZgbz1ydWOFs1mH7QFfX3ioJrmb3Y/lkoubQE0HWXLZzyYNvhhKyFJRS1QJ40IofLSBHriwbQb/tsUnGlw==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/@react-types/calendar/-/calendar-3.8.1.tgz", + "integrity": "sha512-B0UuitMP7YkArBAQldwSZSNL2WwazNGCG+lp6yEDj831NrH9e36/jcjv1rObQ9ZMS6uDX9LXu5C8V5RFwGQabA==", "dependencies": { - "@internationalized/date": "^3.10.0", + "@internationalized/date": "^3.10.1", "@react-types/shared": "^3.32.1" }, "peerDependencies": { @@ -4386,9 +4403,9 @@ } }, "node_modules/@react-types/combobox": { - "version": "3.13.9", - "resolved": "https://registry.npmjs.org/@react-types/combobox/-/combobox-3.13.9.tgz", - "integrity": "sha512-G6GmLbzVkLW6VScxPAr/RtliEyPhBClfYaIllK1IZv+Z42SVnOpKzhnoe79BpmiFqy1AaC3+LjZX783mrsHCwA==", + "version": "3.13.10", + "resolved": "https://registry.npmjs.org/@react-types/combobox/-/combobox-3.13.10.tgz", + "integrity": "sha512-Wo4iix++ID6JzoH9eD7ddGUlirQiGpN/VQc3iFjnaTXiJ/cj3v+1oGsDGCZZTklTVeUMU7SRBfMhMgxHHIYLXA==", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4397,12 +4414,12 @@ } }, "node_modules/@react-types/datepicker": { - "version": "3.13.2", - "resolved": "https://registry.npmjs.org/@react-types/datepicker/-/datepicker-3.13.2.tgz", - "integrity": "sha512-+M6UZxJnejYY8kz0spbY/hP08QJ5rsZ3aNarRQQHc48xV2oelFLX5MhAqizfLEsvyfb0JYrhWoh4z1xZtAmYCg==", + "version": "3.13.3", + "resolved": "https://registry.npmjs.org/@react-types/datepicker/-/datepicker-3.13.3.tgz", + "integrity": "sha512-OTRa3banGxcUQKRTLUzr0zTVUMUL+Az1BWARCYQ+8Z/dlkYXYUW0fnS5I0pUEqihgai15KxiY13U0gAqbNSfcA==", "dependencies": { - "@internationalized/date": "^3.10.0", - "@react-types/calendar": "^3.8.0", + "@internationalized/date": "^3.10.1", + "@react-types/calendar": "^3.8.1", "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" }, @@ -4479,9 +4496,9 @@ } }, "node_modules/@react-types/numberfield": { - "version": "3.8.15", - "resolved": "https://registry.npmjs.org/@react-types/numberfield/-/numberfield-3.8.15.tgz", - "integrity": "sha512-97r92D23GKCOjGIGMeW9nt+/KlfM3GeWH39Czcmd2/D5y3k6z4j0avbsfx2OttCtJszrnENjw3GraYGYI2KosQ==", + "version": "3.8.16", + "resolved": "https://registry.npmjs.org/@react-types/numberfield/-/numberfield-3.8.16.tgz", + "integrity": "sha512-945F0GsD7K2T293YXhap+2Runl3tZWbnhadXVHFWLbqIKKONZFSZTfLKxQcbFr+bQXr2uh1bVJhYcOiS1l5M+A==", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4565,9 +4582,9 @@ } }, "node_modules/@react-types/tabs": { - "version": "3.3.19", - "resolved": "https://registry.npmjs.org/@react-types/tabs/-/tabs-3.3.19.tgz", - "integrity": "sha512-fE+qI43yR5pAMpeqPxGqQq9jDHXEPqXskuxNHERMW0PYMdPyem2Cw6goc5F4qeZO3Hf6uPZgHkvJz2OAq7TbBw==", + "version": "3.3.20", + "resolved": "https://registry.npmjs.org/@react-types/tabs/-/tabs-3.3.20.tgz", + "integrity": "sha512-Kjq4PypapdMOVPAQgaFIKH65Kr3YnRvaxBGd6RYizTsqYImQhXoGj6B4lBpjYy4KhfRd4dYS82frHqTGKmBYiA==", "dependencies": { "@react-types/shared": "^3.32.1" }, @@ -4587,9 +4604,9 @@ } }, "node_modules/@react-types/tooltip": { - "version": "3.4.21", - "resolved": "https://registry.npmjs.org/@react-types/tooltip/-/tooltip-3.4.21.tgz", - "integrity": "sha512-ugGHOZU6WbOdeTdbjnaEc+Ms7/WhsUCg+T3PCOIeOT9FG02Ce189yJ/+hd7oqL/tVwIhEMYJIqSCgSELFox+QA==", + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@react-types/tooltip/-/tooltip-3.5.0.tgz", + "integrity": "sha512-o/m1wlKlOD2sLb9vZLWdVkD5LFLHBMLGeeK/bhyUtp0IEdUeKy0ZRTS7pa/A50trov9RvdbzLK79xG8nKNxHew==", "dependencies": { "@react-types/overlays": "^3.9.2", "@react-types/shared": "^3.32.1" @@ -5192,9 +5209,9 @@ } }, "node_modules/@swc/helpers": { - "version": "0.5.17", - "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz", - "integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==", + "version": "0.5.18", + "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.18.tgz", + "integrity": "sha512-TXTnIcNJQEKwThMMqBXsZ4VGAza6bvN4pa41Rkqoio6QBKMvo+5lexeTMScGCIxtzgQJzElcvIltani+adC5PQ==", "dependencies": { "tslib": "^2.8.0" } @@ -6382,19 +6399,17 @@ } }, "node_modules/@xterm/addon-fit": { - "version": "0.10.0", - "resolved": "https://registry.npmjs.org/@xterm/addon-fit/-/addon-fit-0.10.0.tgz", - "integrity": "sha512-UFYkDm4HUahf2lnEyHvio51TNGiLK66mqP2JoATy7hRZeXaGMRDr00JiSF7m63vR5WKATF605yEggJKsw0JpMQ==", - "license": "MIT", - "peerDependencies": { - "@xterm/xterm": "^5.0.0" - } + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@xterm/addon-fit/-/addon-fit-0.11.0.tgz", + "integrity": "sha512-jYcgT6xtVYhnhgxh3QgYDnnNMYTcf8ElbxxFzX0IZo+vabQqSPAjC3c1wJrKB5E19VwQei89QCiZZP86DCPF7g==" }, "node_modules/@xterm/xterm": { - "version": "5.5.0", - "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-5.5.0.tgz", - "integrity": "sha512-hqJHYaQb5OptNunnyAnkHyM8aCjZ1MEIDTQu1iIbbTD/xops91NB5yq1ZK/dC2JDbVWtF23zUtl9JE2NqwT87A==", - "license": "MIT" + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@xterm/xterm/-/xterm-6.0.0.tgz", + "integrity": "sha512-TQwDdQGtwwDt+2cgKDLn0IRaSxYu1tSUjgKarSDkUM0ZNiSRXFpjxEsvc/Zgc5kq5omJ+V0a8/kIM2WD3sMOYg==", + "workspaces": [ + "addons/*" + ] }, "node_modules/accepts": { "version": "1.3.8", @@ -9932,16 +9947,15 @@ "license": "CC0-1.0" }, "node_modules/html-encoding-sniffer": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", - "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-6.0.0.tgz", + "integrity": "sha512-CV9TW3Y3f8/wT0BRFc1/KAVQ3TUHiXmaAb6VW9vtiMFf7SLoMd1PdAc4W3KFOFETBJUb90KatHqlsZMWV+R9Gg==", "dev": true, - "license": "MIT", "dependencies": { - "whatwg-encoding": "^3.1.1" + "@exodus/bytes": "^1.6.0" }, "engines": { - "node": ">=18" + "node": "^20.19.0 || ^22.12.0 || >=24.0.0" } }, "node_modules/html-escaper": { @@ -10823,18 +10837,18 @@ } }, "node_modules/jsdom": { - "version": "27.3.0", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.3.0.tgz", - "integrity": "sha512-GtldT42B8+jefDUC4yUKAvsaOrH7PDHmZxZXNgF2xMmymjUbRYJvpAybZAKEmXDGTM0mCsz8duOa4vTm5AY2Kg==", + "version": "27.4.0", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.4.0.tgz", + "integrity": "sha512-mjzqwWRD9Y1J1KUi7W97Gja1bwOOM5Ug0EZ6UDK3xS7j7mndrkwozHtSblfomlzyB4NepioNt+B2sOSzczVgtQ==", "dev": true, - "license": "MIT", "dependencies": { "@acemir/cssom": "^0.9.28", "@asamuzakjp/dom-selector": "^6.7.6", + "@exodus/bytes": "^1.6.0", "cssstyle": "^5.3.4", "data-urls": "^6.0.0", "decimal.js": "^10.6.0", - "html-encoding-sniffer": "^4.0.0", + "html-encoding-sniffer": "^6.0.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.6", "is-potential-custom-element-name": "^1.0.1", @@ -10844,7 +10858,6 @@ "tough-cookie": "^6.0.0", "w3c-xmlserializer": "^5.0.0", "webidl-conversions": "^8.0.0", - "whatwg-encoding": "^3.1.1", "whatwg-mimetype": "^4.0.0", "whatwg-url": "^15.1.0", "ws": "^8.18.3", @@ -13371,11 +13384,11 @@ } }, "node_modules/posthog-js": { - "version": "1.309.1", - "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.309.1.tgz", - "integrity": "sha512-JUJcQhYzNNKO0cgnSbowCsVi2RTu75XGZ2EmnTQti4tMGRCTOv/HCnZasdFniBGZ0rLugQkaScYca/84Ta2u5Q==", + "version": "1.310.1", + "resolved": "https://registry.npmjs.org/posthog-js/-/posthog-js-1.310.1.tgz", + "integrity": "sha512-UkR6zzlWNtqHDXHJl2Yk062DOmZyVKTPL5mX4j4V+u3RiYbMHJe47+PpMMUsvK1R2e1r/m9uSlHaJMJRzyUjGg==", "dependencies": { - "@posthog/core": "1.8.1", + "@posthog/core": "1.9.0", "core-js": "^3.38.1", "fflate": "^0.4.8", "preact": "^10.19.3", @@ -14647,13 +14660,12 @@ } }, "node_modules/socket.io-client": { - "version": "4.8.1", - "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.8.1.tgz", - "integrity": "sha512-hJVXfu3E28NmzGk8o1sHhN3om52tRvwYeidbj7xKy2eIIse5IoKX3USlS6Tqt3BHAtflLIkCQBkzVrEEfWUyYQ==", - "license": "MIT", + "version": "4.8.3", + "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.8.3.tgz", + "integrity": "sha512-uP0bpjWrjQmUt5DTHq9RuoCBdFJF10cdX9X+a368j/Ft0wmaVgxlrjvK3kjvgCODOMMOz9lcaRzxmso0bTWZ/g==", "dependencies": { "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.2", + "debug": "~4.4.1", "engine.io-client": "~6.6.1", "socket.io-parser": "~4.2.4" }, @@ -14661,23 +14673,6 @@ "node": ">=10.0.0" } }, - "node_modules/socket.io-client/node_modules/debug": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", - "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, "node_modules/socket.io-parser": { "version": "4.2.4", "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz", @@ -16163,32 +16158,6 @@ "node": ">=20" } }, - "node_modules/whatwg-encoding": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz", - "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "iconv-lite": "0.6.3" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/whatwg-encoding/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/whatwg-mimetype": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index f08f6ea3b6..c7524a4302 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -7,7 +7,7 @@ "node": ">=22.0.0" }, "dependencies": { - "@heroui/react": "2.8.6", + "@heroui/react": "2.8.7", "@microlink/react-json-view": "^1.26.2", "@monaco-editor/react": "^4.7.0-rc.0", "@react-router/node": "^7.11.0", @@ -15,8 +15,8 @@ "@tailwindcss/vite": "^4.1.18", "@tanstack/react-query": "^5.90.12", "@uidotdev/usehooks": "^2.4.1", - "@xterm/addon-fit": "^0.10.0", - "@xterm/xterm": "^5.4.0", + "@xterm/addon-fit": "^0.11.0", + "@xterm/xterm": "^6.0.0", "axios": "^1.13.2", "class-variance-authority": "^0.7.1", "clsx": "^2.1.1", @@ -29,7 +29,7 @@ "isbot": "^5.1.32", "lucide-react": "^0.562.0", "monaco-editor": "^0.55.1", - "posthog-js": "^1.309.1", + "posthog-js": "^1.310.1", "react": "^19.2.3", "react-dom": "^19.2.3", "react-hot-toast": "^2.6.0", @@ -41,7 +41,7 @@ "remark-breaks": "^4.0.0", "remark-gfm": "^4.0.1", "sirv-cli": "^3.0.1", - "socket.io-client": "^4.8.1", + "socket.io-client": "^4.8.3", "tailwind-merge": "^3.4.0", "tailwind-scrollbar": "^4.0.2", "vite": "^7.3.0", @@ -109,7 +109,7 @@ "eslint-plugin-react-hooks": "^4.6.2", "eslint-plugin-unused-imports": "^4.2.0", "husky": "^9.1.7", - "jsdom": "^27.3.0", + "jsdom": "^27.4.0", "lint-staged": "^16.2.7", "msw": "^2.6.6", "prettier": "^3.7.3", diff --git a/frontend/src/hooks/use-terminal.ts b/frontend/src/hooks/use-terminal.ts index caa2e42a15..5791bc0fa0 100644 --- a/frontend/src/hooks/use-terminal.ts +++ b/frontend/src/hooks/use-terminal.ts @@ -88,7 +88,6 @@ export const useTerminal = () => { fontSize: 14, scrollback: 10000, scrollSensitivity: 1, - fastScrollModifier: "alt", fastScrollSensitivity: 5, allowTransparency: true, disableStdin: true, // Make terminal read-only From d68b2cdd1aaf849358015461a78459873f6ab06a Mon Sep 17 00:00:00 2001 From: "sp.wack" <83104063+amanape@users.noreply.github.com> Date: Mon, 29 Dec 2025 18:01:22 +0400 Subject: [PATCH 77/80] hotfix(frontend): fix provider type import (#12187) --- frontend/src/context/conversation-subscriptions-provider.tsx | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/src/context/conversation-subscriptions-provider.tsx b/frontend/src/context/conversation-subscriptions-provider.tsx index d9d5cbde72..ab36d75774 100644 --- a/frontend/src/context/conversation-subscriptions-provider.tsx +++ b/frontend/src/context/conversation-subscriptions-provider.tsx @@ -19,6 +19,7 @@ import { renderConversationCreatedToast, renderConversationFinishedToast, } from "#/components/features/chat/microagent/microagent-status-toast"; +import { Provider } from "#/types/settings"; interface ConversationSocket { socket: Socket; @@ -31,7 +32,7 @@ interface ConversationSubscriptionsContextType { subscribeToConversation: (options: { conversationId: string; sessionApiKey: string | null; - providersSet: import("#/types/settings").Provider[]; + providersSet: Provider[]; baseUrl: string; socketPath?: string; onEvent?: (event: unknown, conversationId: string) => void; @@ -135,7 +136,7 @@ export function ConversationSubscriptionsProvider({ (options: { conversationId: string; sessionApiKey: string | null; - providersSet: import("#/types/settings").Provider[]; + providersSet: Provider[]; baseUrl: string; socketPath?: string; onEvent?: (event: unknown, conversationId: string) => void; From 2261281656d2b74a7a12592331919c725fe9c267 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Dec 2025 14:33:52 +0000 Subject: [PATCH 78/80] chore(deps): bump @tanstack/react-query from 5.90.12 to 5.90.14 in /frontend in the version-all group (#12189) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/package-lock.json | 18 ++++++++---------- frontend/package.json | 2 +- 2 files changed, 9 insertions(+), 11 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 5691cfa8c0..bc1a20c9f2 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -14,7 +14,7 @@ "@react-router/node": "^7.11.0", "@react-router/serve": "^7.11.0", "@tailwindcss/vite": "^4.1.18", - "@tanstack/react-query": "^5.90.12", + "@tanstack/react-query": "^5.90.14", "@uidotdev/usehooks": "^2.4.1", "@xterm/addon-fit": "^0.11.0", "@xterm/xterm": "^6.0.0", @@ -5543,22 +5543,20 @@ } }, "node_modules/@tanstack/query-core": { - "version": "5.90.12", - "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.12.tgz", - "integrity": "sha512-T1/8t5DhV/SisWjDnaiU2drl6ySvsHj1bHBCWNXd+/T+Hh1cf6JodyEYMd5sgwm+b/mETT4EV3H+zCVczCU5hg==", - "license": "MIT", + "version": "5.90.14", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.14.tgz", + "integrity": "sha512-/6di2yNI+YxpVrH9Ig74Q+puKnkCE+D0LGyagJEGndJHJc6ahkcc/UqirHKy8zCYE/N9KLggxcQvzYCsUBWgdw==", "funding": { "type": "github", "url": "https://github.com/sponsors/tannerlinsley" } }, "node_modules/@tanstack/react-query": { - "version": "5.90.12", - "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.12.tgz", - "integrity": "sha512-graRZspg7EoEaw0a8faiUASCyJrqjKPdqJ9EwuDRUF9mEYJ1YPczI9H+/agJ0mOJkPCJDk0lsz5QTrLZ/jQ2rg==", - "license": "MIT", + "version": "5.90.14", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.14.tgz", + "integrity": "sha512-JAMuULej09hrZ14W9+mxoRZ44rR2BuZfCd6oKTQVNfynQxCN3muH3jh3W46gqZNw5ZqY0ZVaS43Imb3dMr6tgw==", "dependencies": { - "@tanstack/query-core": "5.90.12" + "@tanstack/query-core": "5.90.14" }, "funding": { "type": "github", diff --git a/frontend/package.json b/frontend/package.json index c7524a4302..4d13caa0da 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -13,7 +13,7 @@ "@react-router/node": "^7.11.0", "@react-router/serve": "^7.11.0", "@tailwindcss/vite": "^4.1.18", - "@tanstack/react-query": "^5.90.12", + "@tanstack/react-query": "^5.90.14", "@uidotdev/usehooks": "^2.4.1", "@xterm/addon-fit": "^0.11.0", "@xterm/xterm": "^6.0.0", From 8d69b4066f84867c0363dc100d6d3bc2d67ad67b Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 29 Dec 2025 21:57:14 +0700 Subject: [PATCH 79/80] fix(backend): exception occurs when running the latest code from the main branch (v1 conversations) (#12183) --- openhands/app_server/sandbox/sandbox_spec_service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/openhands/app_server/sandbox/sandbox_spec_service.py b/openhands/app_server/sandbox/sandbox_spec_service.py index 77f4f4a6ab..a3f77db134 100644 --- a/openhands/app_server/sandbox/sandbox_spec_service.py +++ b/openhands/app_server/sandbox/sandbox_spec_service.py @@ -13,7 +13,7 @@ from openhands.sdk.utils.models import DiscriminatedUnionMixin # The version of the agent server to use for deployments. # Typically this will be the same as the values from the pyproject.toml -AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:97652be-python' +AGENT_SERVER_IMAGE = 'ghcr.io/openhands/agent-server:0b7ccc9-python' class SandboxSpecService(ABC): From d3afbfa4479bbbb41e9da1a7c43980e3f7735b6b Mon Sep 17 00:00:00 2001 From: Hiep Le <69354317+hieptl@users.noreply.github.com> Date: Mon, 29 Dec 2025 22:43:07 +0700 Subject: [PATCH 80/80] refactor(backend): add description field support for secrets (v1 conversations) (#12080) --- .../live_status_app_conversation_service.py | 6 +- .../app_server/user/auth_user_context.py | 7 +- ...st_live_status_app_conversation_service.py | 180 +++++++++++++++--- 3 files changed, 162 insertions(+), 31 deletions(-) diff --git a/openhands/app_server/app_conversation/live_status_app_conversation_service.py b/openhands/app_server/app_conversation/live_status_app_conversation_service.py index 11d9e4fef8..887c4dfeb4 100644 --- a/openhands/app_server/app_conversation/live_status_app_conversation_service.py +++ b/openhands/app_server/app_conversation/live_status_app_conversation_service.py @@ -579,6 +579,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase): continue secret_name = f'{provider_type.name}_TOKEN' + description = f'{provider_type.name} authentication token' if self.web_url: # Create an access token for web-based authentication @@ -598,12 +599,15 @@ class LiveStatusAppConversationService(AppConversationServiceBase): secrets[secret_name] = LookupSecret( url=self.web_url + '/api/v1/webhooks/secrets', headers=headers, + description=description, ) else: # Use static token for environments without web URL access static_token = await self.user_context.get_latest_token(provider_type) if static_token: - secrets[secret_name] = StaticSecret(value=static_token) + secrets[secret_name] = StaticSecret( + value=static_token, description=description + ) return secrets diff --git a/openhands/app_server/user/auth_user_context.py b/openhands/app_server/user/auth_user_context.py index 4d64888427..7adf7f902a 100644 --- a/openhands/app_server/user/auth_user_context.py +++ b/openhands/app_server/user/auth_user_context.py @@ -81,7 +81,12 @@ class AuthUserContext(UserContext): secrets = await self.user_auth.get_secrets() if secrets: for name, custom_secret in secrets.custom_secrets.items(): - results[name] = StaticSecret(value=custom_secret.secret) + results[name] = StaticSecret( + value=custom_secret.secret, + description=custom_secret.description + if custom_secret.description + else None, + ) return results diff --git a/tests/unit/app_server/test_live_status_app_conversation_service.py b/tests/unit/app_server/test_live_status_app_conversation_service.py index f05cb0581a..126e54c69c 100644 --- a/tests/unit/app_server/test_live_status_app_conversation_service.py +++ b/tests/unit/app_server/test_live_status_app_conversation_service.py @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, Mock, patch from uuid import UUID, uuid4 import pytest +from pydantic import SecretStr from openhands.agent_server.models import ( SendMessageRequest, @@ -29,7 +30,7 @@ from openhands.app_server.sandbox.sandbox_models import ( ) from openhands.app_server.sandbox.sandbox_spec_models import SandboxSpecInfo from openhands.app_server.user.user_context import UserContext -from openhands.integrations.provider import ProviderType +from openhands.integrations.provider import ProviderToken, ProviderType from openhands.sdk import Agent, Event from openhands.sdk.llm import LLM from openhands.sdk.secret import LookupSecret, StaticSecret @@ -114,10 +115,6 @@ class TestLiveStatusAppConversationService: async def test_setup_secrets_for_git_providers_with_web_url(self): """Test _setup_secrets_for_git_providers with web URL (creates access token).""" # Arrange - from pydantic import SecretStr - - from openhands.integrations.provider import ProviderToken - base_secrets = {} self.mock_user_context.get_secrets.return_value = base_secrets self.mock_jwt_service.create_jws_token.return_value = 'test_access_token' @@ -144,6 +141,9 @@ class TestLiveStatusAppConversationService: == 'https://test.example.com/api/v1/webhooks/secrets' ) assert result['GITHUB_TOKEN'].headers['X-Access-Token'] == 'test_access_token' + # Verify descriptions are included + assert result['GITHUB_TOKEN'].description == 'GITHUB authentication token' + assert result['GITLAB_TOKEN'].description == 'GITLAB authentication token' # Should be called twice, once for each provider assert self.mock_jwt_service.create_jws_token.call_count == 2 @@ -152,10 +152,6 @@ class TestLiveStatusAppConversationService: async def test_setup_secrets_for_git_providers_with_saas_mode(self): """Test _setup_secrets_for_git_providers with SaaS mode (includes keycloak cookie).""" # Arrange - from pydantic import SecretStr - - from openhands.integrations.provider import ProviderToken - self.service.app_mode = 'saas' self.service.keycloak_auth_cookie = 'test_cookie' base_secrets = {} @@ -179,15 +175,13 @@ class TestLiveStatusAppConversationService: assert isinstance(lookup_secret, LookupSecret) assert 'Cookie' in lookup_secret.headers assert lookup_secret.headers['Cookie'] == 'keycloak_auth=test_cookie' + # Verify description is included + assert lookup_secret.description == 'GITLAB authentication token' @pytest.mark.asyncio async def test_setup_secrets_for_git_providers_without_web_url(self): """Test _setup_secrets_for_git_providers without web URL (uses static token).""" # Arrange - from pydantic import SecretStr - - from openhands.integrations.provider import ProviderToken - self.service.web_url = None base_secrets = {} self.mock_user_context.get_secrets.return_value = base_secrets @@ -208,6 +202,8 @@ class TestLiveStatusAppConversationService: assert 'GITHUB_TOKEN' in result assert isinstance(result['GITHUB_TOKEN'], StaticSecret) assert result['GITHUB_TOKEN'].value.get_secret_value() == 'static_token_value' + # Verify description is included + assert result['GITHUB_TOKEN'].description == 'GITHUB authentication token' self.mock_user_context.get_latest_token.assert_called_once_with( ProviderType.GITHUB ) @@ -216,10 +212,6 @@ class TestLiveStatusAppConversationService: async def test_setup_secrets_for_git_providers_no_static_token(self): """Test _setup_secrets_for_git_providers when no static token is available.""" # Arrange - from pydantic import SecretStr - - from openhands.integrations.provider import ProviderToken - self.service.web_url = None base_secrets = {} self.mock_user_context.get_secrets.return_value = base_secrets @@ -240,6 +232,148 @@ class TestLiveStatusAppConversationService: assert 'GITHUB_TOKEN' not in result assert result == base_secrets + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_descriptions_included(self): + """Test _setup_secrets_for_git_providers includes descriptions for all provider types.""" + # Arrange + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_jwt_service.create_jws_token.return_value = 'test_access_token' + + # Mock provider tokens for multiple providers + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')), + ProviderType.BITBUCKET: ProviderToken(token=SecretStr('bitbucket_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert - verify all secrets have correct descriptions + assert 'GITHUB_TOKEN' in result + assert isinstance(result['GITHUB_TOKEN'], LookupSecret) + assert result['GITHUB_TOKEN'].description == 'GITHUB authentication token' + + assert 'GITLAB_TOKEN' in result + assert isinstance(result['GITLAB_TOKEN'], LookupSecret) + assert result['GITLAB_TOKEN'].description == 'GITLAB authentication token' + + assert 'BITBUCKET_TOKEN' in result + assert isinstance(result['BITBUCKET_TOKEN'], LookupSecret) + assert result['BITBUCKET_TOKEN'].description == 'BITBUCKET authentication token' + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_static_secret_description(self): + """Test _setup_secrets_for_git_providers includes description for StaticSecret.""" + # Arrange + self.service.web_url = None + base_secrets = {} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_user_context.get_latest_token.return_value = 'static_token_value' + + # Mock provider tokens for multiple providers + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + ProviderType.GITLAB: ProviderToken(token=SecretStr('gitlab_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert - verify StaticSecret objects have descriptions + assert 'GITHUB_TOKEN' in result + assert isinstance(result['GITHUB_TOKEN'], StaticSecret) + assert result['GITHUB_TOKEN'].description == 'GITHUB authentication token' + + assert 'GITLAB_TOKEN' in result + assert isinstance(result['GITLAB_TOKEN'], StaticSecret) + assert result['GITLAB_TOKEN'].description == 'GITLAB authentication token' + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_preserves_custom_secret_descriptions( + self, + ): + """Test _setup_secrets_for_git_providers preserves descriptions from custom secrets.""" + # Arrange + # Mock custom secrets with descriptions + custom_secret_with_desc = StaticSecret( + value=SecretStr('custom_secret_value'), + description='Custom API key for external service', + ) + custom_secret_no_desc = StaticSecret( + value=SecretStr('another_secret_value'), + description=None, + ) + base_secrets = { + 'CUSTOM_API_KEY': custom_secret_with_desc, + 'ANOTHER_SECRET': custom_secret_no_desc, + } + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_jwt_service.create_jws_token.return_value = 'test_access_token' + + # Mock provider tokens + provider_tokens = { + ProviderType.GITHUB: ProviderToken(token=SecretStr('github_token')), + } + self.mock_user_context.get_provider_tokens = AsyncMock( + return_value=provider_tokens + ) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert - verify custom secrets are preserved with their descriptions + assert 'CUSTOM_API_KEY' in result + assert isinstance(result['CUSTOM_API_KEY'], StaticSecret) + assert ( + result['CUSTOM_API_KEY'].description + == 'Custom API key for external service' + ) + assert ( + result['CUSTOM_API_KEY'].value.get_secret_value() == 'custom_secret_value' + ) + + assert 'ANOTHER_SECRET' in result + assert isinstance(result['ANOTHER_SECRET'], StaticSecret) + assert result['ANOTHER_SECRET'].description is None + assert ( + result['ANOTHER_SECRET'].value.get_secret_value() == 'another_secret_value' + ) + + # Verify git provider token is also included + assert 'GITHUB_TOKEN' in result + assert result['GITHUB_TOKEN'].description == 'GITHUB authentication token' + + @pytest.mark.asyncio + async def test_setup_secrets_for_git_providers_custom_secret_empty_description( + self, + ): + """Test _setup_secrets_for_git_providers handles custom secrets with empty descriptions.""" + # Arrange + custom_secret_empty_desc = StaticSecret( + value=SecretStr('secret_value'), + description='', # Empty string description + ) + base_secrets = {'MY_SECRET': custom_secret_empty_desc} + self.mock_user_context.get_secrets.return_value = base_secrets + self.mock_user_context.get_provider_tokens = AsyncMock(return_value=None) + + # Act + result = await self.service._setup_secrets_for_git_providers(self.mock_user) + + # Assert - empty description should be preserved as-is + assert 'MY_SECRET' in result + assert isinstance(result['MY_SECRET'], StaticSecret) + # Empty string description is preserved + assert result['MY_SECRET'].description == '' + @pytest.mark.asyncio async def test_configure_llm_and_mcp_with_custom_model(self): """Test _configure_llm_and_mcp with custom LLM model.""" @@ -370,8 +504,6 @@ class TestLiveStatusAppConversationService: async def test_configure_llm_and_mcp_tavily_with_user_search_api_key(self): """Test _configure_llm_and_mcp adds tavily when user has search_api_key.""" # Arrange - from pydantic import SecretStr - self.mock_user.search_api_key = SecretStr('user_search_key') self.mock_user_context.get_mcp_api_key.return_value = 'mcp_api_key' @@ -416,8 +548,6 @@ class TestLiveStatusAppConversationService: async def test_configure_llm_and_mcp_tavily_user_key_takes_precedence(self): """Test _configure_llm_and_mcp user search_api_key takes precedence over env key.""" # Arrange - from pydantic import SecretStr - self.mock_user.search_api_key = SecretStr('user_search_key') self.service.tavily_api_key = 'env_tavily_key' self.mock_user_context.get_mcp_api_key.return_value = None @@ -486,8 +616,6 @@ class TestLiveStatusAppConversationService: Even in SAAS mode, if the user has their own search_api_key, tavily should be added. """ # Arrange - simulate SAAS mode with user having their own search key - from pydantic import SecretStr - self.service.app_mode = AppMode.SAAS.value self.service.tavily_api_key = None # In SAAS mode, this should be None self.mock_user.search_api_key = SecretStr('user_search_key') @@ -512,8 +640,6 @@ class TestLiveStatusAppConversationService: async def test_configure_llm_and_mcp_tavily_with_empty_user_search_key(self): """Test _configure_llm_and_mcp handles empty user search_api_key correctly.""" # Arrange - from pydantic import SecretStr - self.mock_user.search_api_key = SecretStr('') # Empty string self.service.tavily_api_key = 'env_tavily_key' self.mock_user_context.get_mcp_api_key.return_value = None @@ -537,8 +663,6 @@ class TestLiveStatusAppConversationService: async def test_configure_llm_and_mcp_tavily_with_whitespace_user_search_key(self): """Test _configure_llm_and_mcp handles whitespace-only user search_api_key correctly.""" # Arrange - from pydantic import SecretStr - self.mock_user.search_api_key = SecretStr(' ') # Whitespace only self.service.tavily_api_key = 'env_tavily_key' self.mock_user_context.get_mcp_api_key.return_value = None @@ -1314,8 +1438,6 @@ class TestLiveStatusAppConversationService: async def test_configure_llm_and_mcp_merges_system_and_custom_servers(self): """Test _configure_llm_and_mcp merges both system and custom MCP servers.""" # Arrange - from pydantic import SecretStr - from openhands.core.config.mcp_config import ( MCPConfig, MCPSSEServerConfig,