fix(backend): apply user-defined condenser_max_size in new v1 conversations (#11862)

Hiep Le
2025-12-03 00:24:25 +07:00
committed by GitHub
parent 6139e39449
commit fd13c91387
5 changed files with 229 additions and 19 deletions
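
In short: new v1 conversations now build their condenser through a shared helper rather than the preset factories, so a user-defined condenser_max_size is applied, while None falls back to the SDK defaults (max_size=120, keep_first=4). Below is a minimal sketch of the override pattern, using hypothetical stand-ins rather than the real openhands.sdk classes:

# Sketch only: FakeCondenser and create_condenser are illustrative stand-ins;
# the real code uses openhands.sdk's LLM and LLMSummarizingCondenser.
from dataclasses import dataclass


@dataclass
class FakeCondenser:
    llm_usage_id: str
    max_size: int = 120   # SDK default noted in the diff
    keep_first: int = 4   # SDK default noted in the diff


def create_condenser(usage_id: str, condenser_max_size: int | None) -> FakeCondenser:
    kwargs = {'llm_usage_id': usage_id}
    # Only override max_size when the user supplied a value; None keeps the default.
    if condenser_max_size is not None:
        kwargs['max_size'] = condenser_max_size
    return FakeCondenser(**kwargs)


print(create_condenser('condenser', None).max_size)  # 120 (SDK default)
print(create_condenser('condenser', 150).max_size)   # 150 (user setting)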

View File

@@ -9,6 +9,7 @@ from typing import AsyncGenerator
 import base62
 from openhands.app_server.app_conversation.app_conversation_models import (
+    AgentType,
     AppConversationStartTask,
     AppConversationStartTaskStatus,
 )
@@ -25,7 +26,9 @@ from openhands.app_server.sandbox.sandbox_models import SandboxInfo
 from openhands.app_server.user.user_context import UserContext
 from openhands.sdk import Agent
 from openhands.sdk.context.agent_context import AgentContext
+from openhands.sdk.context.condenser import LLMSummarizingCondenser
 from openhands.sdk.context.skills import load_user_skills
+from openhands.sdk.llm import LLM
 from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace
 
 _logger = logging.getLogger(__name__)
@@ -340,3 +343,39 @@ class AppConversationServiceBase(AppConversationService, ABC):
             return
         _logger.info('Git pre-commit hook installed successfully')
 
+    def _create_condenser(
+        self,
+        llm: LLM,
+        agent_type: AgentType,
+        condenser_max_size: int | None,
+    ) -> LLMSummarizingCondenser:
+        """Create a condenser based on user settings and agent type.
+
+        Args:
+            llm: The LLM instance to use for condensation
+            agent_type: Type of agent (PLAN or DEFAULT)
+            condenser_max_size: condenser_max_size setting
+
+        Returns:
+            Configured LLMSummarizingCondenser instance
+        """
+        # LLMSummarizingCondenser has defaults: max_size=120, keep_first=4
+        condenser_kwargs = {
+            'llm': llm.model_copy(
+                update={
+                    'usage_id': (
+                        'condenser'
+                        if agent_type == AgentType.DEFAULT
+                        else 'planning_condenser'
+                    )
+                }
+            ),
+        }
+        # Only override max_size if user has a custom value
+        if condenser_max_size is not None:
+            condenser_kwargs['max_size'] = condenser_max_size
+
+        condenser = LLMSummarizingCondenser(**condenser_kwargs)
+        return condenser

View File

@@ -76,12 +76,10 @@ from openhands.sdk.security.confirmation_policy import AlwaysConfirm
 from openhands.sdk.workspace.remote.async_remote_workspace import AsyncRemoteWorkspace
 from openhands.server.types import AppMode
 from openhands.tools.preset.default import (
-    get_default_condenser,
     get_default_tools,
 )
 from openhands.tools.preset.planning import (
     format_plan_structure,
-    get_planning_condenser,
     get_planning_tools,
 )
 
@@ -643,6 +641,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
         agent_type: AgentType,
         system_message_suffix: str | None,
         mcp_config: dict,
+        condenser_max_size: int | None,
     ) -> Agent:
         """Create an agent with appropriate tools and context based on agent type.
 
@@ -651,10 +650,14 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
             agent_type: Type of agent to create (PLAN or DEFAULT)
             system_message_suffix: Optional suffix for system messages
             mcp_config: MCP configuration dictionary
+            condenser_max_size: condenser_max_size setting
 
         Returns:
             Configured Agent instance with context
         """
+        # Create condenser with user's settings
+        condenser = self._create_condenser(llm, agent_type, condenser_max_size)
+
         # Create agent based on type
         if agent_type == AgentType.PLAN:
             agent = Agent(
@@ -662,9 +665,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
                 tools=get_planning_tools(),
                 system_prompt_filename='system_prompt_planning.j2',
                 system_prompt_kwargs={'plan_structure': format_plan_structure()},
-                condenser=get_planning_condenser(
-                    llm=llm.model_copy(update={'usage_id': 'planning_condenser'})
-                ),
+                condenser=condenser,
                 security_analyzer=None,
                 mcp_config=mcp_config,
             )
@@ -673,9 +674,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
                 llm=llm,
                 tools=get_default_tools(enable_browser=True),
                 system_prompt_kwargs={'cli_mode': False},
-                condenser=get_default_condenser(
-                    llm=llm.model_copy(update={'usage_id': 'condenser'})
-                ),
+                condenser=condenser,
                 mcp_config=mcp_config,
             )
@@ -777,7 +776,7 @@ class LiveStatusAppConversationService(AppConversationServiceBase):
         # Create agent with context
         agent = self._create_agent_with_context(
-            llm, agent_type, system_message_suffix, mcp_config
+            llm, agent_type, system_message_suffix, mcp_config, user.condenser_max_size
         )
 
         # Finalize and return the conversation request

View File

@@ -5,13 +5,15 @@ and the recent bug fixes for git checkout operations.
""" """
import subprocess import subprocess
from unittest.mock import AsyncMock, MagicMock, patch from unittest.mock import AsyncMock, MagicMock, Mock, patch
import pytest import pytest
from openhands.app_server.app_conversation.app_conversation_models import AgentType
from openhands.app_server.app_conversation.app_conversation_service_base import ( from openhands.app_server.app_conversation.app_conversation_service_base import (
AppConversationServiceBase, AppConversationServiceBase,
) )
from openhands.app_server.user.user_context import UserContext
class MockUserInfo: class MockUserInfo:
@@ -286,6 +288,152 @@ async def test_clone_or_init_git_repo_custom_timeout(service):
     )
 
 
+@patch(
+    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
+)
+def test_create_condenser_default_agent_with_none_max_size(mock_condenser_class):
+    """Test _create_condenser for DEFAULT agent with condenser_max_size = None uses default."""
+    # Arrange
+    mock_user_context = Mock(spec=UserContext)
+    with patch.object(
+        AppConversationServiceBase,
+        '__abstractmethods__',
+        set(),
+    ):
+        service = AppConversationServiceBase(
+            init_git_in_empty_workspace=True,
+            user_context=mock_user_context,
+        )
+
+    mock_llm = MagicMock()
+    mock_llm_copy = MagicMock()
+    mock_llm_copy.usage_id = 'condenser'
+    mock_llm.model_copy.return_value = mock_llm_copy
+    mock_condenser_instance = MagicMock()
+    mock_condenser_class.return_value = mock_condenser_instance
+
+    # Act
+    service._create_condenser(mock_llm, AgentType.DEFAULT, None)
+
+    # Assert
+    mock_condenser_class.assert_called_once()
+    call_kwargs = mock_condenser_class.call_args[1]
+    # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120)
+    assert 'max_size' not in call_kwargs
+    # keep_first is never passed (uses SDK default of 4)
+    assert 'keep_first' not in call_kwargs
+    assert call_kwargs['llm'].usage_id == 'condenser'
+    mock_llm.model_copy.assert_called_once()
+
+
+@patch(
+    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
+)
+def test_create_condenser_default_agent_with_custom_max_size(mock_condenser_class):
+    """Test _create_condenser for DEFAULT agent with custom condenser_max_size."""
+    # Arrange
+    mock_user_context = Mock(spec=UserContext)
+    with patch.object(
+        AppConversationServiceBase,
+        '__abstractmethods__',
+        set(),
+    ):
+        service = AppConversationServiceBase(
+            init_git_in_empty_workspace=True,
+            user_context=mock_user_context,
+        )
+
+    mock_llm = MagicMock()
+    mock_llm_copy = MagicMock()
+    mock_llm_copy.usage_id = 'condenser'
+    mock_llm.model_copy.return_value = mock_llm_copy
+    mock_condenser_instance = MagicMock()
+    mock_condenser_class.return_value = mock_condenser_instance
+
+    # Act
+    service._create_condenser(mock_llm, AgentType.DEFAULT, 150)
+
+    # Assert
+    mock_condenser_class.assert_called_once()
+    call_kwargs = mock_condenser_class.call_args[1]
+    assert call_kwargs['max_size'] == 150  # Custom value should be used
+    # keep_first is never passed (uses SDK default of 4)
+    assert 'keep_first' not in call_kwargs
+    assert call_kwargs['llm'].usage_id == 'condenser'
+    mock_llm.model_copy.assert_called_once()
+
+
+@patch(
+    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
+)
+def test_create_condenser_plan_agent_with_none_max_size(mock_condenser_class):
+    """Test _create_condenser for PLAN agent with condenser_max_size = None uses default."""
+    # Arrange
+    mock_user_context = Mock(spec=UserContext)
+    with patch.object(
+        AppConversationServiceBase,
+        '__abstractmethods__',
+        set(),
+    ):
+        service = AppConversationServiceBase(
+            init_git_in_empty_workspace=True,
+            user_context=mock_user_context,
+        )
+
+    mock_llm = MagicMock()
+    mock_llm_copy = MagicMock()
+    mock_llm_copy.usage_id = 'planning_condenser'
+    mock_llm.model_copy.return_value = mock_llm_copy
+    mock_condenser_instance = MagicMock()
+    mock_condenser_class.return_value = mock_condenser_instance
+
+    # Act
+    service._create_condenser(mock_llm, AgentType.PLAN, None)
+
+    # Assert
+    mock_condenser_class.assert_called_once()
+    call_kwargs = mock_condenser_class.call_args[1]
+    # When condenser_max_size is None, max_size should not be passed (uses SDK default of 120)
+    assert 'max_size' not in call_kwargs
+    # keep_first is never passed (uses SDK default of 4)
+    assert 'keep_first' not in call_kwargs
+    assert call_kwargs['llm'].usage_id == 'planning_condenser'
+    mock_llm.model_copy.assert_called_once()
+
+
+@patch(
+    'openhands.app_server.app_conversation.app_conversation_service_base.LLMSummarizingCondenser'
+)
+def test_create_condenser_plan_agent_with_custom_max_size(mock_condenser_class):
+    """Test _create_condenser for PLAN agent with custom condenser_max_size."""
+    # Arrange
+    mock_user_context = Mock(spec=UserContext)
+    with patch.object(
+        AppConversationServiceBase,
+        '__abstractmethods__',
+        set(),
+    ):
+        service = AppConversationServiceBase(
+            init_git_in_empty_workspace=True,
+            user_context=mock_user_context,
+        )
+
+    mock_llm = MagicMock()
+    mock_llm_copy = MagicMock()
+    mock_llm_copy.usage_id = 'planning_condenser'
+    mock_llm.model_copy.return_value = mock_llm_copy
+    mock_condenser_instance = MagicMock()
+    mock_condenser_class.return_value = mock_condenser_instance
+
+    # Act
+    service._create_condenser(mock_llm, AgentType.PLAN, 200)
+
+    # Assert
+    mock_condenser_class.assert_called_once()
+    call_kwargs = mock_condenser_class.call_args[1]
+    assert call_kwargs['max_size'] == 200  # Custom value should be used
+    # keep_first is never passed (uses SDK default of 4)
+    assert 'keep_first' not in call_kwargs
+    assert call_kwargs['llm'].usage_id == 'planning_condenser'
+    mock_llm.model_copy.assert_called_once()
+
+
 # =============================================================================
 # Tests for _configure_git_user_settings
 # =============================================================================

View File

@@ -63,6 +63,7 @@ class TestLiveStatusAppConversationService:
         self.mock_user.llm_api_key = 'test_api_key'
         self.mock_user.confirmation_mode = False
         self.mock_user.search_api_key = None  # Default to None
+        self.mock_user.condenser_max_size = None  # Default to None
 
         # Mock sandbox
         self.mock_sandbox = Mock(spec=SandboxInfo)
@@ -421,20 +422,21 @@ class TestLiveStatusAppConversationService:
         'openhands.app_server.app_conversation.live_status_app_conversation_service.get_planning_tools'
     )
     @patch(
-        'openhands.app_server.app_conversation.live_status_app_conversation_service.get_planning_condenser'
+        'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser'
     )
     @patch(
         'openhands.app_server.app_conversation.live_status_app_conversation_service.format_plan_structure'
     )
     def test_create_agent_with_context_planning_agent(
-        self, mock_format_plan, mock_get_condenser, mock_get_tools
+        self, mock_format_plan, mock_create_condenser, mock_get_tools
     ):
         """Test _create_agent_with_context for planning agent type."""
         # Arrange
         mock_llm = Mock(spec=LLM)
         mock_llm.model_copy.return_value = mock_llm
         mock_get_tools.return_value = []
-        mock_get_condenser.return_value = Mock()
+        mock_condenser = Mock()
+        mock_create_condenser.return_value = mock_condenser
         mock_format_plan.return_value = 'test_plan_structure'
         mcp_config = {'default': {'url': 'test'}}
         system_message_suffix = 'Test suffix'
@@ -448,7 +450,11 @@ class TestLiveStatusAppConversationService:
             mock_agent_class.return_value = mock_agent_instance
 
             self.service._create_agent_with_context(
-                mock_llm, AgentType.PLAN, system_message_suffix, mcp_config
+                mock_llm,
+                AgentType.PLAN,
+                system_message_suffix,
+                mcp_config,
+                self.mock_user.condenser_max_size,
             )
 
         # Assert
@@ -462,22 +468,27 @@ class TestLiveStatusAppConversationService:
         )
         assert call_kwargs['mcp_config'] == mcp_config
         assert call_kwargs['security_analyzer'] is None
+        assert call_kwargs['condenser'] == mock_condenser
+        mock_create_condenser.assert_called_once_with(
+            mock_llm, AgentType.PLAN, self.mock_user.condenser_max_size
+        )
 
     @patch(
         'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_tools'
     )
     @patch(
-        'openhands.app_server.app_conversation.live_status_app_conversation_service.get_default_condenser'
+        'openhands.app_server.app_conversation.app_conversation_service_base.AppConversationServiceBase._create_condenser'
     )
     def test_create_agent_with_context_default_agent(
-        self, mock_get_condenser, mock_get_tools
+        self, mock_create_condenser, mock_get_tools
     ):
         """Test _create_agent_with_context for default agent type."""
         # Arrange
         mock_llm = Mock(spec=LLM)
         mock_llm.model_copy.return_value = mock_llm
         mock_get_tools.return_value = []
-        mock_get_condenser.return_value = Mock()
+        mock_condenser = Mock()
+        mock_create_condenser.return_value = mock_condenser
         mcp_config = {'default': {'url': 'test'}}
 
         # Act
@@ -489,7 +500,11 @@ class TestLiveStatusAppConversationService:
             mock_agent_class.return_value = mock_agent_instance
 
             self.service._create_agent_with_context(
-                mock_llm, AgentType.DEFAULT, None, mcp_config
+                mock_llm,
+                AgentType.DEFAULT,
+                None,
+                mcp_config,
+                self.mock_user.condenser_max_size,
             )
 
         # Assert
@@ -498,7 +513,11 @@ class TestLiveStatusAppConversationService:
         assert call_kwargs['llm'] == mock_llm
         assert call_kwargs['system_prompt_kwargs']['cli_mode'] is False
         assert call_kwargs['mcp_config'] == mcp_config
+        assert call_kwargs['condenser'] == mock_condenser
         mock_get_tools.assert_called_once_with(enable_browser=True)
+        mock_create_condenser.assert_called_once_with(
+            mock_llm, AgentType.DEFAULT, self.mock_user.condenser_max_size
+        )
 
     @pytest.mark.asyncio
     @patch(
@@ -693,6 +712,10 @@ class TestLiveStatusAppConversationService:
             self.mock_user, 'gpt-4'
         )
         self.service._create_agent_with_context.assert_called_once_with(
-            mock_llm, AgentType.DEFAULT, 'Test suffix', mock_mcp_config
+            mock_llm,
+            AgentType.DEFAULT,
+            'Test suffix',
+            mock_mcp_config,
+            self.mock_user.condenser_max_size,
         )
         self.service._finalize_conversation_request.assert_called_once()

View File

@@ -152,6 +152,7 @@ class TestExperimentManagerIntegration:
                 llm_base_url=None,
                 llm_api_key=None,
                 confirmation_mode=False,
+                condenser_max_size=None,
             )
 
         async def get_secrets(self):