diff --git a/core/agents/bug_hunter.py b/core/agents/bug_hunter.py
index 656166b0..ca81cf22 100644
--- a/core/agents/bug_hunter.py
+++ b/core/agents/bug_hunter.py
@@ -5,7 +5,7 @@ from pydantic import BaseModel, Field
 from core.agents.base import BaseAgent
 from core.agents.convo import AgentConvo
 from core.agents.response import AgentResponse
-from core.config import magic_words
+from core.config import BUG_HUNT_AGENT_NAME, magic_words
 from core.db.models.project_state import IterationStatus
 from core.llm.parser import JSONParser
 from core.log import get_logger
@@ -61,7 +61,7 @@ class BugHunter(BaseAgent):
         self.next_state.current_iteration["bug_reproduction_description"] = bug_reproduction_instructions
 
     async def check_logs(self, logs_message: str = None):
-        llm = self.get_llm()
+        llm = self.get_llm(BUG_HUNT_AGENT_NAME)
         convo = AgentConvo(self).template(
             "iteration",
             current_task=self.current_state.current_task,
@@ -90,6 +90,7 @@ class BugHunter(BaseAgent):
             )
             .require_schema(HuntConclusionOptions)
         )
+        llm = self.get_llm()
         hunt_conclusion = await llm(convo, parser=JSONParser(HuntConclusionOptions), temperature=0)
 
         self.next_state.current_iteration["description"] = human_readable_instructions
diff --git a/core/config/__init__.py b/core/config/__init__.py
index 20152e0f..5fae2f21 100644
--- a/core/config/__init__.py
+++ b/core/config/__init__.py
@@ -36,6 +36,7 @@ IGNORE_SIZE_THRESHOLD = 50000  # 50K+ files are ignored by default
 DEFAULT_AGENT_NAME = "default"
 DESCRIBE_FILES_AGENT_NAME = "CodeMonkey.describe_files"
 ROUTE_FILES_AGENT_NAME = "Troubleshooter.get_route_files"
+BUG_HUNT_AGENT_NAME = "BugHunter.logs_or_fix"
 
 # Endpoint for the external documentation
 EXTERNAL_DOCUMENTATION_API = "http://docs-pythagora-io-439719575.us-east-1.elb.amazonaws.com"
@@ -310,6 +311,7 @@ class Config(_StrictModel):
         default={
             DEFAULT_AGENT_NAME: AgentLLMConfig(),
             DESCRIBE_FILES_AGENT_NAME: AgentLLMConfig(model="gpt-3.5-turbo", temperature=0.0),
+            BUG_HUNT_AGENT_NAME: AgentLLMConfig(model="claude-3-5-sonnet-20240620", temperature=0.0),
             ROUTE_FILES_AGENT_NAME: AgentLLMConfig(model="gpt-4o", temperature=0.0),
         }
     )
diff --git a/core/llm/anthropic_client.py b/core/llm/anthropic_client.py
index f458a7b2..a834079a 100644
--- a/core/llm/anthropic_client.py
+++ b/core/llm/anthropic_client.py
@@ -23,7 +23,7 @@ class AnthropicClient(BaseLLMClient):
     def _init_client(self):
         self.client = AsyncAnthropic(
             api_key=self.config.api_key,
-            base_url=self.config.base_url,
+            # base_url=self.config.base_url,
             timeout=Timeout(
                 max(self.config.connect_timeout, self.config.read_timeout),
                 connect=self.config.connect_timeout,
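The diff wires up a named per-agent LLM entry: it defines `BUG_HUNT_AGENT_NAME`, maps it to a Claude model in the default `Config.agent` dictionary, and has `BugHunter.check_logs` request that named LLM (while switching back to the plain `self.get_llm()` default for the structured hunt-conclusion call). The diff does not show how `get_llm` resolves a name against the config, so the following is only a minimal sketch of that lookup-with-fallback pattern; the function name `llm_config_for_agent`, the placeholder defaults in `AgentLLMConfig`, and the fallback-to-default behavior are assumptions, not the project's actual implementation.

```python
# Minimal sketch of resolving a named agent (e.g. "BugHunter.logs_or_fix")
# against a Config.agent-style mapping, falling back to the default entry.
# Field defaults and the fallback behavior are assumptions for illustration.
from dataclasses import dataclass

DEFAULT_AGENT_NAME = "default"
BUG_HUNT_AGENT_NAME = "BugHunter.logs_or_fix"


@dataclass
class AgentLLMConfig:
    model: str = "gpt-4o"      # placeholder default, not taken from the diff
    temperature: float = 0.5   # placeholder default, not taken from the diff


# Mirrors the shape of the Config.agent default shown in the diff.
AGENT_CONFIGS = {
    DEFAULT_AGENT_NAME: AgentLLMConfig(),
    BUG_HUNT_AGENT_NAME: AgentLLMConfig(model="claude-3-5-sonnet-20240620", temperature=0.0),
}


def llm_config_for_agent(name: str = DEFAULT_AGENT_NAME) -> AgentLLMConfig:
    """Return the agent-specific LLM config, or the default entry if none is defined."""
    return AGENT_CONFIGS.get(name, AGENT_CONFIGS[DEFAULT_AGENT_NAME])


if __name__ == "__main__":
    # A caller like BugHunter.check_logs would resolve to the Claude entry;
    # agents without a dedicated entry fall back to the default config.
    print(llm_config_for_agent(BUG_HUNT_AGENT_NAME).model)  # claude-3-5-sonnet-20240620
    print(llm_config_for_agent("SomeOtherAgent").model)     # default model
```

Under this reading, adding a new specialized agent LLM is a two-step change: declare the agent-name constant in `core/config/__init__.py` with a default `AgentLLMConfig`, then pass that name to `get_llm(...)` at the call site that should use it.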