Merge pull request #1054 from Pythagora-io/fixes

properly setup default models
LeonOstrez authored on 2024-07-16 13:22:30 +01:00 (committed by GitHub)
5 changed files with 8 additions and 9 deletions

View File

@@ -5,7 +5,7 @@ from pydantic import BaseModel, Field
 from core.agents.base import BaseAgent
 from core.agents.convo import AgentConvo
 from core.agents.response import AgentResponse, ResponseType
-from core.config import DESCRIBE_FILES_AGENT_NAME
+from core.config import CODE_MONKEY_AGENT_NAME, DESCRIBE_FILES_AGENT_NAME
 from core.llm.parser import JSONParser, OptionalCodeBlockParser
 from core.log import get_logger
@@ -56,7 +56,7 @@ class CodeMonkey(BaseAgent):
         iterations = self.current_state.iterations
         user_feedback = None
         user_feedback_qa = None
-        llm = self.get_llm()
+        llm = self.get_llm(CODE_MONKEY_AGENT_NAME)
         if "task_review_feedback" in task and task["task_review_feedback"]:
             instructions = task.get("task_review_feedback")
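
For context, a minimal self-contained sketch of the selection pattern this hunk relies on (not the actual get_llm()/core.config implementation): get_llm() with no argument presumably resolves to the "default" agent entry, while get_llm(CODE_MONKEY_AGENT_NAME) picks up the per-agent override added in core/config below. The dict and helper here are illustrative stand-ins; the model names and temperatures are the ones introduced by this commit.

# Illustrative sketch only -- not the real get_llm()/core.config code.
DEFAULT_AGENT_NAME = "default"
CODE_MONKEY_AGENT_NAME = "CodeMonkey"

agent_configs = {
    DEFAULT_AGENT_NAME: {"model": "gpt-4o-2024-05-13", "temperature": 0.5},
    CODE_MONKEY_AGENT_NAME: {"model": "gpt-4-0125-preview", "temperature": 0.0},
}


def resolve_agent_config(name: str = DEFAULT_AGENT_NAME) -> dict:
    """Return the config for `name`, falling back to the default agent entry."""
    return agent_configs.get(name, agent_configs[DEFAULT_AGENT_NAME])


# CodeMonkey now asks for its own entry, while Troubleshooter._get_route_files()
# (next file) drops its agent name and falls back to the default entry.
assert resolve_agent_config(CODE_MONKEY_AGENT_NAME)["model"] == "gpt-4-0125-preview"
assert resolve_agent_config("Troubleshooter.get_route_files")["model"] == "gpt-4o-2024-05-13"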

View File

@@ -7,7 +7,6 @@ from core.agents.base import BaseAgent
 from core.agents.convo import AgentConvo
 from core.agents.mixins import IterationPromptMixin
 from core.agents.response import AgentResponse
-from core.config import ROUTE_FILES_AGENT_NAME
 from core.db.models.file import File
 from core.db.models.project_state import TaskStatus
 from core.llm.parser import JSONParser, OptionalCodeBlockParser
@@ -158,7 +157,7 @@ class Troubleshooter(IterationPromptMixin, BaseAgent):
     async def _get_route_files(self) -> list[File]:
         """Returns the list of file paths that have routes defined in them."""
-        llm = self.get_llm(ROUTE_FILES_AGENT_NAME)
+        llm = self.get_llm()
         convo = AgentConvo(self).template("get_route_files").require_schema(RouteFilePaths)
         file_list = await llm(convo, parser=JSONParser(RouteFilePaths))
         route_files: set[str] = set(file_list.files)

View File

@@ -34,8 +34,8 @@ IGNORE_SIZE_THRESHOLD = 50000 # 50K+ files are ignored by default
 # Agents with sane setup in the default configuration
 DEFAULT_AGENT_NAME = "default"
+CODE_MONKEY_AGENT_NAME = "CodeMonkey"
 DESCRIBE_FILES_AGENT_NAME = "CodeMonkey.describe_files"
-ROUTE_FILES_AGENT_NAME = "Troubleshooter.get_route_files"
 # Endpoint for the external documentation
 EXTERNAL_DOCUMENTATION_API = "http://docs-pythagora-io-439719575.us-east-1.elb.amazonaws.com"
@@ -111,7 +111,7 @@ class AgentLLMConfig(_StrictModel):
""" """
provider: LLMProvider = LLMProvider.OPENAI provider: LLMProvider = LLMProvider.OPENAI
model: str = Field(description="Model to use", default="gpt-4-0125-preview") model: str = Field(description="Model to use", default="gpt-4o-2024-05-13")
temperature: float = Field( temperature: float = Field(
default=0.5, default=0.5,
description="Temperature to use for sampling", description="Temperature to use for sampling",
@@ -309,8 +309,8 @@ class Config(_StrictModel):
     agent: dict[str, AgentLLMConfig] = Field(
         default={
             DEFAULT_AGENT_NAME: AgentLLMConfig(),
+            CODE_MONKEY_AGENT_NAME: AgentLLMConfig(model="gpt-4-0125-preview", temperature=0.0),
             DESCRIBE_FILES_AGENT_NAME: AgentLLMConfig(model="gpt-3.5-turbo", temperature=0.0),
-            ROUTE_FILES_AGENT_NAME: AgentLLMConfig(model="gpt-4o", temperature=0.0),
         }
     )
     prompt: PromptConfig = PromptConfig()
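
Taken together, the built-in defaults presumably end up as the mapping below once this hunk lands. This is a sketch assembled from the values visible in the diff above, not copied from the final file; the names and AgentLLMConfig class are the ones defined in core/config/__init__.py.

from core.config import (
    CODE_MONKEY_AGENT_NAME,
    DEFAULT_AGENT_NAME,
    DESCRIBE_FILES_AGENT_NAME,
    AgentLLMConfig,
)

# Sketch of the post-commit defaults (values taken from the hunks above).
default_agents = {
    DEFAULT_AGENT_NAME: AgentLLMConfig(),  # openai, gpt-4o-2024-05-13, temperature 0.5
    CODE_MONKEY_AGENT_NAME: AgentLLMConfig(model="gpt-4-0125-preview", temperature=0.0),
    DESCRIBE_FILES_AGENT_NAME: AgentLLMConfig(model="gpt-3.5-turbo", temperature=0.0),
}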

View File

@@ -28,7 +28,7 @@
"agent": { "agent": {
"default": { "default": {
"provider": "openai", "provider": "openai",
"model": "gpt-4o", "model": "gpt-4o-2024-05-13",
"temperature": 0.5 "temperature": 0.5
}, },
"CodeMonkey": { "CodeMonkey": {

View File

@@ -65,7 +65,7 @@ def test_builtin_defaults():
     config = ConfigLoader.from_json("{}")
     assert config.llm_for_agent().provider == LLMProvider.OPENAI
-    assert config.llm_for_agent().model == "gpt-4-0125-preview"
+    assert config.llm_for_agent().model == "gpt-4o-2024-05-13"
     assert config.llm_for_agent().base_url is None
     assert config.llm_for_agent().api_key is None