Add sending of project stages to the extension

Author: LeonOstrez
Date: 2025-01-08 18:37:28 +01:00
parent d31bbde63a
commit 6b1e9a2c54
13 changed files with 49 additions and 28 deletions

View File

@@ -16,7 +16,6 @@ from core.templates.registry import (
     PROJECT_TEMPLATES,
     ProjectTemplateEnum,
 )
-from core.ui.base import ProjectStage
 
 ARCHITECTURE_STEP_NAME = "Project architecture"
 WARN_SYSTEM_DEPS = ["docker", "kubernetes", "microservices"]
@@ -97,8 +96,6 @@ class Architect(BaseAgent):
     display_name = "Architect"
 
     async def run(self) -> AgentResponse:
-        await self.ui.send_project_stage(ProjectStage.ARCHITECTURE)
-
         spec = self.current_state.specification.clone()
 
         if spec.example_project:

View File

@@ -12,7 +12,7 @@ from core.db.models.project_state import IterationStatus
 from core.llm.parser import JSONParser
 from core.log import get_logger
 from core.telemetry import telemetry
-from core.ui.base import pythagora_source
+from core.ui.base import ProjectStage, pythagora_source
 
 log = get_logger(__name__)
@@ -169,6 +169,11 @@ class BugHunter(ChatWithBreakdownMixin, BaseAgent):
             "continue": "Continue without feedback",  # DO NOT CHANGE THIS TEXT without changing it in the extension (it is hardcoded)
             "start_pair_programming": "Start Pair Programming",
         }
+        await self.ui.send_project_stage(
+            {
+                "stage": ProjectStage.ADDITIONAL_FEEDBACK,
+            }
+        )
         user_feedback = await self.ask_question(
             "Please add any additional feedback that could help Pythagora solve this bug",
             buttons=buttons,

View File

@@ -15,6 +15,7 @@ from core.db.models.specification import Complexity
 from core.llm.parser import JSONParser
 from core.log import get_logger
 from core.telemetry import telemetry
+from core.ui.base import ProjectStage
 
 log = get_logger(__name__)
@@ -307,6 +308,12 @@ class Developer(ChatWithBreakdownMixin, RelevantFilesMixin, BaseAgent):
         description = self.current_state.current_task["description"]
         task_index = self.current_state.tasks.index(self.current_state.current_task) + 1
 
+        await self.ui.send_project_stage(
+            {
+                "stage": ProjectStage.STARTING_TASK,
+                "task_index": task_index,
+            }
+        )
         await self.send_message(f"Starting task #{task_index} with the description:\n\n" + description)
 
         if self.current_state.run_command:
             await self.ui.send_run_command(self.current_state.run_command)

View File

@@ -9,6 +9,7 @@ from core.llm.parser import DescriptiveCodeBlockParser
 from core.log import get_logger
 from core.telemetry import telemetry
 from core.templates.registry import PROJECT_TEMPLATES
+from core.ui.base import ProjectStage
 
 log = get_logger(__name__)
@@ -106,6 +107,7 @@ class Frontend(FileDiffMixin, BaseAgent):
         """
         Continues building the frontend of the app after the initial user input.
         """
+        await self.ui.send_project_stage({"stage": ProjectStage.FRONTEND})
         await self.send_message("Continuing to build UI... This may take a couple of minutes")
 
         llm = self.get_llm(FRONTEND_AGENT_NAME)

View File

@@ -9,6 +9,7 @@ from core.agents.response import AgentResponse
 from core.config import GET_RELEVANT_FILES_AGENT_NAME, TASK_BREAKDOWN_AGENT_NAME, TROUBLESHOOTER_BUG_REPORT
 from core.llm.parser import JSONParser
 from core.log import get_logger
+from core.ui.base import ProjectStage
 
 log = get_logger(__name__)
@@ -65,6 +66,13 @@ class ChatWithBreakdownMixin:
         llm = self.get_llm(TASK_BREAKDOWN_AGENT_NAME, stream_output=True)
 
         while True:
+            await self.ui.send_project_stage(
+                {
+                    "stage": ProjectStage.BREAKDOWN_CHAT,
+                    "agent": self.agent_type,
+                }
+            )
+
             chat = await self.ask_question(
                 "Are you happy with the breakdown? Now is a good time to ask questions or suggest changes.",
                 buttons={"yes": "Yes, looks good!"},
@@ -84,7 +92,7 @@
         return breakdown
 
 
-class IterationPromptMixin(ChatWithBreakdownMixin):
+class IterationPromptMixin:
     """
     Provides a method to find a solution to a problem based on user feedback.

View File

@@ -24,7 +24,6 @@ from core.agents.troubleshooter import Troubleshooter
 from core.db.models.project_state import IterationStatus, TaskStatus
 from core.log import get_logger
 from core.telemetry import telemetry
-from core.ui.base import ProjectStage
 
 log = get_logger(__name__)
@@ -363,16 +362,10 @@ class Orchestrator(BaseAgent, GitMixin):
         await self.ui.loading_finished()
 
         if self.current_state.epics:
-            await self.ui.send_project_stage(ProjectStage.CODING)
-
             if len(self.current_state.epics) > 3:
                 # We only want to send previous features, ie. exclude current one and the initial project (first epic)
                 await self.ui.send_features_list([e["description"] for e in self.current_state.epics[2:-1]])
-        elif self.current_state.specification.description:
-            await self.ui.send_project_stage(ProjectStage.ARCHITECTURE)
-        else:
-            await self.ui.send_project_stage(ProjectStage.DESCRIPTION)
 
         if self.current_state.specification.description:
             await self.ui.send_project_description(self.current_state.specification.description)

View File

@@ -55,8 +55,6 @@ class TechLead(RelevantFilesMixin, BaseAgent):
             self.create_initial_project_epic()
             return AgentResponse.done(self)
 
-        await self.ui.send_project_stage(ProjectStage.CODING)
-
         # if self.current_state.specification.templates and len(self.current_state.files) < 2:
         #     await self.apply_project_templates()
         #     self.next_state.action = "Apply project templates"
@@ -260,6 +258,7 @@ class TechLead(RelevantFilesMixin, BaseAgent):
             self.next_state.tasks,
         )
 
+        await self.ui.send_project_stage({"stage": ProjectStage.OPEN_PLAN})
         response = await self.ask_question(
             "Open and edit your development plan in the Progress tab",
             buttons={"done_editing": "I'm done editing, the plan looks good"},

View File

@@ -14,7 +14,7 @@ from core.db.models.project_state import IterationStatus, TaskStatus
 from core.llm.parser import JSONParser, OptionalCodeBlockParser
 from core.log import get_logger
 from core.telemetry import telemetry
-from core.ui.base import pythagora_source
+from core.ui.base import ProjectStage, pythagora_source
 
 log = get_logger(__name__)
@@ -74,6 +74,7 @@ class Troubleshooter(ChatWithBreakdownMixin, IterationPromptMixin, RelevantFiles
             self.next_state.flag_tasks_as_modified()
             return AgentResponse.done(self)
         else:
+            await self.ui.send_project_stage({"stage": ProjectStage.TEST_APP})
             await self.ui.send_message("Test the app by following these steps:", source=pythagora_source)
 
         await self.send_message("")
@@ -277,6 +278,7 @@ class Troubleshooter(ChatWithBreakdownMixin, IterationPromptMixin, RelevantFiles
                 break
             elif user_response.button == "change":
+                await self.ui.send_project_stage({"stage": ProjectStage.DESCRIBE_CHANGE})
                 user_description = await self.ask_question(
                     "Please describe the change you want to make to the project specification (one at a time)",
                     buttons={"back": "Back"},
@@ -288,6 +290,7 @@ class Troubleshooter(ChatWithBreakdownMixin, IterationPromptMixin, RelevantFiles
                 break
             elif user_response.button == "bug":
+                await self.ui.send_project_stage({"stage": ProjectStage.DESCRIBE_ISSUE})
                 user_description = await self.ask_question(
                     "Please describe the issue you found (one at a time) and share any relevant server logs",
                     extra_info="collect_logs",

View File

@@ -15,7 +15,7 @@ from core.llm.base import APIError, BaseLLMClient
 from core.log import get_logger
 from core.state.state_manager import StateManager
 from core.telemetry import telemetry
-from core.ui.base import UIBase, UIClosedError, UserInput, pythagora_source
+from core.ui.base import ProjectStage, UIBase, UIClosedError, UserInput, pythagora_source
 
 log = get_logger(__name__)
@@ -188,6 +188,7 @@ async def start_new_project(sm: StateManager, ui: UIBase) -> bool:
     while True:
         try:
+            await ui.send_project_stage({"stage": ProjectStage.PROJECT_NAME})
             user_input = await ui.ask_question(
                 "What is the project name?",
                 allow_empty=False,

View File

@@ -5,9 +5,15 @@ from pydantic import BaseModel
 class ProjectStage(str, Enum):
     DESCRIPTION = "project_description"
     ARCHITECTURE = "architecture"
     CODING = "coding"
     PROJECT_NAME = "project_name"
     FRONTEND = "frontend"
     OPEN_PLAN = "open_plan"
+    STARTING_TASK = "starting_task"
+    BREAKDOWN_CHAT = "breakdown_chat"
+    TEST_APP = "test_app"
+    ADDITIONAL_FEEDBACK = "additional_feedback"
+    DESCRIBE_CHANGE = "describe_change"
+    DESCRIBE_ISSUE = "describe_issue"
 
 
 class UIClosedError(Exception):
@@ -206,11 +212,11 @@
         """
         raise NotImplementedError()
 
-    async def send_project_stage(self, stage: ProjectStage):
+    async def send_project_stage(self, data: dict):
         """
         Send a project stage to the UI.
 
-        :param stage: Project stage.
+        :param data: Project stage data.
         """
         raise NotImplementedError()
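Note: with this change, send_project_stage takes a dict instead of a bare ProjectStage value, so call sites can attach extra fields alongside the stage. A minimal sketch of the new calling convention, based on the call sites added in this commit (the surrounding agent wiring is assumed):

from core.ui.base import ProjectStage, UIBase

async def report_stages(ui: UIBase) -> None:
    # Plain stage notification, as used by the Frontend and TechLead agents.
    await ui.send_project_stage({"stage": ProjectStage.FRONTEND})

    # A stage can also carry extra context, e.g. the 1-based task index sent by Developer.
    await ui.send_project_stage({"stage": ProjectStage.STARTING_TASK, "task_index": 3})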

View File

@@ -3,7 +3,7 @@ from typing import Optional
 from prompt_toolkit.shortcuts import PromptSession
 
 from core.log import get_logger
-from core.ui.base import ProjectStage, UIBase, UIClosedError, UISource, UserInput
+from core.ui.base import UIBase, UIClosedError, UISource, UserInput
 
 log = get_logger(__name__)
@@ -108,7 +108,7 @@ class PlainConsoleUI(UIBase):
                 return UserInput(button=None, text=choice)
             print("Please provide a valid input")
 
-    async def send_project_stage(self, stage: ProjectStage):
+    async def send_project_stage(self, data: dict):
         pass
 
     async def send_epics_and_tasks(

View File

@@ -8,7 +8,7 @@ from pydantic import BaseModel, ValidationError
 from core.config import LocalIPCConfig
 from core.log import get_logger
-from core.ui.base import ProjectStage, UIBase, UIClosedError, UISource, UserInput
+from core.ui.base import UIBase, UIClosedError, UISource, UserInput
 
 VSCODE_EXTENSION_HOST = "localhost"
 VSCODE_EXTENSION_PORT = 8125
@@ -356,8 +356,8 @@ class IPCClientUI(UIBase):
             # Empty answer which we don't allow, treat as user cancelled the input
             return UserInput(cancelled=True)
 
-    async def send_project_stage(self, stage: ProjectStage):
-        await self._send(MessageType.INFO, content=json.dumps({"project_stage": stage.value}))
+    async def send_project_stage(self, data: dict):
+        await self._send(MessageType.INFO, content=json.dumps(data))
 
     async def send_epics_and_tasks(
         self,
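Because ProjectStage subclasses str, json.dumps encodes its members as their string values, so the extension now receives the whole payload dict instead of the previous {"project_stage": ...} wrapper. A rough illustration of the resulting message content; the values here are made up for the example:

import json

from core.ui.base import ProjectStage

# str-based enum members serialize as their underlying string value.
content = json.dumps({"stage": ProjectStage.STARTING_TASK, "task_index": 3})
print(content)  # {"stage": "starting_task", "task_index": 3}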

View File

@@ -1,7 +1,7 @@
 from typing import Optional
 
 from core.log import get_logger
-from core.ui.base import ProjectStage, UIBase, UISource, UserInput
+from core.ui.base import UIBase, UISource, UserInput
 
 log = get_logger(__name__)
@@ -101,7 +101,7 @@ class VirtualUI(UIBase):
         else:
             return UserInput(text="")
 
-    async def send_project_stage(self, stage: ProjectStage):
+    async def send_project_stage(self, data: dict):
         pass
 
     async def send_epics_and_tasks(
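For UI backends that do surface stages (unlike the no-op console and virtual implementations above), the updated hook only needs to accept the payload dict. A hypothetical sketch; LoggingUI is illustrative and not part of this codebase:

from core.log import get_logger
from core.ui.base import UIBase

log = get_logger(__name__)


class LoggingUI(UIBase):
    # Hypothetical backend that just records stage transitions.
    async def send_project_stage(self, data: dict):
        # "stage" holds a ProjectStage (a str subclass); other keys are optional context.
        log.info("Project stage: %s", data)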