Refactored saving and loading development steps to be consistent with command runs and user inputs

Zvonimir Sabljic
2023-08-09 11:35:32 +02:00
parent d5565b7b47
commit b52e567480
2 changed files with 37 additions and 43 deletions
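For orientation, here is a minimal, dependency-free sketch of the save/load pattern this commit standardizes across development steps, command runs and user inputs: hash the inputs that identify a step, upsert the record under that hash, and look it up later by re-hashing the same inputs. The dict-backed "table" and the helper bodies below are illustrative stand-ins, not the project's actual peewee-based implementation.

```python
import hashlib
import json

_TABLE = {}  # hash_id -> record dict, standing in for a database table


def hash_data(data):
    # Stable hash of the identifying fields (same idea as the project's hash_data helper).
    packed = json.dumps(data, sort_keys=True).encode("utf-8")
    return hashlib.sha256(packed).hexdigest()


def hash_and_save_step(hash_data_args, data_fields, message):
    # Upsert: overwrite the record on a hash collision, insert it otherwise.
    hash_id = hash_data(hash_data_args)
    record = {"hash_id": hash_id, **data_fields}
    _TABLE[hash_id] = record
    print(f"{message} with hash_id {hash_id[:8]}")
    return record


def get_step_from_hash_id(hash_data_args):
    # Re-hash the same identifying inputs to find a previously saved record.
    return _TABLE.get(hash_data(hash_data_args))


if __name__ == "__main__":
    args = {"prompt_path": "dev/task.prompt", "llm_req_num": 1}
    hash_and_save_step(args, {"llm_response": "..."}, "Saved Development Step")
    assert get_step_from_hash_id(args)["llm_response"] == "..."
```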

View File

@@ -138,29 +138,6 @@ def get_progress_steps(app_id, step=None):
     return steps
-def save_development_step(app_id, prompt_path, prompt_data, llm_req_num, messages, response, previous_step=None):
-    app = get_app(app_id)
-    hash_id = hash_data({
-        'prompt_path': prompt_path,
-        'prompt_data': {} if prompt_data is None else {k: v for k, v in prompt_data.items() if k not in PROMPT_DATA_TO_IGNORE},
-        'llm_req_num': llm_req_num
-    })
-    try:
-        inserted_id = (DevelopmentSteps
-                       .insert(app=app, hash_id=hash_id, messages=messages, llm_response=response, previous_dev_step=previous_step)
-                       .on_conflict(conflict_target=[DevelopmentSteps.app, DevelopmentSteps.hash_id],
-                                    preserve=[DevelopmentSteps.messages, DevelopmentSteps.llm_response],
-                                    update={})
-                       .execute())
-        dev_step = DevelopmentSteps.get_by_id(inserted_id)
-        print(colored(f"Saved DEV step => {dev_step.id}", "yellow"))
-    except IntegrityError:
-        print(f"A Development Step with hash_id {hash_id} already exists.")
-        return None
-    return dev_step
 def get_db_model_from_hash_id(data_to_hash, model, app_id):
     hash_id = hash_data(data_to_hash)
     try:
@@ -178,16 +155,19 @@ def hash_and_save_step(Model, app_id, hash_data_args, data_fields, message):
         'app': app,
         'hash_id': hash_id
     }
+    fields_to_preserve = [getattr(Model, field) for field in list(data_to_insert.keys())]
+    for field, value in data_fields.items():
+        data_to_insert[field] = value
     try:
         inserted_id = (Model
-                       .insert(**data_to_insert)
-                       .on_conflict(conflict_target=[Model.app, Model.hash_id],
-                                    preserve=[],
-                                    update=data_fields)
-                       .execute())
+                       .insert(**data_to_insert)
+                       .on_conflict(conflict_target=[Model.app, Model.hash_id],
+                                    preserve=fields_to_preserve,
+                                    update={})
+                       .execute())
         record = Model.get_by_id(inserted_id)
         print(colored(f"{message} with id {record.id}", "yellow"))
@@ -197,6 +177,33 @@ def hash_and_save_step(Model, app_id, hash_data_args, data_fields, message):
     return record
+def save_development_step(project, prompt_path, prompt_data, messages, llm_response):
+    hash_data_args = {
+        'prompt_path': prompt_path,
+        'prompt_data': {} if prompt_data is None else {k: v for k, v in prompt_data.items() if k not in PROMPT_DATA_TO_IGNORE},
+        'llm_req_num': project.llm_req_num
+    }
+    data_fields = {
+        'messages': messages,
+        'llm_response': llm_response,
+        'previous_dev_step': project.checkpoints['last_development_step'],
+    }
+    development_step = hash_and_save_step(DevelopmentSteps, project.args['app_id'], hash_data_args, data_fields, "Saved Development Step")
+    project.checkpoints['last_development_step'] = development_step
+    return development_step
+def get_development_step_from_hash_id(project, prompt_path, prompt_data, llm_req_num):
+    data_to_hash = {
+        'prompt_path': prompt_path,
+        'prompt_data': {} if prompt_data is None else {k: v for k, v in prompt_data.items() if k not in PROMPT_DATA_TO_IGNORE},
+        'llm_req_num': llm_req_num
+    }
+    development_step = get_db_model_from_hash_id(data_to_hash, DevelopmentSteps, project.args['app_id'])
+    return development_step
 def save_command_run(project, command, cli_response):
     hash_data_args = {
         'command': command,
@@ -242,19 +249,6 @@ def get_user_input_from_hash_id(project, query):
     user_input = get_db_model_from_hash_id(data_to_hash, UserInputs, project.args['app_id'])
     return user_input
-def get_development_step_from_hash_id(app_id, prompt_path, prompt_data, llm_req_num):
-    hash_id = hash_data({
-        'prompt_path': prompt_path,
-        'prompt_data': {} if prompt_data is None else {k: v for k, v in prompt_data.items() if k not in PROMPT_DATA_TO_IGNORE},
-        'llm_req_num': llm_req_num
-    })
-    try:
-        dev_step = DevelopmentSteps.get((DevelopmentSteps.hash_id == hash_id) & (DevelopmentSteps.app == app_id))
-    except DoesNotExist:
-        return None
-    return dev_step
 def delete_all_subsequent_steps(project):
     delete_subsequent_steps(DevelopmentSteps, project.checkpoints['last_development_step'], 'previous_dev_step')
     delete_subsequent_steps(CommandRuns, project.checkpoints['last_command_run'], 'previous_command_run')
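The refactored hash_and_save_step builds its upsert with peewee's insert().on_conflict(). Below is a rough standalone illustration of that mechanism with a hypothetical Step model (not the project's schema), assuming peewee 3.x is installed and SQLite is new enough for ON CONFLICT ... DO UPDATE (3.24+).

```python
from peewee import CharField, Model, SqliteDatabase, TextField

db = SqliteDatabase(":memory:")


class Step(Model):
    hash_id = CharField(unique=True)   # plays the role of the (app, hash_id) key
    payload = TextField(null=True)

    class Meta:
        database = db


db.connect()
db.create_tables([Step])

for payload in ("first run", "second run"):
    # `preserve` tells peewee: on conflict, keep the values supplied by this
    # insert (EXCLUDED.payload) instead of raising IntegrityError.
    (Step
     .insert(hash_id="abc", payload=payload)
     .on_conflict(conflict_target=[Step.hash_id],
                  preserve=[Step.payload],
                  update={})
     .execute())

print(Step.select().count())                    # 1 -- the row was upserted
print(Step.get(Step.hash_id == "abc").payload)  # "second run"
```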

View File

@@ -30,7 +30,7 @@ class AgentConvo:
         # check if we already have the LLM response saved
         self.agent.project.llm_req_num += 1
-        development_step = get_development_step_from_hash_id(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num)
+        development_step = get_development_step_from_hash_id(self.agent.project, prompt_path, prompt_data, self.agent.project.llm_req_num)
         if development_step is not None and self.agent.project.skip_steps:
             # if we do, use it
             if self.agent.project.skip_until_dev_step and str(development_step.id) == self.agent.project.skip_until_dev_step:
@@ -44,7 +44,7 @@ class AgentConvo:
         else:
             # if we don't, get the response from LLM
             response = create_gpt_chat_completion(self.messages, self.high_level_step, function_calls=function_calls)
-            development_step = save_development_step(self.agent.project.args['app_id'], prompt_path, prompt_data, self.agent.project.llm_req_num, self.messages, response, self.agent.project.checkpoints['last_development_step'])
+            development_step = save_development_step(self.agent.project, prompt_path, prompt_data, self.messages, response)
             self.agent.project.checkpoints['last_development_step'] = development_step
             self.agent.project.save_files_snapshot(development_step.id)
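The two hunks above implement a replay cache: each LLM request is identified by its prompt and the running request counter, and a previously saved step is reused instead of calling the LLM again. A self-contained sketch of that flow, with stand-in names rather than the real Project/AgentConvo API:

```python
from dataclasses import dataclass, field


@dataclass
class FakeProject:
    llm_req_num: int = 0
    skip_steps: bool = True
    saved_steps: dict = field(default_factory=dict)  # (prompt_path, req_num) -> response


def send_message(project, prompt_path, messages, call_llm):
    project.llm_req_num += 1
    key = (prompt_path, project.llm_req_num)

    cached = project.saved_steps.get(key)
    if cached is not None and project.skip_steps:
        return cached                      # replay the saved LLM response

    response = call_llm(messages)          # only hit the LLM on a cache miss
    project.saved_steps[key] = response
    return response


project = FakeProject()
print(send_message(project, "dev/task.prompt", ["hi"], lambda m: "llm says hi"))

project.llm_req_num = 0   # simulate re-running the same sequence of steps
print(send_message(project, "dev/task.prompt", ["hi"], lambda m: "fresh call"))
# prints "llm says hi" again -- the saved step was replayed, the LLM was not called
```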