Merge pull request #600 from Pythagora-io/fc

add storing of function call message to db
Senko Rašić
2024-01-30 20:25:07 -08:00
committed by GitHub
3 changed files with 13 additions and 6 deletions

View File

@@ -64,7 +64,7 @@ class AgentConvo:
         try:
             self.replace_files()
             response = create_gpt_chat_completion(self.messages, self.high_level_step, self.agent.project,
-                                                   function_calls=function_calls)
+                                                   function_calls=function_calls, prompt_data=prompt_data)
         except TokenLimitError as e:
             save_development_step(self.agent.project, prompt_path, prompt_data, self.messages, '', str(e))
             raise e

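Note on the hunk above: `prompt_data` is the same dict that `save_development_step` receives (visible here in the `except` branch), so anything `create_gpt_chat_completion` attaches to it travels along and gets persisted with the development step. A minimal sketch of its shape, assuming the layout described in the docstring added later in this PR (the values are illustrative, not taken from the code):

# Illustrative only: approximate shape of prompt_data, per the docstring
# added in this PR; the real keys and values come from the prompt renderer.
prompt_data = {
    'prompt': 'rendered prompt text',
    'variables': {'variable_name': 'variable_value'},
}

# After this change, create_gpt_chat_completion() may additionally attach:
#   prompt_data['function_call_message'] = {'role': 'user', 'content': '...'}
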
View File

@@ -36,7 +36,7 @@ class FunctionCallSet(TypedDict):
 def add_function_calls_to_request(gpt_data, function_calls: Union[FunctionCallSet, None]):
     if function_calls is None:
-        return
+        return None

     model: str = gpt_data['model']
     is_instruct = 'llama' in model or 'anthropic' in model
@@ -50,10 +50,13 @@ def add_function_calls_to_request(gpt_data, function_calls: Union[FunctionCallSet, None]):
     else:
         function_call = function_calls['definitions'][0]['name']

-    gpt_data['messages'].append({
+    function_call_message = {
         'role': 'user',
         'content': prompter.prompt('', function_calls['definitions'], function_call)
-    })
+    }
+    gpt_data['messages'].append(function_call_message)
+    return function_call_message


 def parse_agent_response(response, function_calls: Union[FunctionCallSet, None]):

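The behavioural change in the hunk above is that `add_function_calls_to_request` now hands back the exact message dict it appended, instead of discarding it, and returns None when there is nothing to add. A minimal, self-contained sketch of that contract, not the real implementation (the placeholder content stands in for what `prompter.prompt()` renders):

from typing import Optional


def add_function_calls_to_request_sketch(gpt_data: dict, function_calls: Optional[dict]) -> Optional[dict]:
    """Simplified stand-in: append the function-call prompt message and return it."""
    if function_calls is None:
        return None

    # The real helper renders the definitions with a prompter; a plain
    # string stands in for that rendered content here.
    names = [d['name'] for d in function_calls['definitions']]
    function_call_message = {
        'role': 'user',
        'content': f'Available functions: {names}',
    }
    gpt_data['messages'].append(function_call_message)
    return function_call_message


gpt_data = {'model': 'gpt-4', 'messages': []}
message = add_function_calls_to_request_sketch(gpt_data, {'definitions': [{'name': 'implement_changes'}]})
assert gpt_data['messages'][-1] is message                             # same dict, so the caller can record it
assert add_function_calls_to_request_sketch(gpt_data, None) is None    # unchanged no-op path
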
View File

@@ -92,7 +92,8 @@ def test_api_access(project) -> bool:
 def create_gpt_chat_completion(messages: List[dict], req_type, project,
-                               function_calls: FunctionCallSet = None):
+                               function_calls: FunctionCallSet = None,
+                               prompt_data: dict = None):
     """
     Called from:
     - AgentConvo.send_message() - these calls often have `function_calls`, usually from `pilot/const/function_calls.py`
@@ -105,6 +106,7 @@ def create_gpt_chat_completion(messages: List[dict], req_type, project,
     :param project: project
     :param function_calls: (optional) {'definitions': [{ 'name': str }, ...]}
         see `IMPLEMENT_CHANGES` etc. in `pilot/const/function_calls.py`
+    :param prompt_data: (optional) { 'prompt': str, 'variables': { 'variable_name': 'variable_value', ... } }
     :return: {'text': new_code}
         or if `function_calls` param provided
         {'function_calls': {'name': str, arguments: {...}}}
@@ -130,7 +132,9 @@ def create_gpt_chat_completion(messages: List[dict], req_type, project,
     # Advise the LLM of the JSON response schema we are expecting
     messages_length = len(messages)
-    add_function_calls_to_request(gpt_data, function_calls)
+    function_call_message = add_function_calls_to_request(gpt_data, function_calls)
+    if prompt_data is not None and function_call_message is not None:
+        prompt_data['function_call_message'] = function_call_message

     try:
         response = stream_gpt_completion(gpt_data, req_type, project)
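
Taken together, the three files give the following flow: the helper returns the message it appended, `create_gpt_chat_completion` stashes it into `prompt_data`, and `AgentConvo.send_message` already passes `prompt_data` to `save_development_step`, which is what writes it to the database. A hedged, self-contained sketch of that flow (names and return values are simplified stand-ins, not the real call chain):

def create_gpt_chat_completion_sketch(messages, req_type=None, project=None,
                                      function_calls=None, prompt_data=None):
    gpt_data = {'model': 'gpt-4', 'messages': list(messages)}

    # Stand-in for add_function_calls_to_request(): build the function-call
    # prompt message, append it to the request, and hand it back.
    function_call_message = None
    if function_calls is not None:
        function_call_message = {'role': 'user', 'content': 'rendered function definitions'}
        gpt_data['messages'].append(function_call_message)

    if prompt_data is not None and function_call_message is not None:
        # The message now travels with prompt_data.
        prompt_data['function_call_message'] = function_call_message

    return {'text': 'llm response placeholder'}   # stand-in for stream_gpt_completion()


prompt_data = {'prompt': 'rendered prompt', 'variables': {}}
create_gpt_chat_completion_sketch([], function_calls={'definitions': [{'name': 'implement_changes'}]},
                                  prompt_data=prompt_data)

# AgentConvo.send_message later hands this same prompt_data to
# save_development_step(), so the function call message is persisted
# alongside the development step.
assert 'function_call_message' in prompt_data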