Feature: enable intuitive logs summarization (#3697)

This commit is contained in:
merwanehamadi
2023-05-03 09:32:03 -07:00
committed by GitHub
parent 26c6cfeefd
commit e21917cc93
3 changed files with 29 additions and 11 deletions

View File

@@ -8,15 +8,8 @@ from autogpt.llm.api_manager import ApiManager
from autogpt.llm.base import Message
from autogpt.llm.llm_utils import create_chat_completion
from autogpt.llm.token_counter import count_message_tokens
from autogpt.log_cycle.log_cycle import PROMPT_NEXT_ACTION_FILE_NAME
from autogpt.log_cycle.log_cycle import CURRENT_CONTEXT_FILE_NAME
from autogpt.logs import logger
from autogpt.memory_management.store_memory import (
save_memory_trimmed_from_context_window,
)
from autogpt.memory_management.summary_memory import (
get_newly_trimmed_messages,
update_running_summary,
)
cfg = Config()
@@ -153,6 +146,10 @@ def chat_with_ai(
# Move to the next most recent message in the full message history
next_message_to_add_index -= 1
from autogpt.memory_management.summary_memory import (
get_newly_trimmed_messages,
update_running_summary,
)
# Insert Memories
if len(full_message_history) > 0:
@@ -164,7 +161,9 @@ def chat_with_ai(
current_context=current_context,
last_memory_index=agent.last_memory_index,
)
agent.summary_memory = update_running_summary(
agent,
current_memory=agent.summary_memory,
new_events=newly_trimmed_messages,
)
@@ -237,7 +236,7 @@ def chat_with_ai(
agent.created_at,
agent.cycle_count,
current_context,
PROMPT_NEXT_ACTION_FILE_NAME,
CURRENT_CONTEXT_FILE_NAME,
)
# TODO: use a model defined elsewhere, so that model can contain

View File

@@ -6,8 +6,10 @@ from autogpt.logs import logger
DEFAULT_PREFIX = "agent"
FULL_MESSAGE_HISTORY_FILE_NAME = "full_message_history.json"
PROMPT_NEXT_ACTION_FILE_NAME = "prompt_next_action.json"
CURRENT_CONTEXT_FILE_NAME = "current_context.json"
NEXT_ACTION_FILE_NAME = "next_action.json"
PROMPT_SUMMARY_FILE_NAME = "prompt_summary.json"
SUMMARY_FILE_NAME = "summary.txt"
class LogCycleHandler:

View File

@@ -2,8 +2,10 @@ import copy
import json
from typing import Dict, List, Tuple
from autogpt.agent import Agent
from autogpt.config import Config
from autogpt.llm.llm_utils import create_chat_completion
from autogpt.log_cycle.log_cycle import PROMPT_SUMMARY_FILE_NAME, SUMMARY_FILE_NAME
cfg = Config()
@@ -46,7 +48,7 @@ def get_newly_trimmed_messages(
def update_running_summary(
current_memory: str, new_events: List[Dict[str, str]]
agent: Agent, current_memory: str, new_events: List[Dict[str, str]]
) -> str:
"""
This function takes a list of dictionaries representing new events and combines them with the current summary,
@@ -110,9 +112,24 @@ Latest Development:
"content": prompt,
}
]
agent.log_cycle_handler.log_cycle(
agent.config.ai_name,
agent.created_at,
agent.cycle_count,
messages,
PROMPT_SUMMARY_FILE_NAME,
)
current_memory = create_chat_completion(messages, cfg.fast_llm_model)
agent.log_cycle_handler.log_cycle(
agent.config.ai_name,
agent.created_at,
agent.cycle_count,
current_memory,
SUMMARY_FILE_NAME,
)
message_to_return = {
"role": "system",
"content": f"This reminds you of these events from your past: \n{current_memory}",