AutoGPT: Fix prompt state pollution

This commit is contained in:
Reinier van der Leer
2023-10-07 15:09:43 -07:00
parent a00d880a3f
commit 683257b697
4 changed files with 17 additions and 11 deletions

View File

@@ -108,12 +108,13 @@ class Agent(
def build_prompt(
self,
*args,
-        extra_messages: [list[ChatMessage]] = None,
+        extra_messages: Optional[list[ChatMessage]] = None,
include_os_info: Optional[bool] = None,
**kwargs,
) -> ChatPrompt:
-        if extra_messages is None:
+        if not extra_messages:
extra_messages = []
# Clock
extra_messages.append(
ChatMessage.system(f"The current time and date is {time.strftime('%c')}"),

View File

@@ -256,9 +256,9 @@ class BaseAgent(Configurable[BaseAgentSettings], ABC):
Params:
cycle_instruction: The final instruction for a thinking cycle
"""
-        if extra_commands is None:
+        if not extra_commands:
extra_commands = []
-        if extra_messages is None:
+        if not extra_messages:
extra_messages = []
# Apply additions from plugins

View File

@@ -1,6 +1,6 @@
from __future__ import annotations
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, Optional
if TYPE_CHECKING:
from autogpt.core.prompting import ChatPrompt
@@ -49,9 +49,12 @@ class ContextMixin:
def build_prompt(
self,
*args: Any,
-        extra_messages: list[ChatMessage] = [],
+        extra_messages: Optional[list[ChatMessage]] = None,
**kwargs: Any,
) -> ChatPrompt:
+        if not extra_messages:
+            extra_messages = []
# Add context section to prompt
if self.context:
extra_messages.insert(

View File

@@ -198,7 +198,7 @@ class OneShotAgentPromptStrategy(PromptStrategy):
max_prompt_tokens: int,
count_tokens: Callable[[str], int],
count_message_tokens: Callable[[ChatMessage | list[ChatMessage]], int],
-        extra_messages: list[ChatMessage] = [],
+        extra_messages: Optional[list[ChatMessage]] = None,
**extras,
) -> ChatPrompt:
"""Constructs and returns a prompt with the following structure:
@@ -209,12 +209,14 @@ class OneShotAgentPromptStrategy(PromptStrategy):
Params:
cycle_instruction: The final instruction for a thinking cycle
"""
+        if not extra_messages:
+            extra_messages = []
system_prompt = self.build_system_prompt(
-            ai_config,
-            ai_directives,
-            commands,
-            include_os_info,
+            ai_config=ai_config,
+            ai_directives=ai_directives,
+            commands=commands,
+            include_os_info=include_os_info,
)
system_prompt_tlength = count_message_tokens(ChatMessage.system(system_prompt))