mirror of
https://github.com/All-Hands-AI/OpenHands.git
synced 2026-04-29 03:00:45 -04:00
mixin: improve logging (#2713)
* mixin: improve logging * refactor logger creation
This commit is contained in:
@@ -225,32 +225,22 @@ class LlmFileHandler(logging.FileHandler):
|
||||
self.message_counter += 1
|
||||
|
||||
|
||||
def get_llm_prompt_file_handler():
    """Build and return the file handler used for LLM prompt logging.

    The handler is created with ``delay=True`` so the underlying log
    file is not opened until the first prompt record is emitted.
    """
    handler = LlmFileHandler('prompt', delay=True)
    handler.setFormatter(llm_formatter)
    handler.setLevel(logging.DEBUG)
    return handler
def _get_llm_file_handler(name, debug_level=logging.DEBUG):
    """Create a ``LlmFileHandler`` named *name* for LLM logging.

    ``delay=True`` postpones opening the log file until the first log
    message is actually emitted, so no empty files are created.
    The handler uses the module-level ``llm_formatter`` and logs at
    *debug_level* (DEBUG by default).
    """
    handler = LlmFileHandler(name, delay=True)
    handler.setFormatter(llm_formatter)
    handler.setLevel(debug_level)
    return handler
|
||||
|
||||
def get_llm_response_file_handler():
    """Build and return the file handler used for LLM response logging.

    ``delay=True`` defers opening the log file until the first response
    record is emitted.
    """
    handler = LlmFileHandler('response', delay=True)
    handler.setFormatter(llm_formatter)
    handler.setLevel(logging.DEBUG)
    return handler
def _setup_llm_logger(name, debug_level=logging.DEBUG):
    """Create an isolated logger for LLM traffic named *name*.

    The logger does not propagate records to the root logger and writes
    exclusively through its own ``LlmFileHandler`` at *debug_level*.
    """
    llm_logger = logging.getLogger(name)
    llm_logger.setLevel(debug_level)
    llm_logger.propagate = False
    llm_logger.addHandler(_get_llm_file_handler(name, debug_level))
    return llm_logger
|
||||
|
||||
# Module-level loggers for LLM prompt/response traffic.  Each logger is
# configured exactly once via _setup_llm_logger (no propagation to the
# root logger, DEBUG level, its own LlmFileHandler).  Configuring the
# same loggers a second time with manual getLogger/addHandler calls —
# as the pre-refactor code did — would attach a duplicate handler and
# double-write every record, so only the helper-based setup remains.
llm_prompt_logger = _setup_llm_logger('prompt', logging.DEBUG)
llm_response_logger = _setup_llm_logger('response', logging.DEBUG)
||||
@@ -13,14 +13,16 @@ class SandboxProtocol(Protocol):
|
||||
# Prepare the sandbox's plugins; the bool result presumably signals
# success — confirm against concrete implementations.
def initialize_plugins(self) -> bool: ...
|
||||
|
||||
# Run *cmd* in the sandbox and return (exit_code, output); output is a
# CancellableStream when streaming (callers isinstance-check it) and a
# plain str otherwise.  The diff residue repeated the parameter line,
# which is a syntax error — the signature appears exactly once here.
def execute(
    self, cmd: str, stream: bool = False
) -> tuple[int, str | CancellableStream]: ...
|
||||
# Copy *host_src* from the host into *sandbox_dest* inside the sandbox;
# recursive=True presumably enables directory copies — confirm against
# concrete implementations.
def copy_to(self, host_src: str, sandbox_dest: str, recursive: bool = False): ...
|
||||
|
||||
|
||||
def _source_bashrc(sandbox: SandboxProtocol):
    """Source the global and user bashrc files inside the sandbox.

    Raises:
        RuntimeError: if the combined ``source`` command exits non-zero,
            including the exit code and command output in the message.
    """
    # The diff residue ran this command twice (old single-line call plus
    # the reformatted one); sourcing must happen exactly once.
    exit_code, output = sandbox.execute(
        'source /opendevin/bash.bashrc && source ~/.bashrc'
    )
    if exit_code != 0:
        raise RuntimeError(
            f'Failed to source /opendevin/bash.bashrc and ~/.bashrc with exit code {exit_code} and output: {output}'
        )
@@ -66,15 +68,16 @@ class PluginMixin:
|
||||
if isinstance(output, CancellableStream):
|
||||
total_output = ''
|
||||
for line in output:
|
||||
if line.endswith('\n'):
|
||||
line = line[:-1]
|
||||
# Removes any trailing whitespace, including \n and \r\n
|
||||
line = line.rstrip()
|
||||
logger.debug(line)
|
||||
total_output += line
|
||||
# Avoid text from lines running into each other
|
||||
total_output += line + ' '
|
||||
_exit_code = output.exit_code()
|
||||
output.close()
|
||||
if _exit_code != 0:
|
||||
raise RuntimeError(
|
||||
f'Failed to initialize plugin {requirement.name} with exit code {_exit_code} and output: {total_output}'
|
||||
f'Failed to initialize plugin {requirement.name} with exit code {_exit_code} and output: {total_output.strip()}'
|
||||
)
|
||||
logger.info(f'Plugin {requirement.name} initialized successfully')
|
||||
else:
|
||||
|
||||
Reference in New Issue
Block a user