Change debug_mode to debug in chat.py.

Toran Bruce Richards
2023-04-10 14:32:13 +01:00
parent 1d97a1b7a1
commit c300276d6d


@@ -64,7 +64,7 @@ def chat_with_ai(
         model = cfg.fast_llm_model # TODO: Change model from hardcode to argument
         # Reserve 1000 tokens for the response
-        if cfg.debug_mode:
+        if cfg.debug:
             print(f"Token limit: {token_limit}")
         send_token_limit = token_limit - 1000
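
For context, cfg here is the project's shared configuration object, and this commit only renames the attribute it reads. The sketch below is a minimal, self-contained illustration of how a debug flag like cfg.debug gates the token-limit logging; the Config class, default values, and surrounding function body are assumptions for illustration, not the repository's actual implementation.

    # Hypothetical stand-in for the project's Config object (assumption):
    # it only needs to expose the attributes read in the diff above.
    class Config:
        def __init__(self) -> None:
            self.debug = False  # attribute renamed from debug_mode in this commit
            self.fast_llm_model = "gpt-3.5-turbo"  # placeholder model name

    cfg = Config()

    def chat_with_ai(token_limit: int = 4000) -> int:
        model = cfg.fast_llm_model
        # Reserve 1000 tokens for the response; only log when debugging.
        if cfg.debug:
            print(f"Token limit: {token_limit}")
        send_token_limit = token_limit - 1000
        return send_token_limit

    cfg.debug = True
    chat_with_ai()  # prints "Token limit: 4000" because debug is enabled

Note that a rename like this is only safe if every reader of the old attribute is updated in the same change; any remaining cfg.debug_mode access would raise an AttributeError at runtime.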