Chore: clean up LLM (prompt caching, supports fn calling), leftover renames (#6095)

Engel Nyst
2025-02-01 18:14:08 +01:00
committed by GitHub
parent 3b0bbce54a
commit eb8d1600c3
21 changed files with 119 additions and 187 deletions

@@ -24,7 +24,7 @@ class RetryMixin:
A retry decorator with the parameters customizable in configuration.
"""
num_retries = kwargs.get('num_retries')
-retry_exceptions = kwargs.get('retry_exceptions')
+retry_exceptions: tuple = kwargs.get('retry_exceptions', ())
retry_min_wait = kwargs.get('retry_min_wait')
retry_max_wait = kwargs.get('retry_max_wait')
retry_multiplier = kwargs.get('retry_multiplier')
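
The empty-tuple default matters because tenacity's retry_if_exception_type checks the raised exception with isinstance against whatever value it receives: a None value would make that check raise a TypeError the first time an exception is evaluated, while () simply never matches, so the call fails without retrying. A minimal sketch of that behaviour, assuming tenacity is installed (might_fail is a hypothetical stand-in, not part of this change):

from tenacity import retry, retry_if_exception_type, stop_after_attempt

# isinstance(exc, ()) is always False, so tenacity never retries and
# re-raises the original exception on the first failure.
@retry(retry=retry_if_exception_type(()), stop=stop_after_attempt(3), reraise=True)
def might_fail() -> None:  # hypothetical call with no retryable exceptions configured
    raise ValueError('not retried: ValueError is not in the (empty) retry tuple')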
@@ -39,7 +39,9 @@ class RetryMixin:
before_sleep=before_sleep,
stop=stop_after_attempt(num_retries) | stop_if_should_exit(),
reraise=True,
-retry=(retry_if_exception_type(retry_exceptions)),
+retry=(
+    retry_if_exception_type(retry_exceptions)
+),  # retry only for these types
wait=wait_exponential(
    multiplier=retry_multiplier,
    min=retry_min_wait,
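
Taken together, the decorator assembled in this hunk follows tenacity's usual composition: a retry predicate over the configured exception types, a stop condition, and exponential back-off between attempts. Below is a self-contained sketch of the same pattern, with illustrative values standing in for the configuration kwargs and without the project-specific before_sleep and stop_if_should_exit hooks; flaky_call is a hypothetical stand-in for the call being retried.

from tenacity import (
    retry,
    retry_if_exception_type,
    stop_after_attempt,
    wait_exponential,
)

# Illustrative values; in RetryMixin these come from kwargs / configuration.
num_retries = 4
retry_exceptions: tuple = (ConnectionError, TimeoutError)
retry_min_wait, retry_max_wait, retry_multiplier = 1, 10, 2


@retry(
    reraise=True,
    retry=retry_if_exception_type(retry_exceptions),  # retry only for these types
    stop=stop_after_attempt(num_retries),
    wait=wait_exponential(
        multiplier=retry_multiplier,
        min=retry_min_wait,
        max=retry_max_wait,
    ),
)
def flaky_call():
    # Hypothetical flaky operation standing in for the real LLM completion call.
    ...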