refactor(backend/copilot): rename retry variables for clarity

- _MAX_QUERY_ATTEMPTS → _MAX_STREAM_ATTEMPTS with docstring explaining
  the 3-step progression (original → compacted → no transcript)
- _query_attempt → _attempt
- _compaction_attempted → _tried_compaction
- Log message: "Retry attempt" → "Retrying with reduced context"
This commit is contained in:
Zamil Majdy
2026-03-14 19:56:00 +07:00
parent b599858dea
commit 33cd967e66
2 changed files with 22 additions and 19 deletions

View File

@@ -29,7 +29,7 @@ import pytest
from backend.util import json
-from .service import _MAX_QUERY_ATTEMPTS
+from .service import _MAX_STREAM_ATTEMPTS
from .transcript import (
_flatten_assistant_content,
_flatten_tool_result_content,
@@ -106,7 +106,7 @@ class TestScenarioNormalFlow:
def test_max_query_attempts_is_three(self):
"""Verify the constant is 3 (compact + DB fallback + exhaustion)."""
-assert _MAX_QUERY_ATTEMPTS == 3
+assert _MAX_STREAM_ATTEMPTS == 3
# ---------------------------------------------------------------------------
@@ -322,7 +322,7 @@ class TestScenarioAllAttemptsExhausted:
_stream_error: Exception | None = None
transcript_caused_error = False
-for _query_attempt in range(_MAX_QUERY_ATTEMPTS):
+for _query_attempt in range(_MAX_STREAM_ATTEMPTS):
_stream_error = Exception("some error")
# After loop: check exhaustion
@@ -462,17 +462,17 @@ class TestRetryStateMachine:
use_resume = bool(transcript_content)
stream_completed = False
attempts_made = 0
-_compaction_attempted = False
+_tried_compaction = False
-for _query_attempt in range(min(_MAX_QUERY_ATTEMPTS, len(attempt_results))):
+for _query_attempt in range(min(_MAX_STREAM_ATTEMPTS, len(attempt_results))):
if _query_attempt > 0:
_stream_error = None
stream_completed = False
# First retry: try compacting the transcript.
# Subsequent retries: drop transcript, rebuild from DB.
-if transcript_content and not _compaction_attempted:
-_compaction_attempted = True
+if transcript_content and not _tried_compaction:
+_tried_compaction = True
if compact_result and compact_result != transcript_content:
use_resume = True
else:

View File

@@ -89,7 +89,10 @@ logger = logging.getLogger(__name__)
config = ChatConfig()
-_MAX_QUERY_ATTEMPTS = 3
+# On any streaming error the SDK query is retried with progressively
+# less context: (1) original transcript → (2) compacted transcript →
+# (3) no transcript (DB messages only).
+_MAX_STREAM_ATTEMPTS = 3
async def _retry_with_compacted_transcript(
@@ -990,24 +993,24 @@ async def stream_chat_completion_sdk(
query_message = f"{query_message}\n\n{attachments.hint}"
_stream_error: Exception | None = None
-_compaction_attempted = False
+_tried_compaction = False
-for _query_attempt in range(_MAX_QUERY_ATTEMPTS):
-if _query_attempt > 0:
+for _attempt in range(_MAX_STREAM_ATTEMPTS):
+if _attempt > 0:
_stream_error = None
stream_completed = False
logger.info(
-"%s Retry attempt %d/%d",
+"%s Retrying with reduced context (%d/%d)",
log_prefix,
-_query_attempt + 1,
-_MAX_QUERY_ATTEMPTS,
+_attempt + 1,
+_MAX_STREAM_ATTEMPTS,
)
# First retry: try compacting the transcript.
# Subsequent retries: drop transcript, rebuild from DB.
-if transcript_content and not _compaction_attempted:
-_compaction_attempted = True
+if transcript_content and not _tried_compaction:
+_tried_compaction = True
tb, use_resume, resume_file, success = (
await _retry_with_compacted_transcript(
transcript_content, session_id, sdk_cwd, log_prefix
@@ -1148,8 +1151,8 @@ async def stream_chat_completion_sdk(
logger.warning(
"%s Stream error (attempt %d/%d): %s",
log_prefix,
-_query_attempt + 1,
-_MAX_QUERY_ATTEMPTS,
+_attempt + 1,
+_MAX_STREAM_ATTEMPTS,
stream_err,
exc_info=True,
)
@@ -1475,7 +1478,7 @@ async def stream_chat_completion_sdk(
logger.error(
"%s All %d query attempts exhausted: %s",
log_prefix,
-_MAX_QUERY_ATTEMPTS,
+_MAX_STREAM_ATTEMPTS,
_stream_error,
)
yield StreamError(