fix(chat): prevent race condition in LLM continuation

Reload session from DB before saving the LLM continuation response
to avoid overwriting user messages that may have been sent while
the background task was generating the response.
This commit is contained in:
Zamil Majdy
2026-01-27 14:05:31 -06:00
parent cc7bec7ad7
commit 2e4fd08cf7

View File

@@ -1705,15 +1705,24 @@ async def _generate_llm_continuation(
if response.choices and response.choices[0].message.content:
assistant_content = response.choices[0].message.content
# Reload session from DB to avoid race condition with user messages
# that may have been sent while we were generating the LLM response
fresh_session = await get_chat_session(session_id, user_id)
if not fresh_session:
logger.error(
f"Session {session_id} disappeared during LLM continuation"
)
return
# Save assistant message to database
assistant_message = ChatMessage(
role="assistant",
content=assistant_content,
)
-        session.messages.append(assistant_message)
+        fresh_session.messages.append(assistant_message)
# Save to database (not cache) to persist the response
-        await upsert_chat_session(session)
+        await upsert_chat_session(fresh_session)
# Invalidate cache so next poll/refresh gets fresh data
await invalidate_session_cache(session_id)