fix(polling): address PR review feedback for Google polling handlers

- Fix fetchHeaderRow to throw on 403/429 rate limits instead of silently
  returning empty headers (prevents rows from being processed without
  headers, and prevents lastKnownRowCount from permanently advancing
  past them)
- Fix Drive pagination to avoid advancing the resume cursor past sliced
  changes (prevents permanent loss of changes when allChanges exceeds
  maxFiles)
- Remove unused logger import from Google Drive trigger config
This commit is contained in:
Waleed Latif
2026-04-09 14:37:45 -07:00
committed by waleed
parent e563f6d51a
commit cfcc208728
3 changed files with 23 additions and 14 deletions

View File

@@ -252,20 +252,32 @@ async function fetchChanges(
newStartPageToken = data.newStartPageToken as string
}
if (data.nextPageToken) {
lastNextPageToken = data.nextPageToken as string
}
// Only advance the resume cursor when we'll actually use all changes from this page.
// If allChanges exceeds maxFiles, we'll slice off the extras — so we must NOT
// advance past this page, otherwise the sliced changes are lost permanently.
const hasMore = !!data.nextPageToken
const overLimit = allChanges.length >= maxFiles
if (!data.nextPageToken || allChanges.length >= maxFiles || pages >= MAX_PAGES) {
if (!hasMore || overLimit || pages >= MAX_PAGES) {
// If we stopped mid-stream and haven't consumed all changes from this page,
// keep currentPageToken so the next poll re-fetches this page.
// If we consumed everything on this page but there are more pages,
// advance to nextPageToken so we don't re-process this page.
if (hasMore && !overLimit) {
lastNextPageToken = data.nextPageToken as string
} else if (hasMore && overLimit && allChanges.length > maxFiles) {
// We got more changes than maxFiles from this page — don't advance,
// re-fetch this page next time (idempotency deduplicates already-processed ones)
} else if (hasMore) {
lastNextPageToken = data.nextPageToken as string
}
break
}
lastNextPageToken = data.nextPageToken as string
currentPageToken = data.nextPageToken as string
}
// If we exhausted all pages the API returns newStartPageToken on the final page.
// If we broke early, fall back to the last nextPageToken so we resume from where
// we stopped rather than re-fetching from the original cursor.
const resumeToken = newStartPageToken ?? lastNextPageToken ?? config.pageToken!
return { changes: allChanges.slice(0, maxFiles), newStartPageToken: resumeToken }

View File

@@ -315,12 +315,12 @@ async function fetchHeaderRow(
if (!response.ok) {
const status = response.status
if (status === 403 || status === 429) {
logger.warn(
`[${requestId}] Sheets API rate limit (${status}) fetching header row, proceeding without headers`
const errorData = await response.json().catch(() => ({}))
throw new Error(
`Sheets API rate limit (${status}) fetching header row — skipping to retry next poll cycle: ${JSON.stringify(errorData)}`
)
} else {
logger.warn(`[${requestId}] Failed to fetch header row, proceeding without headers`)
}
logger.warn(`[${requestId}] Failed to fetch header row, proceeding without headers`)
return []
}

View File

@@ -1,9 +1,6 @@
import { createLogger } from '@sim/logger'
import { GoogleDriveIcon } from '@/components/icons'
import type { TriggerConfig } from '@/triggers/types'
const logger = createLogger('GoogleDrivePollingTrigger')
const MIME_TYPE_OPTIONS = [
{ id: '', label: 'All Files' },
{ id: 'application/vnd.google-apps.document', label: 'Google Docs' },