Mirror of https://github.com/simstudioai/sim.git, synced 2026-01-07 22:24:06 -05:00

fix(schedules): locking schedules to prevent double runs (#1854)

* fix(schedules): locking schedules to prevent double runs
* add migration file
* fix

Commit 7a8d47a72e (parent e91a8af7cd), committed by GitHub.
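The change replaces the cron handler's select-then-enqueue flow with a single atomic update-and-return that stamps a new lastQueuedAt column, so two overlapping cron invocations cannot both pick up the same due schedule. A minimal sketch of that claim step, distilled from the diff below (the helper name and return shape are illustrative only; the real handler does this inline in its GET route):

import { db, workflowSchedule } from '@sim/db'
import { and, eq, isNull, lt, lte, not, or } from 'drizzle-orm'

// Hypothetical helper illustrating the locking condition introduced here:
// a schedule is claimable only if it is due, not disabled, and either has
// never been queued or was last queued before the run it is now due for.
export async function claimDueSchedules(queuedAt: Date) {
  return db
    .update(workflowSchedule)
    .set({ lastQueuedAt: queuedAt, updatedAt: queuedAt })
    .where(
      and(
        lte(workflowSchedule.nextRunAt, queuedAt),
        not(eq(workflowSchedule.status, 'disabled')),
        or(
          isNull(workflowSchedule.lastQueuedAt),
          lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
        )
      )
    )
    // only the rows this invocation actually claimed come back
    .returning({
      id: workflowSchedule.id,
      workflowId: workflowSchedule.workflowId,
      nextRunAt: workflowSchedule.nextRunAt,
      lastQueuedAt: workflowSchedule.lastQueuedAt,
    })
}

Because the claim is a single UPDATE ... RETURNING, a second cron tick that fires before the first finishes sees lastQueuedAt already advanced and matches no rows.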
@@ -53,30 +53,46 @@ describe('Scheduled Workflow Execution API Route', () => {
      and: vi.fn((...conditions) => ({ type: 'and', conditions })),
      eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
      lte: vi.fn((field, value) => ({ field, value, type: 'lte' })),
      lt: vi.fn((field, value) => ({ field, value, type: 'lt' })),
      not: vi.fn((condition) => ({ type: 'not', condition })),
      isNull: vi.fn((field) => ({ type: 'isNull', field })),
      or: vi.fn((...conditions) => ({ type: 'or', conditions })),
    }))

    vi.doMock('@sim/db', () => {
      const mockDb = {
        select: vi.fn().mockImplementation(() => ({
          from: vi.fn().mockImplementation(() => ({
            where: vi.fn().mockImplementation(() => [
              {
                id: 'schedule-1',
                workflowId: 'workflow-1',
                blockId: null,
                cronExpression: null,
                lastRanAt: null,
                failedCount: 0,
              },
            ]),
          })),
        })),
      }
      const returningSchedules = [
        {
          id: 'schedule-1',
          workflowId: 'workflow-1',
          blockId: null,
          cronExpression: null,
          lastRanAt: null,
          failedCount: 0,
          nextRunAt: new Date('2025-01-01T00:00:00.000Z'),
          lastQueuedAt: undefined,
        },
      ]

      const mockReturning = vi.fn().mockReturnValue(returningSchedules)
      const mockWhere = vi.fn().mockReturnValue({ returning: mockReturning })
      const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
      const mockUpdate = vi.fn().mockReturnValue({ set: mockSet })

      return {
        db: mockDb,
        workflowSchedule: {},
        db: {
          update: mockUpdate,
        },
        workflowSchedule: {
          id: 'id',
          workflowId: 'workflowId',
          blockId: 'blockId',
          cronExpression: 'cronExpression',
          lastRanAt: 'lastRanAt',
          failedCount: 'failedCount',
          status: 'status',
          nextRunAt: 'nextRunAt',
          lastQueuedAt: 'lastQueuedAt',
        },
      }
    })
@@ -114,30 +130,46 @@ describe('Scheduled Workflow Execution API Route', () => {
      and: vi.fn((...conditions) => ({ type: 'and', conditions })),
      eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
      lte: vi.fn((field, value) => ({ field, value, type: 'lte' })),
      lt: vi.fn((field, value) => ({ field, value, type: 'lt' })),
      not: vi.fn((condition) => ({ type: 'not', condition })),
      isNull: vi.fn((field) => ({ type: 'isNull', field })),
      or: vi.fn((...conditions) => ({ type: 'or', conditions })),
    }))

    vi.doMock('@sim/db', () => {
      const mockDb = {
        select: vi.fn().mockImplementation(() => ({
          from: vi.fn().mockImplementation(() => ({
            where: vi.fn().mockImplementation(() => [
              {
                id: 'schedule-1',
                workflowId: 'workflow-1',
                blockId: null,
                cronExpression: null,
                lastRanAt: null,
                failedCount: 0,
              },
            ]),
          })),
        })),
      }
      const returningSchedules = [
        {
          id: 'schedule-1',
          workflowId: 'workflow-1',
          blockId: null,
          cronExpression: null,
          lastRanAt: null,
          failedCount: 0,
          nextRunAt: new Date('2025-01-01T00:00:00.000Z'),
          lastQueuedAt: undefined,
        },
      ]

      const mockReturning = vi.fn().mockReturnValue(returningSchedules)
      const mockWhere = vi.fn().mockReturnValue({ returning: mockReturning })
      const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
      const mockUpdate = vi.fn().mockReturnValue({ set: mockSet })

      return {
        db: mockDb,
        workflowSchedule: {},
        db: {
          update: mockUpdate,
        },
        workflowSchedule: {
          id: 'id',
          workflowId: 'workflowId',
          blockId: 'blockId',
          cronExpression: 'cronExpression',
          lastRanAt: 'lastRanAt',
          failedCount: 'failedCount',
          status: 'status',
          nextRunAt: 'nextRunAt',
          lastQueuedAt: 'lastQueuedAt',
        },
      }
    })
@@ -170,21 +202,33 @@ describe('Scheduled Workflow Execution API Route', () => {
      and: vi.fn((...conditions) => ({ type: 'and', conditions })),
      eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
      lte: vi.fn((field, value) => ({ field, value, type: 'lte' })),
      lt: vi.fn((field, value) => ({ field, value, type: 'lt' })),
      not: vi.fn((condition) => ({ type: 'not', condition })),
      isNull: vi.fn((field) => ({ type: 'isNull', field })),
      or: vi.fn((...conditions) => ({ type: 'or', conditions })),
    }))

    vi.doMock('@sim/db', () => {
      const mockDb = {
        select: vi.fn().mockImplementation(() => ({
          from: vi.fn().mockImplementation(() => ({
            where: vi.fn().mockImplementation(() => []),
          })),
        })),
      }
      const mockReturning = vi.fn().mockReturnValue([])
      const mockWhere = vi.fn().mockReturnValue({ returning: mockReturning })
      const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
      const mockUpdate = vi.fn().mockReturnValue({ set: mockSet })

      return {
        db: mockDb,
        workflowSchedule: {},
        db: {
          update: mockUpdate,
        },
        workflowSchedule: {
          id: 'id',
          workflowId: 'workflowId',
          blockId: 'blockId',
          cronExpression: 'cronExpression',
          lastRanAt: 'lastRanAt',
          failedCount: 'failedCount',
          status: 'status',
          nextRunAt: 'nextRunAt',
          lastQueuedAt: 'lastQueuedAt',
        },
      }
    })
@@ -217,38 +261,56 @@ describe('Scheduled Workflow Execution API Route', () => {
      and: vi.fn((...conditions) => ({ type: 'and', conditions })),
      eq: vi.fn((field, value) => ({ field, value, type: 'eq' })),
      lte: vi.fn((field, value) => ({ field, value, type: 'lte' })),
      lt: vi.fn((field, value) => ({ field, value, type: 'lt' })),
      not: vi.fn((condition) => ({ type: 'not', condition })),
      isNull: vi.fn((field) => ({ type: 'isNull', field })),
      or: vi.fn((...conditions) => ({ type: 'or', conditions })),
    }))

    vi.doMock('@sim/db', () => {
      const mockDb = {
        select: vi.fn().mockImplementation(() => ({
          from: vi.fn().mockImplementation(() => ({
            where: vi.fn().mockImplementation(() => [
              {
                id: 'schedule-1',
                workflowId: 'workflow-1',
                blockId: null,
                cronExpression: null,
                lastRanAt: null,
                failedCount: 0,
              },
              {
                id: 'schedule-2',
                workflowId: 'workflow-2',
                blockId: null,
                cronExpression: null,
                lastRanAt: null,
                failedCount: 0,
              },
            ]),
          })),
        })),
      }
      const returningSchedules = [
        {
          id: 'schedule-1',
          workflowId: 'workflow-1',
          blockId: null,
          cronExpression: null,
          lastRanAt: null,
          failedCount: 0,
          nextRunAt: new Date('2025-01-01T00:00:00.000Z'),
          lastQueuedAt: undefined,
        },
        {
          id: 'schedule-2',
          workflowId: 'workflow-2',
          blockId: null,
          cronExpression: null,
          lastRanAt: null,
          failedCount: 0,
          nextRunAt: new Date('2025-01-01T01:00:00.000Z'),
          lastQueuedAt: undefined,
        },
      ]

      const mockReturning = vi.fn().mockReturnValue(returningSchedules)
      const mockWhere = vi.fn().mockReturnValue({ returning: mockReturning })
      const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
      const mockUpdate = vi.fn().mockReturnValue({ set: mockSet })

      return {
        db: mockDb,
        workflowSchedule: {},
        db: {
          update: mockUpdate,
        },
        workflowSchedule: {
          id: 'id',
          workflowId: 'workflowId',
          blockId: 'blockId',
          cronExpression: 'cronExpression',
          lastRanAt: 'lastRanAt',
          failedCount: 'failedCount',
          status: 'status',
          nextRunAt: 'nextRunAt',
          lastQueuedAt: 'lastQueuedAt',
        },
      }
    })
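All four mock setups above stub the same db.update().set().where().returning() chain that the route now calls instead of select(). For reference, a hypothetical helper (not part of this commit) capturing the shared shape, parameterized by the rows that .returning() should resolve to:

import { vi } from 'vitest'

// Hypothetical helper, not in the commit: builds the update().set().where().returning()
// chain the tests stub out, with the given rows returned from .returning().
function mockScheduleDb(returningSchedules: unknown[]) {
  const mockReturning = vi.fn().mockReturnValue(returningSchedules)
  const mockWhere = vi.fn().mockReturnValue({ returning: mockReturning })
  const mockSet = vi.fn().mockReturnValue({ where: mockWhere })
  const mockUpdate = vi.fn().mockReturnValue({ set: mockSet })
  return { db: { update: mockUpdate }, mockUpdate, mockSet, mockWhere, mockReturning }
}

Each test would then pass its own rows: one schedule, two schedules, or an empty array for the "no due schedules" case.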
@@ -1,6 +1,6 @@
import { db, workflowSchedule } from '@sim/db'
import { tasks } from '@trigger.dev/sdk'
import { and, eq, lte, not } from 'drizzle-orm'
import { and, eq, isNull, lt, lte, not, or } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { verifyCronAuth } from '@/lib/auth/internal'
import { env, isTruthy } from '@/lib/env'

@@ -21,15 +21,35 @@ export async function GET(request: NextRequest) {
    return authError
  }

  const now = new Date()
  const queuedAt = new Date()

  try {
    const dueSchedules = await db
      .select()
      .from(workflowSchedule)
      .update(workflowSchedule)
      .set({
        lastQueuedAt: queuedAt,
        updatedAt: queuedAt,
      })
      .where(
        and(lte(workflowSchedule.nextRunAt, now), not(eq(workflowSchedule.status, 'disabled')))
        and(
          lte(workflowSchedule.nextRunAt, queuedAt),
          not(eq(workflowSchedule.status, 'disabled')),
          or(
            isNull(workflowSchedule.lastQueuedAt),
            lt(workflowSchedule.lastQueuedAt, workflowSchedule.nextRunAt)
          )
        )
      )
      .returning({
        id: workflowSchedule.id,
        workflowId: workflowSchedule.workflowId,
        blockId: workflowSchedule.blockId,
        cronExpression: workflowSchedule.cronExpression,
        lastRanAt: workflowSchedule.lastRanAt,
        failedCount: workflowSchedule.failedCount,
        nextRunAt: workflowSchedule.nextRunAt,
        lastQueuedAt: workflowSchedule.lastQueuedAt,
      })

    logger.debug(`[${requestId}] Successfully queried schedules: ${dueSchedules.length} found`)
    logger.info(`[${requestId}] Processing ${dueSchedules.length} due scheduled workflows`)

@@ -38,6 +58,8 @@ export async function GET(request: NextRequest) {

    if (useTrigger) {
      const triggerPromises = dueSchedules.map(async (schedule) => {
        const queueTime = schedule.lastQueuedAt ?? queuedAt

        try {
          const payload = {
            scheduleId: schedule.id,

@@ -46,7 +68,8 @@ export async function GET(request: NextRequest) {
            cronExpression: schedule.cronExpression || undefined,
            lastRanAt: schedule.lastRanAt?.toISOString(),
            failedCount: schedule.failedCount || 0,
            now: now.toISOString(),
            now: queueTime.toISOString(),
            scheduledFor: schedule.nextRunAt?.toISOString(),
          }

          const handle = await tasks.trigger('schedule-execution', payload)

@@ -68,6 +91,8 @@ export async function GET(request: NextRequest) {
      logger.info(`[${requestId}] Queued ${dueSchedules.length} schedule executions to Trigger.dev`)
    } else {
      const directExecutionPromises = dueSchedules.map(async (schedule) => {
        const queueTime = schedule.lastQueuedAt ?? queuedAt

        const payload = {
          scheduleId: schedule.id,
          workflowId: schedule.workflowId,

@@ -75,7 +100,8 @@ export async function GET(request: NextRequest) {
          cronExpression: schedule.cronExpression || undefined,
          lastRanAt: schedule.lastRanAt?.toISOString(),
          failedCount: schedule.failedCount || 0,
          now: now.toISOString(),
          now: queueTime.toISOString(),
          scheduledFor: schedule.nextRunAt?.toISOString(),
        }

        void executeScheduleJob(payload).catch((error) => {
packages/db/migrations/0108_cuddly_scream.sql (Normal file, 1 line)

@@ -0,0 +1 @@
ALTER TABLE "workflow_schedule" ADD COLUMN "last_queued_at" timestamp;

packages/db/migrations/meta/0108_snapshot.json (Normal file, 7678 lines)
File diff suppressed because it is too large.
@@ -750,6 +750,13 @@
      "when": 1762565365042,
      "tag": "0107_silky_agent_brand",
      "breakpoints": true
    },
    {
      "idx": 108,
      "version": "7",
      "when": 1762572820066,
      "tag": "0108_cuddly_scream",
      "breakpoints": true
    }
  ]
}
@@ -443,6 +443,7 @@ export const workflowSchedule = pgTable(
    cronExpression: text('cron_expression'),
    nextRunAt: timestamp('next_run_at'),
    lastRanAt: timestamp('last_ran_at'),
    lastQueuedAt: timestamp('last_queued_at'),
    triggerType: text('trigger_type').notNull(), // "manual", "webhook", "schedule"
    timezone: text('timezone').notNull().default('UTC'),
    failedCount: integer('failed_count').notNull().default(0), // Track consecutive failures