Compare commits

..

13 Commits

Author SHA1 Message Date
Vikhyath Mondreti
efe2b85346 fix bugbot comments 2026-01-29 20:15:34 -08:00
Vikhyath Mondreti
49a6197cd2 cleanup code 2026-01-29 19:56:36 -08:00
Vikhyath Mondreti
175b72899c fix 2026-01-29 19:28:53 -08:00
Vikhyath Mondreti
427e3b9417 null cursor 2026-01-29 19:16:37 -08:00
Vikhyath Mondreti
2b248104e6 fix more bugbot cleanup comments 2026-01-29 19:01:32 -08:00
Vikhyath Mondreti
df1a951e98 bugbot comment 2026-01-29 17:45:24 -08:00
Vikhyath Mondreti
1fbe3029f4 fix bugbot comments 2026-01-29 17:30:19 -08:00
Vikhyath Mondreti
e5d9b98909 use native api 2026-01-29 17:08:44 -08:00
Vikhyath Mondreti
d00ed958cc improve typing 2026-01-29 17:00:33 -08:00
Vikhyath Mondreti
7d23e2363d remove random error code 2026-01-29 16:56:56 -08:00
Vikhyath Mondreti
fc1ca1e36b improvment(sockets): migrate to redis 2026-01-29 16:50:38 -08:00
Siddharth Ganesan
2b026ded16 fix(copilot): hosted api key validation + credential validation (#3000)
* Fix

* Fix greptile

* Fix validation

* Fix comments

* Lint

* Fix

* remove passed in workspace id ref

* Fix comments

---------

Co-authored-by: Vikhyath Mondreti <vikhyath@simstudio.ai>
2026-01-29 10:48:59 -08:00
Siddharth Ganesan
dca0758054 fix(executor): conditional deactivation for loops/parallels (#3069)
* Fix deactivation

* Remove comments
2026-01-29 10:43:30 -08:00
33 changed files with 2293 additions and 972 deletions

View File

@@ -4,22 +4,22 @@ import { auth } from '@/lib/auth'
 import { isAuthDisabled } from '@/lib/core/config/feature-flags'
 
 export async function POST() {
-  try {
-    if (isAuthDisabled) {
-      return NextResponse.json({ token: 'anonymous-socket-token' })
-    }
+  if (isAuthDisabled) {
+    return NextResponse.json({ token: 'anonymous-socket-token' })
+  }
 
+  try {
     const hdrs = await headers()
     const response = await auth.api.generateOneTimeToken({
       headers: hdrs,
     })
-    if (!response) {
-      return NextResponse.json({ error: 'Failed to generate token' }, { status: 500 })
+    if (!response?.token) {
+      return NextResponse.json({ error: 'Authentication required' }, { status: 401 })
     }
     return NextResponse.json({ token: response.token })
-  } catch (error) {
+  } catch {
     return NextResponse.json({ error: 'Failed to generate token' }, { status: 500 })
   }
 }

View File

@@ -97,7 +97,10 @@ export async function POST(
   const socketServerUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
   await fetch(`${socketServerUrl}/api/workflow-reverted`, {
     method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
+    headers: {
+      'Content-Type': 'application/json',
+      'x-api-key': env.INTERNAL_API_SECRET,
+    },
     body: JSON.stringify({ workflowId: id, timestamp: Date.now() }),
   })
 } catch (e) {

View File

@@ -361,7 +361,10 @@ export async function DELETE(
   const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
   const socketResponse = await fetch(`${socketUrl}/api/workflow-deleted`, {
     method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
+    headers: {
+      'Content-Type': 'application/json',
+      'x-api-key': env.INTERNAL_API_SECRET,
+    },
     body: JSON.stringify({ workflowId }),
   })

View File

@@ -254,7 +254,10 @@ export async function PUT(request: NextRequest, { params }: { params: Promise<{
   const socketUrl = env.SOCKET_SERVER_URL || 'http://localhost:3002'
   const notifyResponse = await fetch(`${socketUrl}/api/workflow-updated`, {
     method: 'POST',
-    headers: { 'Content-Type': 'application/json' },
+    headers: {
+      'Content-Type': 'application/json',
+      'x-api-key': env.INTERNAL_API_SECRET,
+    },
     body: JSON.stringify({ workflowId }),
   })
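
All three routes above now authenticate their internal notifications to the socket server with an `x-api-key` header carrying `env.INTERNAL_API_SECRET`. The verifying side is not part of this compare; below is only a rough sketch of the kind of check those internal endpoints would need, with the helper name and handler shape being assumptions:

import { timingSafeEqual } from 'crypto'
import { env } from '@/lib/core/config/env'

// Hypothetical guard for internal endpoints such as /api/workflow-updated:
// accept the request only when the caller presents the shared INTERNAL_API_SECRET.
export function isInternalRequest(headers: Record<string, string | undefined>): boolean {
  const presented = headers['x-api-key']
  const expected = env.INTERNAL_API_SECRET
  if (!presented || !expected) return false
  const a = Buffer.from(presented)
  const b = Buffer.from(expected)
  // timingSafeEqual throws on length mismatch, so compare lengths first
  return a.length === b.length && timingSafeEqual(a, b)
}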

View File

@@ -17,6 +17,19 @@ import { getEnv } from '@/lib/core/config/env'
 const logger = createLogger('SocketContext')
 
+const TAB_SESSION_ID_KEY = 'sim_tab_session_id'
+
+function getTabSessionId(): string {
+  if (typeof window === 'undefined') return ''
+  let tabSessionId = sessionStorage.getItem(TAB_SESSION_ID_KEY)
+  if (!tabSessionId) {
+    tabSessionId = crypto.randomUUID()
+    sessionStorage.setItem(TAB_SESSION_ID_KEY, tabSessionId)
+  }
+  return tabSessionId
+}
+
 interface User {
   id: string
   name?: string
@@ -36,11 +49,13 @@
   socket: Socket | null
   isConnected: boolean
   isConnecting: boolean
+  authFailed: boolean
   currentWorkflowId: string | null
   currentSocketId: string | null
   presenceUsers: PresenceUser[]
   joinWorkflow: (workflowId: string) => void
   leaveWorkflow: () => void
+  retryConnection: () => void
   emitWorkflowOperation: (
     operation: string,
     target: string,
@@ -63,8 +78,6 @@
   onCursorUpdate: (handler: (data: any) => void) => void
   onSelectionUpdate: (handler: (data: any) => void) => void
-  onUserJoined: (handler: (data: any) => void) => void
-  onUserLeft: (handler: (data: any) => void) => void
   onWorkflowDeleted: (handler: (data: any) => void) => void
   onWorkflowReverted: (handler: (data: any) => void) => void
   onOperationConfirmed: (handler: (data: any) => void) => void
@@ -75,11 +88,13 @@ const SocketContext = createContext<SocketContextType>({
   socket: null,
   isConnected: false,
   isConnecting: false,
+  authFailed: false,
   currentWorkflowId: null,
   currentSocketId: null,
   presenceUsers: [],
   joinWorkflow: () => {},
   leaveWorkflow: () => {},
+  retryConnection: () => {},
   emitWorkflowOperation: () => {},
   emitSubblockUpdate: () => {},
   emitVariableUpdate: () => {},
@@ -90,8 +105,6 @@ const SocketContext = createContext<SocketContextType>({
   onVariableUpdate: () => {},
   onCursorUpdate: () => {},
   onSelectionUpdate: () => {},
-  onUserJoined: () => {},
-  onUserLeft: () => {},
   onWorkflowDeleted: () => {},
   onWorkflowReverted: () => {},
   onOperationConfirmed: () => {},
@@ -112,33 +125,43 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
   const [currentWorkflowId, setCurrentWorkflowId] = useState<string | null>(null)
   const [currentSocketId, setCurrentSocketId] = useState<string | null>(null)
   const [presenceUsers, setPresenceUsers] = useState<PresenceUser[]>([])
+  const [authFailed, setAuthFailed] = useState(false)
   const initializedRef = useRef(false)
+  const socketRef = useRef<Socket | null>(null)
   const params = useParams()
   const urlWorkflowId = params?.workflowId as string | undefined
+  const urlWorkflowIdRef = useRef(urlWorkflowId)
+  urlWorkflowIdRef.current = urlWorkflowId
 
   const eventHandlers = useRef<{
     workflowOperation?: (data: any) => void
     subblockUpdate?: (data: any) => void
     variableUpdate?: (data: any) => void
     cursorUpdate?: (data: any) => void
     selectionUpdate?: (data: any) => void
-    userJoined?: (data: any) => void
-    userLeft?: (data: any) => void
     workflowDeleted?: (data: any) => void
     workflowReverted?: (data: any) => void
    operationConfirmed?: (data: any) => void
    operationFailed?: (data: any) => void
   }>({})
 
+  const positionUpdateTimeouts = useRef<Map<string, number>>(new Map())
+  const isRejoiningRef = useRef<boolean>(false)
+  const pendingPositionUpdates = useRef<Map<string, any>>(new Map())
+
   const generateSocketToken = async (): Promise<string> => {
     const res = await fetch('/api/auth/socket-token', {
       method: 'POST',
       credentials: 'include',
       headers: { 'cache-control': 'no-store' },
     })
-    if (!res.ok) throw new Error('Failed to generate socket token')
+    if (!res.ok) {
+      if (res.status === 401) {
+        throw new Error('Authentication required')
+      }
+      throw new Error('Failed to generate socket token')
+    }
     const body = await res.json().catch(() => ({}))
     const token = body?.token
     if (!token || typeof token !== 'string') throw new Error('Invalid socket token')
@@ -148,6 +171,11 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
   useEffect(() => {
     if (!user?.id) return
 
+    if (authFailed) {
+      logger.info('Socket initialization skipped - auth failed, waiting for retry')
+      return
+    }
+
     if (initializedRef.current || socket || isConnecting) {
       logger.info('Socket already exists or is connecting, skipping initialization')
       return
@@ -180,7 +208,11 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
           cb({ token: freshToken })
         } catch (error) {
           logger.error('Failed to generate fresh token for connection:', error)
-          cb({ token: null })
+          if (error instanceof Error && error.message === 'Authentication required') {
+            // True auth failure - pass null token, server will reject with "Authentication required"
+            cb({ token: null })
+          }
+          // For server errors, don't call cb - connection will timeout and Socket.IO will retry
         }
       },
     })
@@ -194,26 +226,19 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
         connected: socketInstance.connected,
         transport: socketInstance.io.engine?.transport?.name,
       })
-
-      if (urlWorkflowId) {
-        logger.info(`Joining workflow room after connection: ${urlWorkflowId}`)
-        socketInstance.emit('join-workflow', {
-          workflowId: urlWorkflowId,
-        })
-        setCurrentWorkflowId(urlWorkflowId)
-      }
+      // Note: join-workflow is handled by the useEffect watching isConnected
     })
 
     socketInstance.on('disconnect', (reason) => {
       setIsConnected(false)
       setIsConnecting(false)
       setCurrentSocketId(null)
-      setCurrentWorkflowId(null)
-      setPresenceUsers([])
       logger.info('Socket disconnected', {
         reason,
       })
+      setPresenceUsers([])
     })
 
     socketInstance.on('connect_error', (error: any) => {
@@ -226,24 +251,34 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
         transport: error.transport,
       })
 
-      if (
-        error.message?.includes('Token validation failed') ||
-        error.message?.includes('Authentication failed') ||
-        error.message?.includes('Authentication required')
-      ) {
+      // Check if this is an authentication failure
+      const isAuthError =
+        error.message?.includes('Token validation failed') ||
+        error.message?.includes('Authentication failed') ||
+        error.message?.includes('Authentication required')
+
+      if (isAuthError) {
         logger.warn(
-          'Authentication failed - this could indicate session expiry or token generation issues'
+          'Authentication failed - stopping reconnection attempts. User may need to refresh/re-login.'
        )
+        // Stop reconnection attempts to prevent infinite loop
+        socketInstance.disconnect()
+        // Reset state to allow re-initialization when session is restored
+        setSocket(null)
+        setAuthFailed(true)
+        initializedRef.current = false
       }
     })
 
     socketInstance.on('reconnect', (attemptNumber) => {
+      setIsConnected(true)
       setCurrentSocketId(socketInstance.id ?? null)
       logger.info('Socket reconnected successfully', {
         attemptNumber,
         socketId: socketInstance.id,
         transport: socketInstance.io.engine?.transport?.name,
       })
+      // Note: join-workflow is handled by the useEffect watching isConnected
     })
 
     socketInstance.on('reconnect_attempt', (attemptNumber) => {
@@ -284,6 +319,26 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
       })
     })
 
+    // Handle join workflow success - confirms room membership with presence list
+    socketInstance.on('join-workflow-success', ({ workflowId, presenceUsers }) => {
+      isRejoiningRef.current = false
+      // Ignore stale success responses from previous navigation
+      if (workflowId !== urlWorkflowIdRef.current) {
+        logger.debug(`Ignoring stale join-workflow-success for ${workflowId}`)
+        return
+      }
+      setCurrentWorkflowId(workflowId)
+      setPresenceUsers(presenceUsers || [])
+      logger.info(`Successfully joined workflow room: ${workflowId}`, {
+        presenceCount: presenceUsers?.length || 0,
+      })
+    })
+
+    socketInstance.on('join-workflow-error', ({ error }) => {
+      isRejoiningRef.current = false
+      logger.error('Failed to join workflow:', error)
+    })
+
     socketInstance.on('workflow-operation', (data) => {
       eventHandlers.current.workflowOperation?.(data)
     })
@@ -298,10 +353,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     socketInstance.on('workflow-deleted', (data) => {
       logger.warn(`Workflow ${data.workflowId} has been deleted`)
-      if (currentWorkflowId === data.workflowId) {
-        setCurrentWorkflowId(null)
-        setPresenceUsers([])
-      }
+      setCurrentWorkflowId((current) => {
+        if (current === data.workflowId) {
+          setPresenceUsers([])
+          return null
+        }
+        return current
+      })
       eventHandlers.current.workflowDeleted?.(data)
     })
@@ -444,25 +502,35 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     socketInstance.on('operation-forbidden', (error) => {
       logger.warn('Operation forbidden:', error)
-    })
 
-    socketInstance.on('operation-confirmed', (data) => {
-      logger.debug('Operation confirmed:', data)
+      if (error?.type === 'SESSION_ERROR') {
+        const workflowId = urlWorkflowIdRef.current
+        if (workflowId && !isRejoiningRef.current) {
+          isRejoiningRef.current = true
+          logger.info(`Session expired, rejoining workflow: ${workflowId}`)
+          socketInstance.emit('join-workflow', {
+            workflowId,
+            tabSessionId: getTabSessionId(),
+          })
+        }
+      }
     })
 
     socketInstance.on('workflow-state', async (workflowData) => {
       logger.info('Received workflow state from server')
       if (workflowData?.state) {
-        await rehydrateWorkflowStores(workflowData.id, workflowData.state, 'workflow-state')
+        try {
+          await rehydrateWorkflowStores(workflowData.id, workflowData.state, 'workflow-state')
+        } catch (error) {
+          logger.error('Error rehydrating workflow state:', error)
+        }
       }
     })
 
+    socketRef.current = socketInstance
     setSocket(socketInstance)
-
-    return () => {
-      socketInstance.close()
-    }
   } catch (error) {
     logger.error('Failed to initialize socket with token:', error)
     setIsConnecting(false)
@@ -477,12 +545,20 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
       })
       positionUpdateTimeouts.current.clear()
       pendingPositionUpdates.current.clear()
+
+      // Close socket on unmount
+      if (socketRef.current) {
+        logger.info('Closing socket connection on unmount')
+        socketRef.current.close()
+        socketRef.current = null
+      }
     }
-  }, [user?.id])
+  }, [user?.id, authFailed])
 
   useEffect(() => {
     if (!socket || !isConnected || !urlWorkflowId) return
+
+    // Skip if already in the correct room
     if (currentWorkflowId === urlWorkflowId) return
 
     logger.info(
@@ -497,19 +573,10 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     logger.info(`Joining workflow room: ${urlWorkflowId}`)
     socket.emit('join-workflow', {
       workflowId: urlWorkflowId,
+      tabSessionId: getTabSessionId(),
     })
-    setCurrentWorkflowId(urlWorkflowId)
   }, [socket, isConnected, urlWorkflowId, currentWorkflowId])
 
-  useEffect(() => {
-    return () => {
-      if (socket) {
-        logger.info('Cleaning up socket connection on unmount')
-        socket.disconnect()
-      }
-    }
-  }, [])
-
   const joinWorkflow = useCallback(
     (workflowId: string) => {
       if (!socket || !user?.id) {
@@ -530,8 +597,9 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
       logger.info(`Joining workflow: ${workflowId}`)
       socket.emit('join-workflow', {
         workflowId,
+        tabSessionId: getTabSessionId(),
       })
-      setCurrentWorkflowId(workflowId)
+      // currentWorkflowId will be set by join-workflow-success handler
     },
     [socket, user, currentWorkflowId]
   )
@@ -539,10 +607,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
   const leaveWorkflow = useCallback(() => {
     if (socket && currentWorkflowId) {
       logger.info(`Leaving workflow: ${currentWorkflowId}`)
-      try {
-        const { useOperationQueueStore } = require('@/stores/operation-queue/store')
-        useOperationQueueStore.getState().cancelOperationsForWorkflow(currentWorkflowId)
-      } catch {}
+      import('@/stores/operation-queue/store')
+        .then(({ useOperationQueueStore }) => {
+          useOperationQueueStore.getState().cancelOperationsForWorkflow(currentWorkflowId)
+        })
+        .catch((error) => {
+          logger.warn('Failed to cancel operations for workflow:', error)
+        })
       socket.emit('leave-workflow')
       setCurrentWorkflowId(null)
       setPresenceUsers([])
@@ -555,8 +626,20 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     }
   }, [socket, currentWorkflowId])
 
-  const positionUpdateTimeouts = useRef<Map<string, number>>(new Map())
-  const pendingPositionUpdates = useRef<Map<string, any>>(new Map())
+  /**
+   * Retry socket connection after auth failure.
+   * Call this when user has re-authenticated (e.g., after login redirect).
+   */
+  const retryConnection = useCallback(() => {
+    if (!authFailed) {
+      logger.info('retryConnection called but no auth failure - ignoring')
+      return
+    }
+    logger.info('Retrying socket connection after auth failure')
+    setAuthFailed(false)
+    // initializedRef.current was already reset in connect_error handler
+    // Effect will re-run and attempt connection
+  }, [authFailed])
 
   const emitWorkflowOperation = useCallback(
     (operation: string, target: string, payload: any, operationId?: string) => {
@@ -716,14 +799,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     eventHandlers.current.selectionUpdate = handler
   }, [])
 
-  const onUserJoined = useCallback((handler: (data: any) => void) => {
-    eventHandlers.current.userJoined = handler
-  }, [])
-
-  const onUserLeft = useCallback((handler: (data: any) => void) => {
-    eventHandlers.current.userLeft = handler
-  }, [])
-
   const onWorkflowDeleted = useCallback((handler: (data: any) => void) => {
     eventHandlers.current.workflowDeleted = handler
   }, [])
@@ -745,11 +820,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     socket,
     isConnected,
     isConnecting,
+    authFailed,
     currentWorkflowId,
     currentSocketId,
     presenceUsers,
     joinWorkflow,
     leaveWorkflow,
+    retryConnection,
     emitWorkflowOperation,
     emitSubblockUpdate,
     emitVariableUpdate,
@@ -760,8 +837,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     onVariableUpdate,
     onCursorUpdate,
     onSelectionUpdate,
-    onUserJoined,
-    onUserLeft,
     onWorkflowDeleted,
     onWorkflowReverted,
     onOperationConfirmed,
@@ -771,11 +846,13 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     socket,
     isConnected,
     isConnecting,
+    authFailed,
     currentWorkflowId,
     currentSocketId,
     presenceUsers,
     joinWorkflow,
     leaveWorkflow,
+    retryConnection,
     emitWorkflowOperation,
     emitSubblockUpdate,
     emitVariableUpdate,
@@ -786,8 +863,6 @@ export function SocketProvider({ children, user }: SocketProviderProps) {
     onVariableUpdate,
     onCursorUpdate,
     onSelectionUpdate,
-    onUserJoined,
-    onUserLeft,
     onWorkflowDeleted,
     onWorkflowReverted,
     onOperationConfirmed,
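
With this change the provider stops reconnect-looping on auth errors and instead exposes `authFailed` plus `retryConnection` through the context. A minimal consumer sketch, assuming the context (or a hook wrapping it) is exported to components; the banner component itself is illustrative, not part of this diff:

import { useContext } from 'react'

// Hypothetical consumer of SocketContextType
function ConnectionBanner() {
  const { authFailed, retryConnection } = useContext(SocketContext)
  if (!authFailed) return null
  return (
    // Rendered only after connect_error marked the session as expired
    <button onClick={retryConnection}>Session expired - reconnect</button>
  )
}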

View File

@@ -2417,4 +2417,177 @@ describe('EdgeManager', () => {
       expect(successReady).toContain(targetId)
     })
   })
+
+  describe('Condition with loop downstream - deactivation propagation', () => {
+    it('should deactivate nodes after loop when condition branch containing loop is deactivated', () => {
+      // Scenario: condition → (if) → sentinel_start → loopBody → sentinel_end → (loop_exit) → after_loop
+      //           → (else) → other_branch
+      // When condition takes "else" path, the entire if-branch including nodes after the loop should be deactivated
+      const conditionId = 'condition'
+      const sentinelStartId = 'sentinel-start'
+      const loopBodyId = 'loop-body'
+      const sentinelEndId = 'sentinel-end'
+      const afterLoopId = 'after-loop'
+      const otherBranchId = 'other-branch'
+
+      const conditionNode = createMockNode(conditionId, [
+        { target: sentinelStartId, sourceHandle: 'condition-if' },
+        { target: otherBranchId, sourceHandle: 'condition-else' },
+      ])
+      const sentinelStartNode = createMockNode(
+        sentinelStartId,
+        [{ target: loopBodyId }],
+        [conditionId]
+      )
+      const loopBodyNode = createMockNode(
+        loopBodyId,
+        [{ target: sentinelEndId }],
+        [sentinelStartId]
+      )
+      const sentinelEndNode = createMockNode(
+        sentinelEndId,
+        [
+          { target: sentinelStartId, sourceHandle: 'loop_continue' },
+          { target: afterLoopId, sourceHandle: 'loop_exit' },
+        ],
+        [loopBodyId]
+      )
+      const afterLoopNode = createMockNode(afterLoopId, [], [sentinelEndId])
+      const otherBranchNode = createMockNode(otherBranchId, [], [conditionId])
+
+      const nodes = new Map<string, DAGNode>([
+        [conditionId, conditionNode],
+        [sentinelStartId, sentinelStartNode],
+        [loopBodyId, loopBodyNode],
+        [sentinelEndId, sentinelEndNode],
+        [afterLoopId, afterLoopNode],
+        [otherBranchId, otherBranchNode],
+      ])
+      const dag = createMockDAG(nodes)
+      const edgeManager = new EdgeManager(dag)
+
+      // Condition selects "else" branch, deactivating the "if" branch (which contains the loop)
+      const readyNodes = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
+
+      // Only otherBranch should be ready
+      expect(readyNodes).toContain(otherBranchId)
+      expect(readyNodes).not.toContain(sentinelStartId)
+      // afterLoop should NOT be ready - its incoming edge from sentinel_end should be deactivated
+      expect(readyNodes).not.toContain(afterLoopId)
+
+      // Verify that countActiveIncomingEdges returns 0 for afterLoop
+      // (meaning the loop_exit edge was properly deactivated)
+      // Note: isNodeReady returns true when all edges are deactivated (no pending deps),
+      // but the node won't be in readyNodes since it wasn't reached via an active path
+      expect(edgeManager.isNodeReady(afterLoopNode)).toBe(true) // All edges deactivated = no blocking deps
+    })
+
+    it('should deactivate nodes after parallel when condition branch containing parallel is deactivated', () => {
+      // Similar scenario with parallel instead of loop
+      const conditionId = 'condition'
+      const parallelStartId = 'parallel-start'
+      const parallelBodyId = 'parallel-body'
+      const parallelEndId = 'parallel-end'
+      const afterParallelId = 'after-parallel'
+      const otherBranchId = 'other-branch'
+
+      const conditionNode = createMockNode(conditionId, [
+        { target: parallelStartId, sourceHandle: 'condition-if' },
+        { target: otherBranchId, sourceHandle: 'condition-else' },
+      ])
+      const parallelStartNode = createMockNode(
+        parallelStartId,
+        [{ target: parallelBodyId }],
+        [conditionId]
+      )
+      const parallelBodyNode = createMockNode(
+        parallelBodyId,
+        [{ target: parallelEndId }],
+        [parallelStartId]
+      )
+      const parallelEndNode = createMockNode(
+        parallelEndId,
+        [{ target: afterParallelId, sourceHandle: 'parallel_exit' }],
+        [parallelBodyId]
+      )
+      const afterParallelNode = createMockNode(afterParallelId, [], [parallelEndId])
+      const otherBranchNode = createMockNode(otherBranchId, [], [conditionId])
+
+      const nodes = new Map<string, DAGNode>([
+        [conditionId, conditionNode],
+        [parallelStartId, parallelStartNode],
+        [parallelBodyId, parallelBodyNode],
+        [parallelEndId, parallelEndNode],
+        [afterParallelId, afterParallelNode],
+        [otherBranchId, otherBranchNode],
+      ])
+      const dag = createMockDAG(nodes)
+      const edgeManager = new EdgeManager(dag)
+
+      // Condition selects "else" branch
+      const readyNodes = edgeManager.processOutgoingEdges(conditionNode, { selectedOption: 'else' })
+
+      expect(readyNodes).toContain(otherBranchId)
+      expect(readyNodes).not.toContain(parallelStartId)
+      expect(readyNodes).not.toContain(afterParallelId)
+      // isNodeReady returns true when all edges are deactivated (no pending deps)
+      expect(edgeManager.isNodeReady(afterParallelNode)).toBe(true)
+    })
+
+    it('should still correctly handle normal loop exit (not deactivate when loop runs)', () => {
+      // When a loop actually executes and exits normally, after_loop should become ready
+      const sentinelStartId = 'sentinel-start'
+      const loopBodyId = 'loop-body'
+      const sentinelEndId = 'sentinel-end'
+      const afterLoopId = 'after-loop'
+
+      const sentinelStartNode = createMockNode(sentinelStartId, [{ target: loopBodyId }])
+      const loopBodyNode = createMockNode(
+        loopBodyId,
+        [{ target: sentinelEndId }],
+        [sentinelStartId]
+      )
+      const sentinelEndNode = createMockNode(
+        sentinelEndId,
+        [
+          { target: sentinelStartId, sourceHandle: 'loop_continue' },
+          { target: afterLoopId, sourceHandle: 'loop_exit' },
+        ],
+        [loopBodyId]
+      )
+      const afterLoopNode = createMockNode(afterLoopId, [], [sentinelEndId])
+
+      const nodes = new Map<string, DAGNode>([
+        [sentinelStartId, sentinelStartNode],
+        [loopBodyId, loopBodyNode],
+        [sentinelEndId, sentinelEndNode],
+        [afterLoopId, afterLoopNode],
+      ])
+      const dag = createMockDAG(nodes)
+      const edgeManager = new EdgeManager(dag)
+
+      // Simulate sentinel_end completing with loop_exit (loop is done)
+      const readyNodes = edgeManager.processOutgoingEdges(sentinelEndNode, {
+        selectedRoute: 'loop_exit',
+      })
+
+      // afterLoop should be ready
+      expect(readyNodes).toContain(afterLoopId)
+    })
+  })
 })

View File

@@ -243,7 +243,7 @@ export class EdgeManager {
       }
 
       for (const [, outgoingEdge] of targetNode.outgoingEdges) {
-        if (!this.isControlEdge(outgoingEdge.sourceHandle)) {
+        if (!this.isBackwardsEdge(outgoingEdge.sourceHandle)) {
           this.deactivateEdgeAndDescendants(
             targetId,
             outgoingEdge.target,

View File

@@ -119,8 +119,6 @@ export function useCollaborativeWorkflow() {
     onWorkflowOperation,
     onSubblockUpdate,
     onVariableUpdate,
-    onUserJoined,
-    onUserLeft,
     onWorkflowDeleted,
     onWorkflowReverted,
     onOperationConfirmed,
@@ -484,14 +482,6 @@
       }
     }
 
-    const handleUserJoined = (data: any) => {
-      logger.info(`User joined: ${data.userName}`)
-    }
-
-    const handleUserLeft = (data: any) => {
-      logger.info(`User left: ${data.userId}`)
-    }
-
     const handleWorkflowDeleted = (data: any) => {
       const { workflowId } = data
       logger.warn(`Workflow ${workflowId} has been deleted`)
@@ -600,26 +590,17 @@
       failOperation(operationId, retryable)
     }
 
-    // Register event handlers
     onWorkflowOperation(handleWorkflowOperation)
     onSubblockUpdate(handleSubblockUpdate)
     onVariableUpdate(handleVariableUpdate)
-    onUserJoined(handleUserJoined)
-    onUserLeft(handleUserLeft)
     onWorkflowDeleted(handleWorkflowDeleted)
     onWorkflowReverted(handleWorkflowReverted)
     onOperationConfirmed(handleOperationConfirmed)
     onOperationFailed(handleOperationFailed)
-
-    return () => {
-      // Cleanup handled by socket context
-    }
   }, [
     onWorkflowOperation,
     onSubblockUpdate,
     onVariableUpdate,
-    onUserJoined,
-    onUserLeft,
     onWorkflowDeleted,
     onWorkflowReverted,
     onOperationConfirmed,

View File

@@ -10,6 +10,7 @@ import {
   type KnowledgeBaseArgs,
 } from '@/lib/copilot/tools/shared/schemas'
 import { useCopilotStore } from '@/stores/panel/copilot/store'
+import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
 
 /**
  * Client tool for knowledge base operations
@@ -102,7 +103,19 @@ export class KnowledgeBaseClientTool extends BaseClientTool {
     const logger = createLogger('KnowledgeBaseClientTool')
     try {
       this.setState(ClientToolCallState.executing)
-      const payload: KnowledgeBaseArgs = { ...(args || { operation: 'list' }) }
+
+      // Get the workspace ID from the workflow registry hydration state
+      const { hydration } = useWorkflowRegistry.getState()
+      const workspaceId = hydration.workspaceId
+
+      // Build payload with workspace ID included in args
+      const payload: KnowledgeBaseArgs = {
+        ...(args || { operation: 'list' }),
+        args: {
+          ...(args?.args || {}),
+          workspaceId: workspaceId || undefined,
+        },
+      }
+
       const res = await fetch('/api/copilot/execute-copilot-server-tool', {
         method: 'POST',

View File

@@ -2508,6 +2508,10 @@ async function validateWorkflowSelectorIds(
     for (const subBlockConfig of blockConfig.subBlocks) {
       if (!SELECTOR_TYPES.has(subBlockConfig.type)) continue
 
+      // Skip oauth-input - credentials are pre-validated before edit application
+      // This allows existing collaborator credentials to remain untouched
+      if (subBlockConfig.type === 'oauth-input') continue
+
       const subBlockValue = blockData.subBlocks?.[subBlockConfig.id]?.value
       if (!subBlockValue) continue
@@ -2573,6 +2577,295 @@ async function validateWorkflowSelectorIds(
   return errors
 }
 
+/**
+ * Pre-validates credential and apiKey inputs in operations before they are applied.
+ * - Validates oauth-input (credential) IDs belong to the user
+ * - Filters out apiKey inputs for hosted models when isHosted is true
+ * - Also validates credentials and apiKeys in nestedNodes (blocks inside loop/parallel)
+ * Returns validation errors for any removed inputs.
+ */
+async function preValidateCredentialInputs(
+  operations: EditWorkflowOperation[],
+  context: { userId: string },
+  workflowState?: Record<string, unknown>
+): Promise<{ filteredOperations: EditWorkflowOperation[]; errors: ValidationError[] }> {
+  const { isHosted } = await import('@/lib/core/config/feature-flags')
+  const { getHostedModels } = await import('@/providers/utils')
+  const logger = createLogger('PreValidateCredentials')
+  const errors: ValidationError[] = []
+
+  // Collect credential and apiKey inputs that need validation/filtering
+  const credentialInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    fieldName: string
+    value: string
+    nestedBlockId?: string
+  }> = []
+  const hostedApiKeyInputs: Array<{
+    operationIndex: number
+    blockId: string
+    blockType: string
+    model: string
+    nestedBlockId?: string
+  }> = []
+
+  const hostedModelsLower = isHosted ? new Set(getHostedModels().map((m) => m.toLowerCase())) : null
+
+  /**
+   * Collect credential inputs from a block's inputs based on its block config
+   */
+  function collectCredentialInputs(
+    blockConfig: ReturnType<typeof getBlock>,
+    inputs: Record<string, unknown>,
+    opIndex: number,
+    blockId: string,
+    blockType: string,
+    nestedBlockId?: string
+  ) {
+    if (!blockConfig) return
+    for (const subBlockConfig of blockConfig.subBlocks) {
+      if (subBlockConfig.type !== 'oauth-input') continue
+      const inputValue = inputs[subBlockConfig.id]
+      if (!inputValue || typeof inputValue !== 'string' || inputValue.trim() === '') continue
+      credentialInputs.push({
+        operationIndex: opIndex,
+        blockId,
+        blockType,
+        fieldName: subBlockConfig.id,
+        value: inputValue,
+        nestedBlockId,
+      })
+    }
+  }
+
+  /**
+   * Check if apiKey should be filtered for a block with the given model
+   */
+  function collectHostedApiKeyInput(
+    inputs: Record<string, unknown>,
+    modelValue: string | undefined,
+    opIndex: number,
+    blockId: string,
+    blockType: string,
+    nestedBlockId?: string
+  ) {
+    if (!hostedModelsLower || !inputs.apiKey) return
+    if (!modelValue || typeof modelValue !== 'string') return
+    if (hostedModelsLower.has(modelValue.toLowerCase())) {
+      hostedApiKeyInputs.push({
+        operationIndex: opIndex,
+        blockId,
+        blockType,
+        model: modelValue,
+        nestedBlockId,
+      })
+    }
+  }
+
+  operations.forEach((op, opIndex) => {
+    // Process main block inputs
+    if (op.params?.inputs && op.params?.type) {
+      const blockConfig = getBlock(op.params.type)
+      if (blockConfig) {
+        // Collect credentials from main block
+        collectCredentialInputs(
+          blockConfig,
+          op.params.inputs as Record<string, unknown>,
+          opIndex,
+          op.block_id,
+          op.params.type
+        )
+
+        // Check for apiKey inputs on hosted models
+        let modelValue = (op.params.inputs as Record<string, unknown>).model as string | undefined
+
+        // For edit operations, if model is not being changed, check existing block's model
+        if (
+          !modelValue &&
+          op.operation_type === 'edit' &&
+          (op.params.inputs as Record<string, unknown>).apiKey &&
+          workflowState
+        ) {
+          const existingBlock = (workflowState.blocks as Record<string, unknown>)?.[op.block_id] as
+            | Record<string, unknown>
+            | undefined
+          const existingSubBlocks = existingBlock?.subBlocks as Record<string, unknown> | undefined
+          const existingModelSubBlock = existingSubBlocks?.model as
+            | Record<string, unknown>
+            | undefined
+          modelValue = existingModelSubBlock?.value as string | undefined
+        }
+
+        collectHostedApiKeyInput(
+          op.params.inputs as Record<string, unknown>,
+          modelValue,
+          opIndex,
+          op.block_id,
+          op.params.type
+        )
+      }
+    }
+
+    // Process nested nodes (blocks inside loop/parallel containers)
+    const nestedNodes = op.params?.nestedNodes as
+      | Record<string, Record<string, unknown>>
+      | undefined
+    if (nestedNodes) {
+      Object.entries(nestedNodes).forEach(([childId, childBlock]) => {
+        const childType = childBlock.type as string | undefined
+        const childInputs = childBlock.inputs as Record<string, unknown> | undefined
+        if (!childType || !childInputs) return
+        const childBlockConfig = getBlock(childType)
+        if (!childBlockConfig) return
+
+        // Collect credentials from nested block
+        collectCredentialInputs(
+          childBlockConfig,
+          childInputs,
+          opIndex,
+          op.block_id,
+          childType,
+          childId
+        )
+
+        // Check for apiKey inputs on hosted models in nested block
+        const modelValue = childInputs.model as string | undefined
+        collectHostedApiKeyInput(childInputs, modelValue, opIndex, op.block_id, childType, childId)
+      })
+    }
+  })
+
+  const hasCredentialsToValidate = credentialInputs.length > 0
+  const hasHostedApiKeysToFilter = hostedApiKeyInputs.length > 0
+
+  if (!hasCredentialsToValidate && !hasHostedApiKeysToFilter) {
+    return { filteredOperations: operations, errors }
+  }
+
+  // Deep clone operations so we can modify them
+  const filteredOperations = structuredClone(operations)
+
+  // Filter out apiKey inputs for hosted models and add validation errors
+  if (hasHostedApiKeysToFilter) {
+    logger.info('Filtering apiKey inputs for hosted models', { count: hostedApiKeyInputs.length })
+    for (const apiKeyInput of hostedApiKeyInputs) {
+      const op = filteredOperations[apiKeyInput.operationIndex]
+
+      // Handle nested block apiKey filtering
+      if (apiKeyInput.nestedBlockId) {
+        const nestedNodes = op.params?.nestedNodes as
+          | Record<string, Record<string, unknown>>
+          | undefined
+        const nestedBlock = nestedNodes?.[apiKeyInput.nestedBlockId]
+        const nestedInputs = nestedBlock?.inputs as Record<string, unknown> | undefined
+        if (nestedInputs?.apiKey) {
+          nestedInputs.apiKey = undefined
+          logger.debug('Filtered apiKey for hosted model in nested block', {
+            parentBlockId: apiKeyInput.blockId,
+            nestedBlockId: apiKeyInput.nestedBlockId,
+            model: apiKeyInput.model,
+          })
+          errors.push({
+            blockId: apiKeyInput.nestedBlockId,
+            blockType: apiKeyInput.blockType,
+            field: 'apiKey',
+            value: '[redacted]',
+            error: `Cannot set API key for hosted model "${apiKeyInput.model}" - API keys are managed by the platform when using hosted models`,
+          })
+        }
+      } else if (op.params?.inputs?.apiKey) {
+        // Handle main block apiKey filtering
+        op.params.inputs.apiKey = undefined
+        logger.debug('Filtered apiKey for hosted model', {
+          blockId: apiKeyInput.blockId,
+          model: apiKeyInput.model,
+        })
+        errors.push({
+          blockId: apiKeyInput.blockId,
+          blockType: apiKeyInput.blockType,
+          field: 'apiKey',
+          value: '[redacted]',
+          error: `Cannot set API key for hosted model "${apiKeyInput.model}" - API keys are managed by the platform when using hosted models`,
+        })
+      }
+    }
+  }
+
+  // Validate credential inputs
+  if (hasCredentialsToValidate) {
+    logger.info('Pre-validating credential inputs', {
+      credentialCount: credentialInputs.length,
+      userId: context.userId,
+    })
+
+    const allCredentialIds = credentialInputs.map((c) => c.value)
+    const validationResult = await validateSelectorIds('oauth-input', allCredentialIds, context)
+    const invalidSet = new Set(validationResult.invalid)
+
+    if (invalidSet.size > 0) {
+      for (const credInput of credentialInputs) {
+        if (!invalidSet.has(credInput.value)) continue
+        const op = filteredOperations[credInput.operationIndex]
+
+        // Handle nested block credential removal
+        if (credInput.nestedBlockId) {
+          const nestedNodes = op.params?.nestedNodes as
+            | Record<string, Record<string, unknown>>
+            | undefined
+          const nestedBlock = nestedNodes?.[credInput.nestedBlockId]
+          const nestedInputs = nestedBlock?.inputs as Record<string, unknown> | undefined
+          if (nestedInputs?.[credInput.fieldName]) {
+            delete nestedInputs[credInput.fieldName]
+            logger.info('Removed invalid credential from nested block', {
+              parentBlockId: credInput.blockId,
+              nestedBlockId: credInput.nestedBlockId,
+              field: credInput.fieldName,
+              invalidValue: credInput.value,
+            })
+          }
+        } else if (op.params?.inputs?.[credInput.fieldName]) {
+          // Handle main block credential removal
+          delete op.params.inputs[credInput.fieldName]
+          logger.info('Removed invalid credential from operation', {
+            blockId: credInput.blockId,
+            field: credInput.fieldName,
+            invalidValue: credInput.value,
+          })
+        }
+
+        const warningInfo = validationResult.warning ? `. ${validationResult.warning}` : ''
+        const errorBlockId = credInput.nestedBlockId ?? credInput.blockId
+        errors.push({
+          blockId: errorBlockId,
+          blockType: credInput.blockType,
+          field: credInput.fieldName,
+          value: credInput.value,
+          error: `Invalid credential ID "${credInput.value}" - credential does not exist or user doesn't have access${warningInfo}`,
+        })
+      }
+
+      logger.warn('Filtered out invalid credentials', {
+        invalidCount: invalidSet.size,
+      })
+    }
+  }
+
+  return { filteredOperations, errors }
+}
+
 async function getCurrentWorkflowStateFromDb(
   workflowId: string
 ): Promise<{ workflowState: any; subBlockValues: Record<string, Record<string, any>> }> {
@@ -2657,12 +2950,29 @@ export const editWorkflowServerTool: BaseServerTool<EditWorkflowParams, any> = {
   // Get permission config for the user
   const permissionConfig = context?.userId ? await getUserPermissionConfig(context.userId) : null
 
+  // Pre-validate credential and apiKey inputs before applying operations
+  // This filters out invalid credentials and apiKeys for hosted models
+  let operationsToApply = operations
+  const credentialErrors: ValidationError[] = []
+  if (context?.userId) {
+    const { filteredOperations, errors: credErrors } = await preValidateCredentialInputs(
+      operations,
+      { userId: context.userId },
+      workflowState
+    )
+    operationsToApply = filteredOperations
+    credentialErrors.push(...credErrors)
+  }
+
   // Apply operations directly to the workflow state
   const {
     state: modifiedWorkflowState,
     validationErrors,
     skippedItems,
-  } = applyOperationsToWorkflowState(workflowState, operations, permissionConfig)
+  } = applyOperationsToWorkflowState(workflowState, operationsToApply, permissionConfig)
+
+  // Add credential validation errors
+  validationErrors.push(...credentialErrors)
 
   // Get workspaceId for selector validation
   let workspaceId: string | undefined

View File

@@ -74,6 +74,7 @@
"@react-email/components": "^0.0.34", "@react-email/components": "^0.0.34",
"@react-email/render": "2.0.0", "@react-email/render": "2.0.0",
"@sim/logger": "workspace:*", "@sim/logger": "workspace:*",
"@socket.io/redis-adapter": "8.3.0",
"@t3-oss/env-nextjs": "0.13.4", "@t3-oss/env-nextjs": "0.13.4",
"@tanstack/react-query": "5.90.8", "@tanstack/react-query": "5.90.8",
"@tanstack/react-query-devtools": "5.90.2", "@tanstack/react-query-devtools": "5.90.2",
@@ -144,6 +145,7 @@
"react-simple-code-editor": "^0.14.1", "react-simple-code-editor": "^0.14.1",
"react-window": "2.2.3", "react-window": "2.2.3",
"reactflow": "^11.11.4", "reactflow": "^11.11.4",
"redis": "5.10.0",
"rehype-autolink-headings": "^7.1.0", "rehype-autolink-headings": "^7.1.0",
"rehype-slug": "^6.0.0", "rehype-slug": "^6.0.0",
"remark-gfm": "4.0.1", "remark-gfm": "4.0.1",

View File

@@ -1,5 +1,7 @@
 import type { Server as HttpServer } from 'http'
 import { createLogger } from '@sim/logger'
+import { createAdapter } from '@socket.io/redis-adapter'
+import { createClient, type RedisClientType } from 'redis'
 import { Server } from 'socket.io'
 import { env } from '@/lib/core/config/env'
 import { isProd } from '@/lib/core/config/feature-flags'
@@ -7,9 +9,16 @@ import { getBaseUrl } from '@/lib/core/utils/urls'
 const logger = createLogger('SocketIOConfig')
 
-/**
- * Get allowed origins for Socket.IO CORS configuration
- */
+/** Socket.IO ping timeout - how long to wait for pong before considering connection dead */
+const PING_TIMEOUT_MS = 60000
+/** Socket.IO ping interval - how often to send ping packets */
+const PING_INTERVAL_MS = 25000
+/** Maximum HTTP buffer size for Socket.IO messages */
+const MAX_HTTP_BUFFER_SIZE = 1e6
+
+let adapterPubClient: RedisClientType | null = null
+let adapterSubClient: RedisClientType | null = null
+
 function getAllowedOrigins(): string[] {
   const allowedOrigins = [
     getBaseUrl(),
@@ -24,11 +33,10 @@ function getAllowedOrigins(): string[] {
 }
 
 /**
- * Create and configure a Socket.IO server instance
- * @param httpServer - The HTTP server instance to attach Socket.IO to
- * @returns Configured Socket.IO server instance
+ * Create and configure a Socket.IO server instance.
+ * If REDIS_URL is configured, adds Redis adapter for cross-pod broadcasting.
  */
-export function createSocketIOServer(httpServer: HttpServer): Server {
+export async function createSocketIOServer(httpServer: HttpServer): Promise<Server> {
   const allowedOrigins = getAllowedOrigins()
 
   const io = new Server(httpServer, {
@@ -36,31 +44,110 @@ export function createSocketIOServer(httpServer: HttpServer): Server {
       origin: allowedOrigins,
       methods: ['GET', 'POST', 'OPTIONS'],
      allowedHeaders: ['Content-Type', 'Authorization', 'Cookie', 'socket.io'],
-      credentials: true, // Enable credentials to accept cookies
+      credentials: true,
     },
-    transports: ['websocket', 'polling'], // WebSocket first, polling as fallback
-    allowEIO3: true, // Keep legacy support for compatibility
-    pingTimeout: 60000, // Back to original conservative setting
-    pingInterval: 25000, // Back to original interval
-    maxHttpBufferSize: 1e6,
+    transports: ['websocket', 'polling'],
+    allowEIO3: true,
+    pingTimeout: PING_TIMEOUT_MS,
+    pingInterval: PING_INTERVAL_MS,
+    maxHttpBufferSize: MAX_HTTP_BUFFER_SIZE,
     cookie: {
       name: 'io',
       path: '/',
       httpOnly: true,
-      sameSite: 'none', // Required for cross-origin cookies
-      secure: isProd, // HTTPS in production
+      sameSite: 'none',
+      secure: isProd,
     },
   })
 
+  if (env.REDIS_URL) {
+    logger.info('Configuring Socket.IO Redis adapter...')
+
+    const redisOptions = {
+      url: env.REDIS_URL,
+      socket: {
+        reconnectStrategy: (retries: number) => {
+          if (retries > 10) {
+            logger.error('Redis adapter reconnection failed after 10 attempts')
+            return new Error('Redis adapter reconnection failed')
+          }
+          const delay = Math.min(retries * 100, 3000)
+          logger.warn(`Redis adapter reconnecting in ${delay}ms (attempt ${retries})`)
+          return delay
+        },
+      },
+    }
+
+    // Create separate clients for pub and sub (recommended for reliability)
+    adapterPubClient = createClient(redisOptions)
+    adapterSubClient = createClient(redisOptions)
+
+    adapterPubClient.on('error', (err) => {
+      logger.error('Redis adapter pub client error:', err)
+    })
+    adapterSubClient.on('error', (err) => {
+      logger.error('Redis adapter sub client error:', err)
+    })
+    adapterPubClient.on('ready', () => {
+      logger.info('Redis adapter pub client ready')
+    })
+    adapterSubClient.on('ready', () => {
+      logger.info('Redis adapter sub client ready')
+    })
+
+    await Promise.all([adapterPubClient.connect(), adapterSubClient.connect()])
+    io.adapter(createAdapter(adapterPubClient, adapterSubClient))
+
+    logger.info('Socket.IO Redis adapter connected - cross-pod broadcasting enabled')
+  } else {
+    logger.warn('REDIS_URL not configured - running in single-pod mode')
+  }
+
   logger.info('Socket.IO server configured with:', {
     allowedOrigins: allowedOrigins.length,
     transports: ['websocket', 'polling'],
-    pingTimeout: 60000,
-    pingInterval: 25000,
-    maxHttpBufferSize: 1e6,
+    pingTimeout: PING_TIMEOUT_MS,
+    pingInterval: PING_INTERVAL_MS,
+    maxHttpBufferSize: MAX_HTTP_BUFFER_SIZE,
     cookieSecure: isProd,
     corsCredentials: true,
+    redisAdapter: !!env.REDIS_URL,
   })
 
   return io
 }
+
+/**
+ * Clean up Redis adapter connections.
+ * Call this during graceful shutdown.
+ */
+export async function shutdownSocketIOAdapter(): Promise<void> {
+  const closePromises: Promise<void>[] = []
+
+  if (adapterPubClient) {
+    closePromises.push(
+      adapterPubClient.quit().then(() => {
+        logger.info('Redis adapter pub client closed')
+        adapterPubClient = null
+      })
+    )
+  }
+
+  if (adapterSubClient) {
+    closePromises.push(
+      adapterSubClient.quit().then(() => {
+        logger.info('Redis adapter sub client closed')
+        adapterSubClient = null
+      })
    )
  }
+
+  if (closePromises.length > 0) {
+    await Promise.all(closePromises)
+    logger.info('Socket.IO Redis adapter shutdown complete')
+  }
+}
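
Because `createSocketIOServer` is now async (the Redis adapter must connect before the server is returned), the caller has to await it and should drain the adapter on shutdown. A rough sketch of that wiring, assuming an entrypoint shape and import path that are not shown in this compare:

import { createServer } from 'http'
// Import path assumed for illustration
import { createSocketIOServer, shutdownSocketIOAdapter } from '@/socket/config'

async function start() {
  const httpServer = createServer()
  // Awaited so the Redis adapter (when REDIS_URL is set) is connected before traffic arrives
  const io = await createSocketIOServer(httpServer)
  httpServer.listen(3002)

  process.on('SIGTERM', async () => {
    io.close() // stop accepting new socket connections
    await shutdownSocketIOAdapter() // quit the adapter's pub/sub Redis clients
    httpServer.close(() => process.exit(0))
  })
}

start()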

View File

@@ -1,17 +1,12 @@
 import { createLogger } from '@sim/logger'
-import type { HandlerDependencies } from '@/socket/handlers/workflow'
+import { cleanupPendingSubblocksForSocket } from '@/socket/handlers/subblocks'
+import { cleanupPendingVariablesForSocket } from '@/socket/handlers/variables'
 import type { AuthenticatedSocket } from '@/socket/middleware/auth'
-import type { RoomManager } from '@/socket/rooms/manager'
+import type { IRoomManager } from '@/socket/rooms'
 
 const logger = createLogger('ConnectionHandlers')
 
-export function setupConnectionHandlers(
-  socket: AuthenticatedSocket,
-  deps: HandlerDependencies | RoomManager
-) {
-  const roomManager =
-    deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager)
-
+export function setupConnectionHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
   socket.on('error', (error) => {
     logger.error(`Socket ${socket.id} error:`, error)
   })
@@ -20,13 +15,22 @@
     logger.error(`Socket ${socket.id} connection error:`, error)
   })
 
-  socket.on('disconnect', (reason) => {
-    const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
-    const session = roomManager.getUserSession(socket.id)
+  socket.on('disconnect', async (reason) => {
+    try {
+      // Clean up pending debounce entries for this socket to prevent memory leaks
+      cleanupPendingSubblocksForSocket(socket.id)
+      cleanupPendingVariablesForSocket(socket.id)
 
-    if (workflowId && session) {
-      roomManager.cleanupUserFromRoom(socket.id, workflowId)
-      roomManager.broadcastPresenceUpdate(workflowId)
+      const workflowId = await roomManager.removeUserFromRoom(socket.id)
+
+      if (workflowId) {
+        await roomManager.broadcastPresenceUpdate(workflowId)
+        logger.info(
+          `Socket ${socket.id} disconnected from workflow ${workflowId} (reason: ${reason})`
+        )
+      }
+    } catch (error) {
+      logger.error(`Error handling disconnect for socket ${socket.id}:`, error)
    }
   })
 }

View File

@@ -5,16 +5,9 @@ import { setupSubblocksHandlers } from '@/socket/handlers/subblocks'
 import { setupVariablesHandlers } from '@/socket/handlers/variables'
 import { setupWorkflowHandlers } from '@/socket/handlers/workflow'
 import type { AuthenticatedSocket } from '@/socket/middleware/auth'
-import type { RoomManager, UserPresence, WorkflowRoom } from '@/socket/rooms/manager'
+import type { IRoomManager } from '@/socket/rooms'
 
-export type { UserPresence, WorkflowRoom }
-
-/**
- * Sets up all socket event handlers for an authenticated socket connection
- * @param socket - The authenticated socket instance
- * @param roomManager - Room manager instance for state management
- */
-export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: RoomManager) {
+export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
   setupWorkflowHandlers(socket, roomManager)
   setupOperationsHandlers(socket, roomManager)
   setupSubblocksHandlers(socket, roomManager)
@@ -22,12 +15,3 @@ export function setupAllHandlers(socket: AuthenticatedSocket, roomManager: RoomM
   setupPresenceHandlers(socket, roomManager)
   setupConnectionHandlers(socket, roomManager)
 }
-
-export {
-  setupWorkflowHandlers,
-  setupOperationsHandlers,
-  setupSubblocksHandlers,
-  setupVariablesHandlers,
-  setupPresenceHandlers,
-  setupConnectionHandlers,
-}

View File

@@ -10,38 +10,41 @@ import {
   WORKFLOW_OPERATIONS,
 } from '@/socket/constants'
 import { persistWorkflowOperation } from '@/socket/database/operations'
-import type { HandlerDependencies } from '@/socket/handlers/workflow'
 import type { AuthenticatedSocket } from '@/socket/middleware/auth'
 import { checkRolePermission } from '@/socket/middleware/permissions'
-import type { RoomManager } from '@/socket/rooms/manager'
+import type { IRoomManager } from '@/socket/rooms'
 import { WorkflowOperationSchema } from '@/socket/validation/schemas'
 
 const logger = createLogger('OperationsHandlers')
 
-export function setupOperationsHandlers(
-  socket: AuthenticatedSocket,
-  deps: HandlerDependencies | RoomManager
-) {
-  const roomManager =
-    deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager)
-
+export function setupOperationsHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
   socket.on('workflow-operation', async (data) => {
-    const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
-    const session = roomManager.getUserSession(socket.id)
+    const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
+    const session = await roomManager.getUserSession(socket.id)
 
     if (!workflowId || !session) {
-      socket.emit('error', {
-        type: 'NOT_JOINED',
-        message: 'Not joined to any workflow',
+      socket.emit('operation-forbidden', {
+        type: 'SESSION_ERROR',
+        message: 'Session expired, please rejoin workflow',
       })
+      if (data?.operationId) {
+        socket.emit('operation-failed', { operationId: data.operationId, error: 'Session expired' })
+      }
       return
     }
 
-    const room = roomManager.getWorkflowRoom(workflowId)
-    if (!room) {
-      socket.emit('error', {
+    const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
+    if (!hasRoom) {
+      socket.emit('operation-forbidden', {
         type: 'ROOM_NOT_FOUND',
         message: 'Workflow room not found',
       })
+      if (data?.operationId) {
+        socket.emit('operation-failed', {
+          operationId: data.operationId,
+          error: 'Workflow room not found',
+        })
+      }
       return
     }
@@ -60,16 +63,18 @@
       isPositionUpdate && 'commit' in payload ? payload.commit === true : false
     const operationTimestamp = isPositionUpdate ? timestamp : Date.now()
 
+    // Get user presence for permission checking
+    const users = await roomManager.getWorkflowUsers(workflowId)
+    const userPresence = users.find((u) => u.socketId === socket.id)
+
     // Skip permission checks for non-committed position updates (broadcasts only, no persistence)
     if (isPositionUpdate && !commitPositionUpdate) {
       // Update last activity
-      const userPresence = room.users.get(socket.id)
       if (userPresence) {
-        userPresence.lastActivity = Date.now()
+        await roomManager.updateUserActivity(workflowId, socket.id, { lastActivity: Date.now() })
       }
     } else {
       // Check permissions from cached role for all other operations
-      const userPresence = room.users.get(socket.id)
       if (!userPresence) {
         logger.warn(`User presence not found for socket ${socket.id}`)
         socket.emit('operation-forbidden', {
@@ -78,10 +83,13 @@
           operation,
           target,
         })
+        if (operationId) {
+          socket.emit('operation-failed', { operationId, error: 'User session not found' })
+        }
         return
       }
 
-      userPresence.lastActivity = Date.now()
+      await roomManager.updateUserActivity(workflowId, socket.id, { lastActivity: Date.now() })
 
       // Check permissions using cached role (no DB query)
       const permissionCheck = checkRolePermission(userPresence.role, operation)
@@ -132,7 +140,7 @@
         timestamp: operationTimestamp,
         userId: session.userId,
       })
-      room.lastModified = Date.now()
+      await roomManager.updateRoomLastModified(workflowId)
 
       if (operationId) {
        socket.emit('operation-confirmed', {
@@ -178,7 +186,7 @@
         timestamp: operationTimestamp,
         userId: session.userId,
       })
-      room.lastModified = Date.now()
+      await roomManager.updateRoomLastModified(workflowId)
if (operationId) { if (operationId) {
socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() }) socket.emit('operation-confirmed', { operationId, serverTimestamp: Date.now() })
@@ -211,7 +219,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
const broadcastData = { const broadcastData = {
operation, operation,
@@ -251,7 +259,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
const broadcastData = { const broadcastData = {
operation, operation,
@@ -288,7 +296,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -320,7 +328,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -349,7 +357,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -381,7 +389,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -413,7 +421,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -445,7 +453,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -474,7 +482,7 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
socket.to(workflowId).emit('workflow-operation', { socket.to(workflowId).emit('workflow-operation', {
operation, operation,
@@ -503,27 +511,24 @@ export function setupOperationsHandlers(
userId: session.userId, userId: session.userId,
}) })
room.lastModified = Date.now() await roomManager.updateRoomLastModified(workflowId)
const broadcastData = { const broadcastData = {
operation, operation,
target, target,
payload, payload,
timestamp: operationTimestamp, // Preserve client timestamp for position updates timestamp: operationTimestamp,
senderId: socket.id, senderId: socket.id,
userId: session.userId, userId: session.userId,
userName: session.userName, userName: session.userName,
// Add operation metadata for better client handling
metadata: { metadata: {
workflowId, workflowId,
operationId: crypto.randomUUID(), operationId: crypto.randomUUID(),
isPositionUpdate, // Flag to help clients handle position updates specially
}, },
} }
socket.to(workflowId).emit('workflow-operation', broadcastData) socket.to(workflowId).emit('workflow-operation', broadcastData)
// Emit confirmation if operationId is provided
if (operationId) { if (operationId) {
socket.emit('operation-confirmed', { socket.emit('operation-confirmed', {
operationId, operationId,
@@ -533,16 +538,14 @@ export function setupOperationsHandlers(
} catch (error) { } catch (error) {
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred' const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred'
// Emit operation-failed for queue-tracked operations
if (operationId) { if (operationId) {
socket.emit('operation-failed', { socket.emit('operation-failed', {
operationId, operationId,
error: errorMessage, error: errorMessage,
retryable: !(error instanceof ZodError), // Don't retry validation errors retryable: !(error instanceof ZodError),
}) })
} }
// Also emit legacy operation-error for backward compatibility
if (error instanceof ZodError) { if (error instanceof ZodError) {
socket.emit('operation-error', { socket.emit('operation-error', {
type: 'VALIDATION_ERROR', type: 'VALIDATION_ERROR',
@@ -553,7 +556,6 @@ export function setupOperationsHandlers(
}) })
logger.warn(`Validation error for operation from ${session.userId}:`, error.errors) logger.warn(`Validation error for operation from ${session.userId}:`, error.errors)
} else if (error instanceof Error) { } else if (error instanceof Error) {
// Handle specific database errors
if (error.message.includes('not found')) { if (error.message.includes('not found')) {
socket.emit('operation-error', { socket.emit('operation-error', {
type: 'RESOURCE_NOT_FOUND', type: 'RESOURCE_NOT_FOUND',

View File

@@ -1,62 +1,53 @@
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { HandlerDependencies } from '@/socket/handlers/workflow'
import type { AuthenticatedSocket } from '@/socket/middleware/auth' import type { AuthenticatedSocket } from '@/socket/middleware/auth'
import type { RoomManager } from '@/socket/rooms/manager' import type { IRoomManager } from '@/socket/rooms'
const logger = createLogger('PresenceHandlers') const logger = createLogger('PresenceHandlers')
export function setupPresenceHandlers( export function setupPresenceHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket: AuthenticatedSocket, socket.on('cursor-update', async ({ cursor }) => {
deps: HandlerDependencies | RoomManager try {
) { const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const roomManager = const session = await roomManager.getUserSession(socket.id)
deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager)
socket.on('cursor-update', ({ cursor }) => {
const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
const session = roomManager.getUserSession(socket.id)
if (!workflowId || !session) return if (!workflowId || !session) return
const room = roomManager.getWorkflowRoom(workflowId) // Update cursor in room state
if (!room) return await roomManager.updateUserActivity(workflowId, socket.id, { cursor })
const userPresence = room.users.get(socket.id) // Broadcast to other users in the room
if (userPresence) { socket.to(workflowId).emit('cursor-update', {
userPresence.cursor = cursor socketId: socket.id,
userPresence.lastActivity = Date.now() userId: session.userId,
userName: session.userName,
avatarUrl: session.avatarUrl,
cursor,
})
} catch (error) {
logger.error(`Error handling cursor update for socket ${socket.id}:`, error)
} }
socket.to(workflowId).emit('cursor-update', {
socketId: socket.id,
userId: session.userId,
userName: session.userName,
avatarUrl: session.avatarUrl,
cursor,
})
}) })
// Handle user selection (for showing what block/element a user has selected) socket.on('selection-update', async ({ selection }) => {
socket.on('selection-update', ({ selection }) => { try {
const workflowId = roomManager.getWorkflowIdForSocket(socket.id) const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = roomManager.getUserSession(socket.id) const session = await roomManager.getUserSession(socket.id)
if (!workflowId || !session) return if (!workflowId || !session) return
const room = roomManager.getWorkflowRoom(workflowId) // Update selection in room state
if (!room) return await roomManager.updateUserActivity(workflowId, socket.id, { selection })
const userPresence = room.users.get(socket.id) // Broadcast to other users in the room
if (userPresence) { socket.to(workflowId).emit('selection-update', {
userPresence.selection = selection socketId: socket.id,
userPresence.lastActivity = Date.now() userId: session.userId,
userName: session.userName,
avatarUrl: session.avatarUrl,
selection,
})
} catch (error) {
logger.error(`Error handling selection update for socket ${socket.id}:`, error)
} }
socket.to(workflowId).emit('selection-update', {
socketId: socket.id,
userId: session.userId,
userName: session.userName,
avatarUrl: session.avatarUrl,
selection,
})
}) })
} }
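The presence handlers now route cursor and selection state through the room manager and rebroadcast to the room. A small client-side sketch follows, assuming socket.io-client and the broadcast payload shown in this diff (`socketId`, `userId`, `userName`, `avatarUrl`, `cursor`/`selection`); the throttle interval and rendering are illustrative assumptions.

```ts
// Hypothetical client-side presence wiring; not part of this PR.
import type { Socket } from 'socket.io-client'

const CURSOR_THROTTLE_MS = 50 // illustrative; keeps emit volume bounded
let lastSent = 0

export function bindPresence(socket: Socket) {
  // Emit the local cursor position, throttled on the client side.
  const sendCursor = (x: number, y: number) => {
    const now = Date.now()
    if (now - lastSent < CURSOR_THROTTLE_MS) return
    lastSent = now
    socket.emit('cursor-update', { cursor: { x, y } })
  }

  // Render remote collaborators from the broadcasts added in this handler.
  socket.on('cursor-update', ({ socketId, userName, cursor }) => {
    console.log(`${userName} (${socketId}) moved to`, cursor)
  })
  socket.on('selection-update', ({ socketId, userName, selection }) => {
    console.log(`${userName} (${socketId}) selected`, selection)
  })

  return { sendCursor }
}
```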

View File

@@ -2,12 +2,14 @@ import { db } from '@sim/db'
import { workflow, workflowBlocks } from '@sim/db/schema' import { workflow, workflowBlocks } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm' import { and, eq } from 'drizzle-orm'
import type { HandlerDependencies } from '@/socket/handlers/workflow'
import type { AuthenticatedSocket } from '@/socket/middleware/auth' import type { AuthenticatedSocket } from '@/socket/middleware/auth'
import type { RoomManager } from '@/socket/rooms/manager' import type { IRoomManager } from '@/socket/rooms'
const logger = createLogger('SubblocksHandlers') const logger = createLogger('SubblocksHandlers')
/** Debounce interval for coalescing rapid subblock updates before persisting */
const DEBOUNCE_INTERVAL_MS = 25
type PendingSubblock = { type PendingSubblock = {
latest: { blockId: string; subblockId: string; value: any; timestamp: number } latest: { blockId: string; subblockId: string; value: any; timestamp: number }
timeout: NodeJS.Timeout timeout: NodeJS.Timeout
@@ -18,44 +20,61 @@ type PendingSubblock = {
// Keyed by `${workflowId}:${blockId}:${subblockId}` // Keyed by `${workflowId}:${blockId}:${subblockId}`
const pendingSubblockUpdates = new Map<string, PendingSubblock>() const pendingSubblockUpdates = new Map<string, PendingSubblock>()
export function setupSubblocksHandlers( /**
socket: AuthenticatedSocket, * Cleans up pending updates for a disconnected socket.
deps: HandlerDependencies | RoomManager * Removes the socket's operationIds from pending updates to prevent memory leaks.
) { */
const roomManager = export function cleanupPendingSubblocksForSocket(socketId: string): void {
deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager) for (const [, pending] of pendingSubblockUpdates.entries()) {
// Remove this socket's operation entries
for (const [opId, sid] of pending.opToSocket.entries()) {
if (sid === socketId) {
pending.opToSocket.delete(opId)
}
}
// If no more operations are waiting, the timeout will still fire and flush
// This is fine - the update will still persist, just no confirmation to send
}
}
export function setupSubblocksHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('subblock-update', async (data) => { socket.on('subblock-update', async (data) => {
const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
const session = roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
logger.debug(`Ignoring subblock update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
return
}
const { blockId, subblockId, value, timestamp, operationId } = data const { blockId, subblockId, value, timestamp, operationId } = data
const room = roomManager.getWorkflowRoom(workflowId)
if (!room) {
logger.debug(`Ignoring subblock update: workflow room not found`, {
socketId: socket.id,
workflowId,
blockId,
subblockId,
})
return
}
try { try {
const userPresence = room.users.get(socket.id) const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
if (userPresence) { const session = await roomManager.getUserSession(socket.id)
userPresence.lastActivity = Date.now()
if (!workflowId || !session) {
logger.debug(`Ignoring subblock update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
type: 'SESSION_ERROR',
message: 'Session expired, please rejoin workflow',
})
if (operationId) {
socket.emit('operation-failed', { operationId, error: 'Session expired' })
}
return
} }
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring subblock update: workflow room not found`, {
socketId: socket.id,
workflowId,
blockId,
subblockId,
})
return
}
// Update user activity
await roomManager.updateUserActivity(workflowId, socket.id, { lastActivity: Date.now() })
// Server-side debounce/coalesce by workflowId+blockId+subblockId // Server-side debounce/coalesce by workflowId+blockId+subblockId
const debouncedKey = `${workflowId}:${blockId}:${subblockId}` const debouncedKey = `${workflowId}:${blockId}:${subblockId}`
const existing = pendingSubblockUpdates.get(debouncedKey) const existing = pendingSubblockUpdates.get(debouncedKey)
@@ -66,7 +85,7 @@ export function setupSubblocksHandlers(
existing.timeout = setTimeout(async () => { existing.timeout = setTimeout(async () => {
await flushSubblockUpdate(workflowId, existing, roomManager) await flushSubblockUpdate(workflowId, existing, roomManager)
pendingSubblockUpdates.delete(debouncedKey) pendingSubblockUpdates.delete(debouncedKey)
}, 25) }, DEBOUNCE_INTERVAL_MS)
} else { } else {
const opToSocket = new Map<string, string>() const opToSocket = new Map<string, string>()
if (operationId) opToSocket.set(operationId, socket.id) if (operationId) opToSocket.set(operationId, socket.id)
@@ -76,7 +95,7 @@ export function setupSubblocksHandlers(
await flushSubblockUpdate(workflowId, pending, roomManager) await flushSubblockUpdate(workflowId, pending, roomManager)
pendingSubblockUpdates.delete(debouncedKey) pendingSubblockUpdates.delete(debouncedKey)
} }
}, 25) }, DEBOUNCE_INTERVAL_MS)
pendingSubblockUpdates.set(debouncedKey, { pendingSubblockUpdates.set(debouncedKey, {
latest: { blockId, subblockId, value, timestamp }, latest: { blockId, subblockId, value, timestamp },
timeout, timeout,
@@ -88,7 +107,6 @@ export function setupSubblocksHandlers(
const errorMessage = error instanceof Error ? error.message : 'Unknown error' const errorMessage = error instanceof Error ? error.message : 'Unknown error'
// Best-effort failure for the single operation if provided
if (operationId) { if (operationId) {
socket.emit('operation-failed', { socket.emit('operation-failed', {
operationId, operationId,
@@ -97,7 +115,6 @@ export function setupSubblocksHandlers(
}) })
} }
// Also emit legacy operation-error for backward compatibility
socket.emit('operation-error', { socket.emit('operation-error', {
type: 'SUBBLOCK_UPDATE_FAILED', type: 'SUBBLOCK_UPDATE_FAILED',
message: `Failed to update subblock ${blockId}.${subblockId}: ${errorMessage}`, message: `Failed to update subblock ${blockId}.${subblockId}: ${errorMessage}`,
@@ -111,9 +128,11 @@ export function setupSubblocksHandlers(
async function flushSubblockUpdate( async function flushSubblockUpdate(
workflowId: string, workflowId: string,
pending: PendingSubblock, pending: PendingSubblock,
roomManager: RoomManager roomManager: IRoomManager
) { ) {
const { blockId, subblockId, value, timestamp } = pending.latest const { blockId, subblockId, value, timestamp } = pending.latest
const io = roomManager.io
try { try {
// Verify workflow still exists // Verify workflow still exists
const workflowExists = await db const workflowExists = await db
@@ -124,14 +143,11 @@ async function flushSubblockUpdate(
if (workflowExists.length === 0) { if (workflowExists.length === 0) {
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: 'Workflow not found',
operationId: opId, retryable: false,
error: 'Workflow not found', })
retryable: false,
})
}
}) })
return return
} }
@@ -164,60 +180,48 @@ async function flushSubblockUpdate(
}) })
if (updateSuccessful) { if (updateSuccessful) {
// Broadcast to other clients (exclude senders to avoid overwriting their local state) // Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = new Set(pending.opToSocket.values()) const senderSocketIds = [...pending.opToSocket.values()]
const io = (roomManager as any).io if (senderSocketIds.length > 0) {
if (io) { io.to(workflowId).except(senderSocketIds).emit('subblock-update', {
// Get all sockets in the room blockId,
const roomSockets = io.sockets.adapter.rooms.get(workflowId) subblockId,
if (roomSockets) { value,
roomSockets.forEach((socketId: string) => { timestamp,
// Only emit to sockets that didn't send any of the coalesced ops })
if (!senderSocketIds.has(socketId)) { } else {
const sock = io.sockets.sockets.get(socketId) io.to(workflowId).emit('subblock-update', {
if (sock) { blockId,
sock.emit('subblock-update', { subblockId,
blockId, value,
subblockId, timestamp,
value, })
timestamp,
})
}
}
})
}
} }
// Confirm all coalesced operationIds // Confirm all coalesced operationIds (io.to(socketId) works cross-pod)
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-confirmed', {
if (sock) { operationId: opId,
sock.emit('operation-confirmed', { operationId: opId, serverTimestamp: Date.now() }) serverTimestamp: Date.now(),
} })
}) })
} else { } else {
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: 'Block no longer exists',
operationId: opId, retryable: false,
error: 'Block no longer exists', })
retryable: false,
})
}
}) })
} }
} catch (error) { } catch (error) {
logger.error('Error flushing subblock update:', error) logger.error('Error flushing subblock update:', error)
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: error instanceof Error ? error.message : 'Unknown error',
operationId: opId, retryable: true,
error: error instanceof Error ? error.message : 'Unknown error', })
retryable: true,
})
}
}) })
} }
} }
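The core server-side pattern here is coalescing: rapid updates are keyed by `${workflowId}:${blockId}:${subblockId}`, only the latest value is persisted after `DEBOUNCE_INTERVAL_MS`, all `operationId`s collected during the window are acknowledged on flush, and the broadcast uses `io.to(room).except(senderIds)` so it works across pods via the Redis adapter. A generic sketch of that coalesce-and-flush shape (names illustrative, mirroring the structure in this handler):

```ts
// Minimal sketch of the coalesce-and-flush pattern used by the subblock and
// variable handlers; the real code also verifies the workflow and persists to the DB.
type Pending<T> = {
  latest: T
  timeout: NodeJS.Timeout
  opToSocket: Map<string, string> // operationId -> socketId, acknowledged on flush
}

const DEBOUNCE_INTERVAL_MS = 25
const pendingUpdates = new Map<string, Pending<unknown>>()

export function coalesce<T>(
  key: string,
  value: T,
  operationId: string | undefined,
  socketId: string,
  flush: (entry: Pending<T>) => Promise<void>
) {
  const existing = pendingUpdates.get(key) as Pending<T> | undefined
  if (existing) {
    // Keep only the newest value; remember every operationId that arrived meanwhile.
    existing.latest = value
    if (operationId) existing.opToSocket.set(operationId, socketId)
    clearTimeout(existing.timeout)
    existing.timeout = setTimeout(async () => {
      await flush(existing)
      pendingUpdates.delete(key)
    }, DEBOUNCE_INTERVAL_MS)
    return
  }

  const opToSocket = new Map<string, string>()
  if (operationId) opToSocket.set(operationId, socketId)
  const timeout = setTimeout(async () => {
    const entry = pendingUpdates.get(key) as Pending<T> | undefined
    if (entry) {
      await flush(entry)
      pendingUpdates.delete(key)
    }
  }, DEBOUNCE_INTERVAL_MS)
  pendingUpdates.set(key, { latest: value, timeout, opToSocket })
}
```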

View File

@@ -2,12 +2,14 @@ import { db } from '@sim/db'
import { workflow } from '@sim/db/schema' import { workflow } from '@sim/db/schema'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm' import { eq } from 'drizzle-orm'
import type { HandlerDependencies } from '@/socket/handlers/workflow'
import type { AuthenticatedSocket } from '@/socket/middleware/auth' import type { AuthenticatedSocket } from '@/socket/middleware/auth'
import type { RoomManager } from '@/socket/rooms/manager' import type { IRoomManager } from '@/socket/rooms'
const logger = createLogger('VariablesHandlers') const logger = createLogger('VariablesHandlers')
/** Debounce interval for coalescing rapid variable updates before persisting */
const DEBOUNCE_INTERVAL_MS = 25
type PendingVariable = { type PendingVariable = {
latest: { variableId: string; field: string; value: any; timestamp: number } latest: { variableId: string; field: string; value: any; timestamp: number }
timeout: NodeJS.Timeout timeout: NodeJS.Timeout
@@ -17,45 +19,58 @@ type PendingVariable = {
// Keyed by `${workflowId}:${variableId}:${field}` // Keyed by `${workflowId}:${variableId}:${field}`
const pendingVariableUpdates = new Map<string, PendingVariable>() const pendingVariableUpdates = new Map<string, PendingVariable>()
export function setupVariablesHandlers( /**
socket: AuthenticatedSocket, * Cleans up pending updates for a disconnected socket.
deps: HandlerDependencies | RoomManager * Removes the socket's operationIds from pending updates to prevent memory leaks.
) { */
const roomManager = export function cleanupPendingVariablesForSocket(socketId: string): void {
deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager) for (const [, pending] of pendingVariableUpdates.entries()) {
for (const [opId, sid] of pending.opToSocket.entries()) {
if (sid === socketId) {
pending.opToSocket.delete(opId)
}
}
}
}
export function setupVariablesHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('variable-update', async (data) => { socket.on('variable-update', async (data) => {
const workflowId = roomManager.getWorkflowIdForSocket(socket.id)
const session = roomManager.getUserSession(socket.id)
if (!workflowId || !session) {
logger.debug(`Ignoring variable update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
return
}
const { variableId, field, value, timestamp, operationId } = data const { variableId, field, value, timestamp, operationId } = data
const room = roomManager.getWorkflowRoom(workflowId)
if (!room) {
logger.debug(`Ignoring variable update: workflow room not found`, {
socketId: socket.id,
workflowId,
variableId,
field,
})
return
}
try { try {
const userPresence = room.users.get(socket.id) const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
if (userPresence) { const session = await roomManager.getUserSession(socket.id)
userPresence.lastActivity = Date.now()
if (!workflowId || !session) {
logger.debug(`Ignoring variable update: socket not connected to any workflow room`, {
socketId: socket.id,
hasWorkflowId: !!workflowId,
hasSession: !!session,
})
socket.emit('operation-forbidden', {
type: 'SESSION_ERROR',
message: 'Session expired, please rejoin workflow',
})
if (operationId) {
socket.emit('operation-failed', { operationId, error: 'Session expired' })
}
return
} }
const hasRoom = await roomManager.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`Ignoring variable update: workflow room not found`, {
socketId: socket.id,
workflowId,
variableId,
field,
})
return
}
// Update user activity
await roomManager.updateUserActivity(workflowId, socket.id, { lastActivity: Date.now() })
const debouncedKey = `${workflowId}:${variableId}:${field}` const debouncedKey = `${workflowId}:${variableId}:${field}`
const existing = pendingVariableUpdates.get(debouncedKey) const existing = pendingVariableUpdates.get(debouncedKey)
if (existing) { if (existing) {
@@ -65,7 +80,7 @@ export function setupVariablesHandlers(
existing.timeout = setTimeout(async () => { existing.timeout = setTimeout(async () => {
await flushVariableUpdate(workflowId, existing, roomManager) await flushVariableUpdate(workflowId, existing, roomManager)
pendingVariableUpdates.delete(debouncedKey) pendingVariableUpdates.delete(debouncedKey)
}, 25) }, DEBOUNCE_INTERVAL_MS)
} else { } else {
const opToSocket = new Map<string, string>() const opToSocket = new Map<string, string>()
if (operationId) opToSocket.set(operationId, socket.id) if (operationId) opToSocket.set(operationId, socket.id)
@@ -75,7 +90,7 @@ export function setupVariablesHandlers(
await flushVariableUpdate(workflowId, pending, roomManager) await flushVariableUpdate(workflowId, pending, roomManager)
pendingVariableUpdates.delete(debouncedKey) pendingVariableUpdates.delete(debouncedKey)
} }
}, 25) }, DEBOUNCE_INTERVAL_MS)
pendingVariableUpdates.set(debouncedKey, { pendingVariableUpdates.set(debouncedKey, {
latest: { variableId, field, value, timestamp }, latest: { variableId, field, value, timestamp },
timeout, timeout,
@@ -108,9 +123,11 @@ export function setupVariablesHandlers(
async function flushVariableUpdate( async function flushVariableUpdate(
workflowId: string, workflowId: string,
pending: PendingVariable, pending: PendingVariable,
roomManager: RoomManager roomManager: IRoomManager
) { ) {
const { variableId, field, value, timestamp } = pending.latest const { variableId, field, value, timestamp } = pending.latest
const io = roomManager.io
try { try {
const workflowExists = await db const workflowExists = await db
.select({ id: workflow.id }) .select({ id: workflow.id })
@@ -120,14 +137,11 @@ async function flushVariableUpdate(
if (workflowExists.length === 0) { if (workflowExists.length === 0) {
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: 'Workflow not found',
operationId: opId, retryable: false,
error: 'Workflow not found', })
retryable: false,
})
}
}) })
return return
} }
@@ -163,59 +177,50 @@ async function flushVariableUpdate(
}) })
if (updateSuccessful) { if (updateSuccessful) {
// Broadcast to other clients (exclude senders to avoid overwriting their local state) // Broadcast to room excluding all senders (works cross-pod via Redis adapter)
const senderSocketIds = new Set(pending.opToSocket.values()) const senderSocketIds = [...pending.opToSocket.values()]
const io = (roomManager as any).io if (senderSocketIds.length > 0) {
if (io) { io.to(workflowId).except(senderSocketIds).emit('variable-update', {
const roomSockets = io.sockets.adapter.rooms.get(workflowId) variableId,
if (roomSockets) { field,
roomSockets.forEach((socketId: string) => { value,
if (!senderSocketIds.has(socketId)) { timestamp,
const sock = io.sockets.sockets.get(socketId) })
if (sock) { } else {
sock.emit('variable-update', { io.to(workflowId).emit('variable-update', {
variableId, variableId,
field, field,
value, value,
timestamp, timestamp,
}) })
}
}
})
}
} }
// Confirm all coalesced operationIds (io.to(socketId) works cross-pod)
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-confirmed', {
if (sock) { operationId: opId,
sock.emit('operation-confirmed', { operationId: opId, serverTimestamp: Date.now() }) serverTimestamp: Date.now(),
} })
}) })
logger.debug(`Flushed variable update ${workflowId}: ${variableId}.${field}`) logger.debug(`Flushed variable update ${workflowId}: ${variableId}.${field}`)
} else { } else {
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: 'Variable no longer exists',
operationId: opId, retryable: false,
error: 'Variable no longer exists', })
retryable: false,
})
}
}) })
} }
} catch (error) { } catch (error) {
logger.error('Error flushing variable update:', error) logger.error('Error flushing variable update:', error)
pending.opToSocket.forEach((socketId, opId) => { pending.opToSocket.forEach((socketId, opId) => {
const sock = (roomManager as any).io?.sockets?.sockets?.get(socketId) io.to(socketId).emit('operation-failed', {
if (sock) { operationId: opId,
sock.emit('operation-failed', { error: error instanceof Error ? error.message : 'Unknown error',
operationId: opId, retryable: true,
error: error instanceof Error ? error.message : 'Unknown error', })
retryable: true,
})
}
}) })
} }
} }
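Both handlers now export `cleanupPendingSubblocksForSocket` and `cleanupPendingVariablesForSocket` so a disconnecting socket's `operationId` entries do not linger in the pending maps. A sketch of how these helpers could be wired into the disconnect path; the actual connection handler in this PR is not shown in this section and may differ.

```ts
// Hypothetical disconnect wiring (assumption, for illustration only).
import { cleanupPendingSubblocksForSocket } from '@/socket/handlers/subblocks'
import { cleanupPendingVariablesForSocket } from '@/socket/handlers/variables'
import type { AuthenticatedSocket } from '@/socket/middleware/auth'
import type { IRoomManager } from '@/socket/rooms'

export function setupConnectionHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
  socket.on('disconnect', async () => {
    // Drop operationId -> socketId mappings so flush never acknowledges a dead socket.
    cleanupPendingSubblocksForSocket(socket.id)
    cleanupPendingVariablesForSocket(socket.id)

    // Remove presence state and let the remaining collaborators know.
    const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
    await roomManager.removeUserFromRoom(socket.id)
    if (workflowId) await roomManager.broadcastPresenceUpdate(workflowId)
  })
}
```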

View File

@@ -4,38 +4,12 @@ import { eq } from 'drizzle-orm'
import { getWorkflowState } from '@/socket/database/operations' import { getWorkflowState } from '@/socket/database/operations'
import type { AuthenticatedSocket } from '@/socket/middleware/auth' import type { AuthenticatedSocket } from '@/socket/middleware/auth'
import { verifyWorkflowAccess } from '@/socket/middleware/permissions' import { verifyWorkflowAccess } from '@/socket/middleware/permissions'
import type { RoomManager, UserPresence, WorkflowRoom } from '@/socket/rooms/manager' import type { IRoomManager, UserPresence } from '@/socket/rooms'
const logger = createLogger('WorkflowHandlers') const logger = createLogger('WorkflowHandlers')
export type { UserPresence, WorkflowRoom } export function setupWorkflowHandlers(socket: AuthenticatedSocket, roomManager: IRoomManager) {
socket.on('join-workflow', async ({ workflowId, tabSessionId }) => {
export interface HandlerDependencies {
roomManager: RoomManager
}
export const createWorkflowRoom = (workflowId: string): WorkflowRoom => ({
workflowId,
users: new Map(),
lastModified: Date.now(),
activeConnections: 0,
})
export const cleanupUserFromRoom = (
socketId: string,
workflowId: string,
roomManager: RoomManager
) => {
roomManager.cleanupUserFromRoom(socketId, workflowId)
}
export function setupWorkflowHandlers(
socket: AuthenticatedSocket,
deps: HandlerDependencies | RoomManager
) {
const roomManager =
deps instanceof Object && 'roomManager' in deps ? deps.roomManager : (deps as RoomManager)
socket.on('join-workflow', async ({ workflowId }) => {
try { try {
const userId = socket.userId const userId = socket.userId
const userName = socket.userName const userName = socket.userName
@@ -48,6 +22,7 @@ export function setupWorkflowHandlers(
logger.info(`Join workflow request from ${userId} (${userName}) for workflow ${workflowId}`) logger.info(`Join workflow request from ${userId} (${userName}) for workflow ${workflowId}`)
// Verify workflow access
let userRole: string let userRole: string
try { try {
const accessInfo = await verifyWorkflowAccess(userId, workflowId) const accessInfo = await verifyWorkflowAccess(userId, workflowId)
@@ -63,23 +38,37 @@ export function setupWorkflowHandlers(
return return
} }
const currentWorkflowId = roomManager.getWorkflowIdForSocket(socket.id) // Leave current room if in one
const currentWorkflowId = await roomManager.getWorkflowIdForSocket(socket.id)
if (currentWorkflowId) { if (currentWorkflowId) {
socket.leave(currentWorkflowId) socket.leave(currentWorkflowId)
roomManager.cleanupUserFromRoom(socket.id, currentWorkflowId) await roomManager.removeUserFromRoom(socket.id)
await roomManager.broadcastPresenceUpdate(currentWorkflowId)
roomManager.broadcastPresenceUpdate(currentWorkflowId)
} }
const STALE_THRESHOLD_MS = 60_000
const now = Date.now()
const existingUsers = await roomManager.getWorkflowUsers(workflowId)
for (const existingUser of existingUsers) {
if (existingUser.userId === userId && existingUser.socketId !== socket.id) {
const isSameTab = tabSessionId && existingUser.tabSessionId === tabSessionId
const isStale =
now - (existingUser.lastActivity || existingUser.joinedAt || 0) > STALE_THRESHOLD_MS
if (isSameTab || isStale) {
logger.info(
`Cleaning up socket ${existingUser.socketId} for user ${userId} (${isSameTab ? 'same tab' : 'stale'})`
)
await roomManager.removeUserFromRoom(existingUser.socketId)
roomManager.io.in(existingUser.socketId).socketsLeave(workflowId)
}
}
}
// Join the new room
socket.join(workflowId) socket.join(workflowId)
if (!roomManager.hasWorkflowRoom(workflowId)) { // Get avatar URL
roomManager.setWorkflowRoom(workflowId, roomManager.createWorkflowRoom(workflowId))
}
const room = roomManager.getWorkflowRoom(workflowId)!
room.activeConnections++
let avatarUrl = socket.userImage || null let avatarUrl = socket.userImage || null
if (!avatarUrl) { if (!avatarUrl) {
try { try {
@@ -95,54 +84,68 @@ export function setupWorkflowHandlers(
} }
} }
// Create presence entry
const userPresence: UserPresence = { const userPresence: UserPresence = {
userId, userId,
workflowId, workflowId,
userName, userName,
socketId: socket.id, socketId: socket.id,
tabSessionId,
joinedAt: Date.now(), joinedAt: Date.now(),
lastActivity: Date.now(), lastActivity: Date.now(),
role: userRole, role: userRole,
avatarUrl, avatarUrl,
} }
room.users.set(socket.id, userPresence) // Add user to room
roomManager.setWorkflowForSocket(socket.id, workflowId) await roomManager.addUserToRoom(workflowId, socket.id, userPresence)
roomManager.setUserSession(socket.id, {
userId, // Get current presence list for the join acknowledgment
userName, const presenceUsers = await roomManager.getWorkflowUsers(workflowId)
avatarUrl,
// Get workflow state
const workflowState = await getWorkflowState(workflowId)
// Send join success with presence list (client waits for this to confirm join)
socket.emit('join-workflow-success', {
workflowId,
socketId: socket.id,
presenceUsers,
}) })
const workflowState = await getWorkflowState(workflowId) // Send workflow state
socket.emit('workflow-state', workflowState) socket.emit('workflow-state', workflowState)
roomManager.broadcastPresenceUpdate(workflowId) // Broadcast presence update to all users in the room
await roomManager.broadcastPresenceUpdate(workflowId)
const uniqueUserCount = roomManager.getUniqueUserCount(workflowId) const uniqueUserCount = await roomManager.getUniqueUserCount(workflowId)
logger.info( logger.info(
`User ${userId} (${userName}) joined workflow ${workflowId}. Room now has ${uniqueUserCount} unique users (${room.activeConnections} connections).` `User ${userId} (${userName}) joined workflow ${workflowId}. Room now has ${uniqueUserCount} unique users.`
) )
} catch (error) { } catch (error) {
logger.error('Error joining workflow:', error) logger.error('Error joining workflow:', error)
socket.emit('error', { // Undo socket.join and room manager entry if any operation failed
type: 'JOIN_ERROR', socket.leave(workflowId)
message: 'Failed to join workflow', await roomManager.removeUserFromRoom(socket.id)
}) socket.emit('join-workflow-error', { error: 'Failed to join workflow' })
} }
}) })
socket.on('leave-workflow', () => { socket.on('leave-workflow', async () => {
const workflowId = roomManager.getWorkflowIdForSocket(socket.id) try {
const session = roomManager.getUserSession(socket.id) const workflowId = await roomManager.getWorkflowIdForSocket(socket.id)
const session = await roomManager.getUserSession(socket.id)
if (workflowId && session) { if (workflowId && session) {
socket.leave(workflowId) socket.leave(workflowId)
roomManager.cleanupUserFromRoom(socket.id, workflowId) await roomManager.removeUserFromRoom(socket.id)
await roomManager.broadcastPresenceUpdate(workflowId)
roomManager.broadcastPresenceUpdate(workflowId) logger.info(`User ${session.userId} (${session.userName}) left workflow ${workflowId}`)
}
logger.info(`User ${session.userId} (${session.userName}) left workflow ${workflowId}`) } catch (error) {
logger.error('Error leaving workflow:', error)
} }
}) })
} }
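The join flow now acknowledges with `join-workflow-success` (carrying `presenceUsers`) or `join-workflow-error`, sends `workflow-state` afterwards, and accepts a `tabSessionId` so a reconnecting tab evicts its own stale socket. A client-side sketch of that handshake, assuming socket.io-client; event names and fields come from this diff, while the per-tab id persistence is an assumption.

```ts
// Hypothetical client-side join handshake; not part of this PR.
import type { Socket } from 'socket.io-client'

// Reuse one id per browser tab so the server can clean up this tab's previous socket.
const tabSessionId = sessionStorage.getItem('tabSessionId') ?? crypto.randomUUID()
sessionStorage.setItem('tabSessionId', tabSessionId)

export function joinWorkflow(socket: Socket, workflowId: string) {
  return new Promise((resolve, reject) => {
    socket.once('join-workflow-success', ({ presenceUsers }) => resolve(presenceUsers))
    socket.once('join-workflow-error', ({ error }) => reject(new Error(error)))
    socket.emit('join-workflow', { workflowId, tabSessionId })
  })
}

export function bindWorkflowState(socket: Socket) {
  // The server sends the authoritative state right after a successful join.
  socket.on('workflow-state', (state) => {
    console.log('received workflow state', state)
  })
}
```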

View File

@@ -7,7 +7,7 @@ import { createServer, request as httpRequest } from 'http'
import { createMockLogger, databaseMock } from '@sim/testing' import { createMockLogger, databaseMock } from '@sim/testing'
import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest' import { afterEach, beforeAll, beforeEach, describe, expect, it, vi } from 'vitest'
import { createSocketIOServer } from '@/socket/config/socket' import { createSocketIOServer } from '@/socket/config/socket'
import { RoomManager } from '@/socket/rooms/manager' import { MemoryRoomManager } from '@/socket/rooms'
import { createHttpHandler } from '@/socket/routes/http' import { createHttpHandler } from '@/socket/routes/http'
vi.mock('@/lib/auth', () => ({ vi.mock('@/lib/auth', () => ({
@@ -20,6 +20,30 @@ vi.mock('@/lib/auth', () => ({
vi.mock('@sim/db', () => databaseMock) vi.mock('@sim/db', () => databaseMock)
// Mock redis package to prevent actual Redis connections
vi.mock('redis', () => ({
createClient: vi.fn(() => ({
on: vi.fn(),
connect: vi.fn().mockResolvedValue(undefined),
quit: vi.fn().mockResolvedValue(undefined),
duplicate: vi.fn().mockReturnThis(),
})),
}))
// Mock env to not have REDIS_URL (use importOriginal to get helper functions)
vi.mock('@/lib/core/config/env', async (importOriginal) => {
const actual = await importOriginal<typeof import('@/lib/core/config/env')>()
return {
...actual,
env: {
...actual.env,
DATABASE_URL: 'postgres://localhost/test',
NODE_ENV: 'test',
REDIS_URL: undefined,
},
}
})
vi.mock('@/socket/middleware/auth', () => ({ vi.mock('@/socket/middleware/auth', () => ({
authenticateSocket: vi.fn((socket, next) => { authenticateSocket: vi.fn((socket, next) => {
socket.userId = 'test-user-id' socket.userId = 'test-user-id'
@@ -51,7 +75,7 @@ vi.mock('@/socket/database/operations', () => ({
describe('Socket Server Index Integration', () => { describe('Socket Server Index Integration', () => {
let httpServer: any let httpServer: any
let io: any let io: any
let roomManager: RoomManager let roomManager: MemoryRoomManager
let logger: ReturnType<typeof createMockLogger> let logger: ReturnType<typeof createMockLogger>
let PORT: number let PORT: number
@@ -64,9 +88,10 @@ describe('Socket Server Index Integration', () => {
httpServer = createServer() httpServer = createServer()
io = createSocketIOServer(httpServer) io = await createSocketIOServer(httpServer)
roomManager = new RoomManager(io) roomManager = new MemoryRoomManager(io)
await roomManager.initialize()
const httpHandler = createHttpHandler(roomManager, logger) const httpHandler = createHttpHandler(roomManager, logger)
httpServer.on('request', httpHandler) httpServer.on('request', httpHandler)
@@ -98,6 +123,9 @@ describe('Socket Server Index Integration', () => {
}, 20000) }, 20000)
afterEach(async () => { afterEach(async () => {
if (roomManager) {
await roomManager.shutdown()
}
if (io) { if (io) {
await new Promise<void>((resolve) => { await new Promise<void>((resolve) => {
io.close(() => resolve()) io.close(() => resolve())
@@ -177,43 +205,60 @@ describe('Socket Server Index Integration', () => {
}) })
describe('Room Manager Integration', () => { describe('Room Manager Integration', () => {
it('should create room manager successfully', () => { it('should create room manager successfully', async () => {
expect(roomManager).toBeDefined() expect(roomManager).toBeDefined()
expect(roomManager.getTotalActiveConnections()).toBe(0) expect(await roomManager.getTotalActiveConnections()).toBe(0)
}) })
it('should create workflow rooms', () => { it('should add and get users from workflow rooms', async () => {
const workflowId = 'test-workflow-123' const workflowId = 'test-workflow-123'
const room = roomManager.createWorkflowRoom(workflowId) const socketId = 'test-socket-123'
roomManager.setWorkflowRoom(workflowId, room)
expect(roomManager.hasWorkflowRoom(workflowId)).toBe(true) const presence = {
const retrievedRoom = roomManager.getWorkflowRoom(workflowId) userId: 'user-123',
expect(retrievedRoom).toBeDefined() workflowId,
expect(retrievedRoom?.workflowId).toBe(workflowId) userName: 'Test User',
socketId,
joinedAt: Date.now(),
lastActivity: Date.now(),
role: 'admin',
}
await roomManager.addUserToRoom(workflowId, socketId, presence)
expect(await roomManager.hasWorkflowRoom(workflowId)).toBe(true)
const users = await roomManager.getWorkflowUsers(workflowId)
expect(users).toHaveLength(1)
expect(users[0].socketId).toBe(socketId)
}) })
it('should manage user sessions', () => { it('should manage user sessions', async () => {
const socketId = 'test-socket-123' const socketId = 'test-socket-123'
const workflowId = 'test-workflow-456' const workflowId = 'test-workflow-456'
const session = { userId: 'user-123', userName: 'Test User' }
roomManager.setWorkflowForSocket(socketId, workflowId) const presence = {
roomManager.setUserSession(socketId, session) userId: 'user-123',
workflowId,
userName: 'Test User',
socketId,
joinedAt: Date.now(),
lastActivity: Date.now(),
role: 'admin',
}
expect(roomManager.getWorkflowIdForSocket(socketId)).toBe(workflowId) await roomManager.addUserToRoom(workflowId, socketId, presence)
expect(roomManager.getUserSession(socketId)).toEqual(session)
expect(await roomManager.getWorkflowIdForSocket(socketId)).toBe(workflowId)
const session = await roomManager.getUserSession(socketId)
expect(session).toBeDefined()
expect(session?.userId).toBe('user-123')
}) })
it('should clean up rooms properly', () => { it('should clean up rooms properly', async () => {
const workflowId = 'test-workflow-789' const workflowId = 'test-workflow-789'
const socketId = 'test-socket-789' const socketId = 'test-socket-789'
const room = roomManager.createWorkflowRoom(workflowId) const presence = {
roomManager.setWorkflowRoom(workflowId, room)
// Add user to room
room.users.set(socketId, {
userId: 'user-789', userId: 'user-789',
workflowId, workflowId,
userName: 'Test User', userName: 'Test User',
@@ -221,16 +266,18 @@ describe('Socket Server Index Integration', () => {
joinedAt: Date.now(), joinedAt: Date.now(),
lastActivity: Date.now(), lastActivity: Date.now(),
role: 'admin', role: 'admin',
}) }
room.activeConnections = 1
roomManager.setWorkflowForSocket(socketId, workflowId) await roomManager.addUserToRoom(workflowId, socketId, presence)
// Clean up user expect(await roomManager.hasWorkflowRoom(workflowId)).toBe(true)
roomManager.cleanupUserFromRoom(socketId, workflowId)
expect(roomManager.hasWorkflowRoom(workflowId)).toBe(false) // Remove user
expect(roomManager.getWorkflowIdForSocket(socketId)).toBeUndefined() await roomManager.removeUserFromRoom(socketId)
// Room should be cleaned up since it's now empty
expect(await roomManager.hasWorkflowRoom(workflowId)).toBe(false)
expect(await roomManager.getWorkflowIdForSocket(socketId)).toBeNull()
}) })
}) })
@@ -238,7 +285,7 @@ describe('Socket Server Index Integration', () => {
it.concurrent('should properly import all extracted modules', async () => { it.concurrent('should properly import all extracted modules', async () => {
const { createSocketIOServer } = await import('@/socket/config/socket') const { createSocketIOServer } = await import('@/socket/config/socket')
const { createHttpHandler } = await import('@/socket/routes/http') const { createHttpHandler } = await import('@/socket/routes/http')
const { RoomManager } = await import('@/socket/rooms/manager') const { MemoryRoomManager, RedisRoomManager } = await import('@/socket/rooms')
const { authenticateSocket } = await import('@/socket/middleware/auth') const { authenticateSocket } = await import('@/socket/middleware/auth')
const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions') const { verifyWorkflowAccess } = await import('@/socket/middleware/permissions')
const { getWorkflowState } = await import('@/socket/database/operations') const { getWorkflowState } = await import('@/socket/database/operations')
@@ -246,22 +293,23 @@ describe('Socket Server Index Integration', () => {
expect(createSocketIOServer).toBeTypeOf('function') expect(createSocketIOServer).toBeTypeOf('function')
expect(createHttpHandler).toBeTypeOf('function') expect(createHttpHandler).toBeTypeOf('function')
expect(RoomManager).toBeTypeOf('function') expect(MemoryRoomManager).toBeTypeOf('function')
expect(RedisRoomManager).toBeTypeOf('function')
expect(authenticateSocket).toBeTypeOf('function') expect(authenticateSocket).toBeTypeOf('function')
expect(verifyWorkflowAccess).toBeTypeOf('function') expect(verifyWorkflowAccess).toBeTypeOf('function')
expect(getWorkflowState).toBeTypeOf('function') expect(getWorkflowState).toBeTypeOf('function')
expect(WorkflowOperationSchema).toBeDefined() expect(WorkflowOperationSchema).toBeDefined()
}) })
it.concurrent('should maintain all original functionality after refactoring', () => { it.concurrent('should maintain all original functionality after refactoring', async () => {
expect(httpServer).toBeDefined() expect(httpServer).toBeDefined()
expect(io).toBeDefined() expect(io).toBeDefined()
expect(roomManager).toBeDefined() expect(roomManager).toBeDefined()
expect(typeof roomManager.createWorkflowRoom).toBe('function') expect(typeof roomManager.addUserToRoom).toBe('function')
expect(typeof roomManager.cleanupUserFromRoom).toBe('function') expect(typeof roomManager.removeUserFromRoom).toBe('function')
expect(typeof roomManager.handleWorkflowDeletion).toBe('function') expect(typeof roomManager.handleWorkflowDeletion).toBe('function')
expect(typeof roomManager.validateWorkflowConsistency).toBe('function') expect(typeof roomManager.broadcastPresenceUpdate).toBe('function')
}) })
}) })
@@ -286,6 +334,7 @@ describe('Socket Server Index Integration', () => {
it('should have shutdown capability', () => { it('should have shutdown capability', () => {
expect(typeof httpServer.close).toBe('function') expect(typeof httpServer.close).toBe('function')
expect(typeof io.close).toBe('function') expect(typeof io.close).toBe('function')
expect(typeof roomManager.shutdown).toBe('function')
}) })
}) })

View File

@@ -1,112 +1,125 @@
import { createServer } from 'http' import { createServer } from 'http'
import { createLogger } from '@sim/logger' import { createLogger } from '@sim/logger'
import type { Server as SocketIOServer } from 'socket.io'
import { env } from '@/lib/core/config/env' import { env } from '@/lib/core/config/env'
import { createSocketIOServer } from '@/socket/config/socket' import { createSocketIOServer, shutdownSocketIOAdapter } from '@/socket/config/socket'
import { setupAllHandlers } from '@/socket/handlers' import { setupAllHandlers } from '@/socket/handlers'
import { type AuthenticatedSocket, authenticateSocket } from '@/socket/middleware/auth' import { type AuthenticatedSocket, authenticateSocket } from '@/socket/middleware/auth'
import { RoomManager } from '@/socket/rooms/manager' import { type IRoomManager, MemoryRoomManager, RedisRoomManager } from '@/socket/rooms'
import { createHttpHandler } from '@/socket/routes/http' import { createHttpHandler } from '@/socket/routes/http'
const logger = createLogger('CollaborativeSocketServer') const logger = createLogger('CollaborativeSocketServer')
// Enhanced server configuration - HTTP server will be configured with handler after all dependencies are set up /** Maximum time to wait for graceful shutdown before forcing exit */
const httpServer = createServer() const SHUTDOWN_TIMEOUT_MS = 10000
const io = createSocketIOServer(httpServer) async function createRoomManager(io: SocketIOServer): Promise<IRoomManager> {
if (env.REDIS_URL) {
logger.info('Initializing Redis-backed RoomManager for multi-pod support')
const manager = new RedisRoomManager(io, env.REDIS_URL)
await manager.initialize()
return manager
}
// Initialize room manager after io is created logger.warn('No REDIS_URL configured - using in-memory RoomManager (single-pod only)')
const roomManager = new RoomManager(io) const manager = new MemoryRoomManager(io)
await manager.initialize()
return manager
}
io.use(authenticateSocket) async function main() {
const httpServer = createServer()
const PORT = Number(env.PORT || env.SOCKET_PORT || 3002)
const httpHandler = createHttpHandler(roomManager, logger) logger.info('Starting Socket.IO server...', {
httpServer.on('request', httpHandler) port: PORT,
nodeEnv: env.NODE_ENV,
process.on('uncaughtException', (error) => { hasDatabase: !!env.DATABASE_URL,
logger.error('Uncaught Exception:', error) hasAuth: !!env.BETTER_AUTH_SECRET,
// Don't exit in production, just log hasRedis: !!env.REDIS_URL,
})
process.on('unhandledRejection', (reason, promise) => {
logger.error('Unhandled Rejection at:', promise, 'reason:', reason)
})
httpServer.on('error', (error) => {
logger.error('HTTP server error:', error)
})
io.engine.on('connection_error', (err) => {
logger.error('Socket.IO connection error:', {
req: err.req?.url,
code: err.code,
message: err.message,
context: err.context,
}) })
})
io.on('connection', (socket: AuthenticatedSocket) => { // Create Socket.IO server with Redis adapter if configured
logger.info(`New socket connection: ${socket.id}`) const io = await createSocketIOServer(httpServer)
setupAllHandlers(socket, roomManager) // Initialize room manager (Redis or in-memory based on config)
}) const roomManager = await createRoomManager(io)
httpServer.on('request', (req, res) => { // Set up authentication middleware
logger.info(`🌐 HTTP Request: ${req.method} ${req.url}`, { io.use(authenticateSocket)
method: req.method,
url: req.url, // Set up HTTP handler for health checks and internal APIs
userAgent: req.headers['user-agent'], const httpHandler = createHttpHandler(roomManager, logger)
origin: req.headers.origin, httpServer.on('request', httpHandler)
host: req.headers.host,
timestamp: new Date().toISOString(), // Global error handlers
process.on('uncaughtException', (error) => {
logger.error('Uncaught Exception:', error)
}) })
})
io.engine.on('connection_error', (err) => { process.on('unhandledRejection', (reason, promise) => {
logger.error('❌ Engine.IO Connection error:', { logger.error('Unhandled Rejection at:', promise, 'reason:', reason)
code: err.code,
message: err.message,
context: err.context,
req: err.req
? {
url: err.req.url,
method: err.req.method,
headers: err.req.headers,
}
: 'No request object',
}) })
})
const PORT = Number(env.PORT || env.SOCKET_PORT || 3002) httpServer.on('error', (error: NodeJS.ErrnoException) => {
logger.error('HTTP server error:', error)
if (error.code === 'EADDRINUSE' || error.code === 'EACCES') {
process.exit(1)
}
})
logger.info('Starting Socket.IO server...', { io.engine.on('connection_error', (err) => {
port: PORT, logger.error('Socket.IO connection error:', {
nodeEnv: env.NODE_ENV, req: err.req?.url,
hasDatabase: !!env.DATABASE_URL, code: err.code,
hasAuth: !!env.BETTER_AUTH_SECRET, message: err.message,
}) context: err.context,
})
})
httpServer.listen(PORT, '0.0.0.0', () => { io.on('connection', (socket: AuthenticatedSocket) => {
logger.info(`Socket.IO server running on port ${PORT}`) logger.info(`New socket connection: ${socket.id}`)
logger.info(`🏥 Health check available at: http://localhost:${PORT}/health`) setupAllHandlers(socket, roomManager)
}) })
httpServer.on('error', (error) => { httpServer.listen(PORT, '0.0.0.0', () => {
logger.error('❌ Server failed to start:', error) logger.info(`Socket.IO server running on port ${PORT}`)
logger.info(`Health check available at: http://localhost:${PORT}/health`)
})
const shutdown = async () => {
logger.info('Shutting down Socket.IO server...')
try {
await roomManager.shutdown()
logger.info('RoomManager shutdown complete')
} catch (error) {
logger.error('Error during RoomManager shutdown:', error)
}
try {
await shutdownSocketIOAdapter()
} catch (error) {
logger.error('Error during Socket.IO adapter shutdown:', error)
}
httpServer.close(() => {
logger.info('Socket.IO server closed')
process.exit(0)
})
setTimeout(() => {
logger.error('Forced shutdown after timeout')
process.exit(1)
}, SHUTDOWN_TIMEOUT_MS)
}
process.on('SIGINT', shutdown)
process.on('SIGTERM', shutdown)
}
// Start the server
main().catch((error) => {
logger.error('Failed to start server:', error)
process.exit(1) process.exit(1)
}) })
process.on('SIGINT', () => {
logger.info('Shutting down Socket.IO server...')
httpServer.close(() => {
logger.info('Socket.IO server closed')
process.exit(0)
})
})
process.on('SIGTERM', () => {
logger.info('Shutting down Socket.IO server...')
httpServer.close(() => {
logger.info('Socket.IO server closed')
process.exit(0)
})
})
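`createSocketIOServer` is now awaited and paired with `shutdownSocketIOAdapter`, which suggests the config module attaches a Redis adapter when `REDIS_URL` is set. Their implementation is not included in this section; the sketch below is an assumption of what such a module could look like, using the real `@socket.io/redis-adapter` and `redis` APIs, not the PR's actual file.

```ts
// Assumed shape of '@/socket/config/socket' (illustrative, not the PR's code).
import type { Server as HttpServer } from 'http'
import { Server } from 'socket.io'
import { createAdapter } from '@socket.io/redis-adapter'
import { createClient } from 'redis'
import { env } from '@/lib/core/config/env'

let pubClient: ReturnType<typeof createClient> | null = null
let subClient: ReturnType<typeof createClient> | null = null

export async function createSocketIOServer(httpServer: HttpServer): Promise<Server> {
  const io = new Server(httpServer, { cors: { origin: '*' } })

  if (env.REDIS_URL) {
    // The Redis adapter needs a pub/sub client pair; it lets every pod share rooms
    // so io.to(room) / io.to(socketId) work cross-pod, as the handlers above rely on.
    pubClient = createClient({ url: env.REDIS_URL })
    subClient = pubClient.duplicate()
    await Promise.all([pubClient.connect(), subClient.connect()])
    io.adapter(createAdapter(pubClient, subClient))
  }

  return io
}

export async function shutdownSocketIOAdapter(): Promise<void> {
  // Close the adapter's Redis connections during graceful shutdown.
  await Promise.all([pubClient?.quit(), subClient?.quit()])
  pubClient = null
  subClient = null
}
```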

View File

@@ -21,7 +21,7 @@ export interface AuthenticatedSocket extends Socket {
* Socket.IO authentication middleware. * Socket.IO authentication middleware.
* Handles both anonymous mode (DISABLE_AUTH=true) and normal token-based auth. * Handles both anonymous mode (DISABLE_AUTH=true) and normal token-based auth.
*/ */
export async function authenticateSocket(socket: AuthenticatedSocket, next: any) { export async function authenticateSocket(socket: AuthenticatedSocket, next: (err?: Error) => void) {
try { try {
if (isAuthDisabled) { if (isAuthDisabled) {
socket.userId = ANONYMOUS_USER_ID socket.userId = ANONYMOUS_USER_ID

View File

@@ -73,7 +73,7 @@ export function checkRolePermission(
return { allowed: true } return { allowed: true }
} }
export async function verifyWorkspaceMembership( async function verifyWorkspaceMembership(
userId: string, userId: string,
workspaceId: string workspaceId: string
): Promise<string | null> { ): Promise<string | null> {

View File

@@ -0,0 +1,3 @@
export { MemoryRoomManager } from '@/socket/rooms/memory-manager'
export { RedisRoomManager } from '@/socket/rooms/redis-manager'
export type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/socket/rooms/types'
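The `IRoomManager` declaration itself lives in `@/socket/rooms/types` and is not shown in this section. Reconstructed from the call sites across this diff, it plausibly has roughly the following shape; everything is async because the Redis-backed implementation must await round trips. Treat this as an approximation, not the real declaration.

```ts
// Approximate IRoomManager shape inferred from the handlers in this diff.
import type { Server } from 'socket.io'

export interface UserPresence {
  userId: string
  workflowId: string
  userName: string
  socketId: string
  tabSessionId?: string
  joinedAt: number
  lastActivity: number
  role: string
  cursor?: { x: number; y: number }
  selection?: { type: 'block' | 'edge' | 'none'; id?: string }
  avatarUrl?: string | null
}

export interface IRoomManager {
  readonly io: Server

  initialize(): Promise<void>
  shutdown(): Promise<void>

  addUserToRoom(workflowId: string, socketId: string, presence: UserPresence): Promise<void>
  removeUserFromRoom(socketId: string): Promise<void>

  getWorkflowIdForSocket(socketId: string): Promise<string | null>
  getUserSession(
    socketId: string
  ): Promise<{ userId: string; userName: string; avatarUrl?: string | null } | null>
  getWorkflowUsers(workflowId: string): Promise<UserPresence[]>
  hasWorkflowRoom(workflowId: string): Promise<boolean>
  getUniqueUserCount(workflowId: string): Promise<number>
  getTotalActiveConnections(): Promise<number>

  updateUserActivity(
    workflowId: string,
    socketId: string,
    patch: Partial<Pick<UserPresence, 'lastActivity' | 'cursor' | 'selection'>>
  ): Promise<void>
  updateRoomLastModified(workflowId: string): Promise<void>

  broadcastPresenceUpdate(workflowId: string): Promise<void>
  handleWorkflowDeletion(workflowId: string): Promise<void>
}
```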

View File

@@ -1,291 +0,0 @@
import * as schema from '@sim/db/schema'
import { workflowBlocks, workflowEdges } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, isNull } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/postgres-js'
import postgres from 'postgres'
import type { Server } from 'socket.io'
import { env } from '@/lib/core/config/env'
const connectionString = env.DATABASE_URL
const db = drizzle(
postgres(connectionString, {
prepare: false,
idle_timeout: 15,
connect_timeout: 20,
max: 3,
onnotice: () => {},
}),
{ schema }
)
const logger = createLogger('RoomManager')
export interface UserPresence {
userId: string
workflowId: string
userName: string
socketId: string
joinedAt: number
lastActivity: number
role: string
cursor?: { x: number; y: number }
selection?: { type: 'block' | 'edge' | 'none'; id?: string }
avatarUrl?: string | null
}
export interface WorkflowRoom {
workflowId: string
users: Map<string, UserPresence> // socketId -> UserPresence
lastModified: number
activeConnections: number
}
export class RoomManager {
private workflowRooms = new Map<string, WorkflowRoom>()
private socketToWorkflow = new Map<string, string>()
private userSessions = new Map<
string,
{ userId: string; userName: string; avatarUrl?: string | null }
>()
private io: Server
constructor(io: Server) {
this.io = io
}
createWorkflowRoom(workflowId: string): WorkflowRoom {
return {
workflowId,
users: new Map(),
lastModified: Date.now(),
activeConnections: 0,
}
}
cleanupUserFromRoom(socketId: string, workflowId: string) {
const room = this.workflowRooms.get(workflowId)
if (room) {
room.users.delete(socketId)
room.activeConnections = Math.max(0, room.activeConnections - 1)
if (room.activeConnections === 0) {
this.workflowRooms.delete(workflowId)
logger.info(`Cleaned up empty workflow room: ${workflowId}`)
}
}
this.socketToWorkflow.delete(socketId)
this.userSessions.delete(socketId)
}
handleWorkflowDeletion(workflowId: string) {
logger.info(`Handling workflow deletion notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for deleted workflow ${workflowId}`)
return
}
this.io.to(workflowId).emit('workflow-deleted', {
workflowId,
message: 'This workflow has been deleted',
timestamp: Date.now(),
})
const socketsToDisconnect: string[] = []
room.users.forEach((_presence, socketId) => {
socketsToDisconnect.push(socketId)
})
socketsToDisconnect.forEach((socketId) => {
const socket = this.io.sockets.sockets.get(socketId)
if (socket) {
socket.leave(workflowId)
logger.debug(`Disconnected socket ${socketId} from deleted workflow ${workflowId}`)
}
this.cleanupUserFromRoom(socketId, workflowId)
})
this.workflowRooms.delete(workflowId)
logger.info(
`Cleaned up workflow room ${workflowId} after deletion (${socketsToDisconnect.length} users disconnected)`
)
}
handleWorkflowRevert(workflowId: string, timestamp: number) {
logger.info(`Handling workflow revert notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for reverted workflow ${workflowId}`)
return
}
this.io.to(workflowId).emit('workflow-reverted', {
workflowId,
message: 'Workflow has been reverted to deployed state',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about workflow revert: ${workflowId}`)
}
handleWorkflowUpdate(workflowId: string) {
logger.info(`Handling workflow update notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for updated workflow ${workflowId}`)
return
}
const timestamp = Date.now()
// Notify all clients in the workflow room that the workflow has been updated
// This will trigger them to refresh their local state
this.io.to(workflowId).emit('workflow-updated', {
workflowId,
message: 'Workflow has been updated externally',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about workflow update: ${workflowId}`)
}
handleCopilotWorkflowEdit(workflowId: string, description?: string) {
logger.info(`Handling copilot workflow edit notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for copilot workflow edit ${workflowId}`)
return
}
const timestamp = Date.now()
// Emit special event for copilot edits that tells clients to rehydrate from database
this.io.to(workflowId).emit('copilot-workflow-edit', {
workflowId,
description,
message: 'Copilot has edited the workflow - rehydrating from database',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about copilot workflow edit: ${workflowId}`)
}
async validateWorkflowConsistency(
workflowId: string
): Promise<{ valid: boolean; issues: string[] }> {
try {
const issues: string[] = []
const orphanedEdges = await db
.select({
id: workflowEdges.id,
sourceBlockId: workflowEdges.sourceBlockId,
targetBlockId: workflowEdges.targetBlockId,
})
.from(workflowEdges)
.leftJoin(workflowBlocks, eq(workflowEdges.sourceBlockId, workflowBlocks.id))
.where(and(eq(workflowEdges.workflowId, workflowId), isNull(workflowBlocks.id)))
if (orphanedEdges.length > 0) {
issues.push(`Found ${orphanedEdges.length} orphaned edges with missing source blocks`)
}
return { valid: issues.length === 0, issues }
} catch (error) {
logger.error('Error validating workflow consistency:', error)
return { valid: false, issues: ['Consistency check failed'] }
}
}
getWorkflowRooms(): ReadonlyMap<string, WorkflowRoom> {
return this.workflowRooms
}
getSocketToWorkflow(): ReadonlyMap<string, string> {
return this.socketToWorkflow
}
getUserSessions(): ReadonlyMap<string, { userId: string; userName: string }> {
return this.userSessions
}
hasWorkflowRoom(workflowId: string): boolean {
return this.workflowRooms.has(workflowId)
}
getWorkflowRoom(workflowId: string): WorkflowRoom | undefined {
return this.workflowRooms.get(workflowId)
}
setWorkflowRoom(workflowId: string, room: WorkflowRoom): void {
this.workflowRooms.set(workflowId, room)
}
getWorkflowIdForSocket(socketId: string): string | undefined {
return this.socketToWorkflow.get(socketId)
}
setWorkflowForSocket(socketId: string, workflowId: string): void {
this.socketToWorkflow.set(socketId, workflowId)
}
getUserSession(
socketId: string
): { userId: string; userName: string; avatarUrl?: string | null } | undefined {
return this.userSessions.get(socketId)
}
setUserSession(
socketId: string,
session: { userId: string; userName: string; avatarUrl?: string | null }
): void {
this.userSessions.set(socketId, session)
}
getTotalActiveConnections(): number {
return Array.from(this.workflowRooms.values()).reduce(
(total, room) => total + room.activeConnections,
0
)
}
broadcastPresenceUpdate(workflowId: string): void {
const room = this.workflowRooms.get(workflowId)
if (room) {
const roomPresence = Array.from(room.users.values())
this.io.to(workflowId).emit('presence-update', roomPresence)
}
}
emitToWorkflow<T = unknown>(workflowId: string, event: string, payload: T): void {
this.io.to(workflowId).emit(event, payload)
}
/**
* Get the number of unique users in a workflow room
* (not the number of socket connections)
*/
getUniqueUserCount(workflowId: string): number {
const room = this.workflowRooms.get(workflowId)
if (!room) return 0
const uniqueUsers = new Set<string>()
room.users.forEach((presence) => {
uniqueUsers.add(presence.userId)
})
return uniqueUsers.size
}
}

View File

@@ -0,0 +1,260 @@
import { createLogger } from '@sim/logger'
import type { Server } from 'socket.io'
import type { IRoomManager, UserPresence, UserSession, WorkflowRoom } from '@/socket/rooms/types'
const logger = createLogger('MemoryRoomManager')
/**
* In-memory room manager for single-pod deployments
* Used as fallback when REDIS_URL is not configured
*/
export class MemoryRoomManager implements IRoomManager {
private workflowRooms = new Map<string, WorkflowRoom>()
private socketToWorkflow = new Map<string, string>()
private userSessions = new Map<string, UserSession>()
private _io: Server
constructor(io: Server) {
this._io = io
}
get io(): Server {
return this._io
}
async initialize(): Promise<void> {
logger.info('MemoryRoomManager initialized (single-pod mode)')
}
async shutdown(): Promise<void> {
this.workflowRooms.clear()
this.socketToWorkflow.clear()
this.userSessions.clear()
logger.info('MemoryRoomManager shutdown complete')
}
async addUserToRoom(workflowId: string, socketId: string, presence: UserPresence): Promise<void> {
// Create room if it doesn't exist
if (!this.workflowRooms.has(workflowId)) {
this.workflowRooms.set(workflowId, {
workflowId,
users: new Map(),
lastModified: Date.now(),
activeConnections: 0,
})
}
const room = this.workflowRooms.get(workflowId)!
room.users.set(socketId, presence)
room.activeConnections++
room.lastModified = Date.now()
// Map socket to workflow
this.socketToWorkflow.set(socketId, workflowId)
// Store session
this.userSessions.set(socketId, {
userId: presence.userId,
userName: presence.userName,
avatarUrl: presence.avatarUrl,
})
logger.debug(`Added user ${presence.userId} to workflow ${workflowId} (socket: ${socketId})`)
}
async removeUserFromRoom(socketId: string): Promise<string | null> {
const workflowId = this.socketToWorkflow.get(socketId)
if (!workflowId) {
return null
}
const room = this.workflowRooms.get(workflowId)
if (room) {
room.users.delete(socketId)
room.activeConnections = Math.max(0, room.activeConnections - 1)
// Clean up empty rooms
if (room.activeConnections === 0) {
this.workflowRooms.delete(workflowId)
logger.info(`Cleaned up empty workflow room: ${workflowId}`)
}
}
this.socketToWorkflow.delete(socketId)
this.userSessions.delete(socketId)
logger.debug(`Removed socket ${socketId} from workflow ${workflowId}`)
return workflowId
}
async getWorkflowIdForSocket(socketId: string): Promise<string | null> {
return this.socketToWorkflow.get(socketId) ?? null
}
async getUserSession(socketId: string): Promise<UserSession | null> {
return this.userSessions.get(socketId) ?? null
}
async getWorkflowUsers(workflowId: string): Promise<UserPresence[]> {
const room = this.workflowRooms.get(workflowId)
if (!room) return []
return Array.from(room.users.values())
}
async hasWorkflowRoom(workflowId: string): Promise<boolean> {
return this.workflowRooms.has(workflowId)
}
async updateUserActivity(
workflowId: string,
socketId: string,
updates: Partial<Pick<UserPresence, 'cursor' | 'selection' | 'lastActivity'>>
): Promise<void> {
const room = this.workflowRooms.get(workflowId)
if (!room) return
const presence = room.users.get(socketId)
if (presence) {
if (updates.cursor !== undefined) presence.cursor = updates.cursor
if (updates.selection !== undefined) presence.selection = updates.selection
presence.lastActivity = updates.lastActivity ?? Date.now()
}
}
async updateRoomLastModified(workflowId: string): Promise<void> {
const room = this.workflowRooms.get(workflowId)
if (room) {
room.lastModified = Date.now()
}
}
async broadcastPresenceUpdate(workflowId: string): Promise<void> {
const users = await this.getWorkflowUsers(workflowId)
this._io.to(workflowId).emit('presence-update', users)
}
emitToWorkflow<T = unknown>(workflowId: string, event: string, payload: T): void {
this._io.to(workflowId).emit(event, payload)
}
async getUniqueUserCount(workflowId: string): Promise<number> {
const room = this.workflowRooms.get(workflowId)
if (!room) return 0
const uniqueUsers = new Set<string>()
room.users.forEach((presence) => {
uniqueUsers.add(presence.userId)
})
return uniqueUsers.size
}
async getTotalActiveConnections(): Promise<number> {
let total = 0
for (const room of this.workflowRooms.values()) {
total += room.activeConnections
}
return total
}
async handleWorkflowDeletion(workflowId: string): Promise<void> {
logger.info(`Handling workflow deletion notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for deleted workflow ${workflowId}`)
return
}
this._io.to(workflowId).emit('workflow-deleted', {
workflowId,
message: 'This workflow has been deleted',
timestamp: Date.now(),
})
const socketsToDisconnect: string[] = []
room.users.forEach((_presence, socketId) => {
socketsToDisconnect.push(socketId)
})
for (const socketId of socketsToDisconnect) {
const socket = this._io.sockets.sockets.get(socketId)
if (socket) {
socket.leave(workflowId)
logger.debug(`Disconnected socket ${socketId} from deleted workflow ${workflowId}`)
}
await this.removeUserFromRoom(socketId)
}
this.workflowRooms.delete(workflowId)
logger.info(
`Cleaned up workflow room ${workflowId} after deletion (${socketsToDisconnect.length} users disconnected)`
)
}
async handleWorkflowRevert(workflowId: string, timestamp: number): Promise<void> {
logger.info(`Handling workflow revert notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for reverted workflow ${workflowId}`)
return
}
this._io.to(workflowId).emit('workflow-reverted', {
workflowId,
message: 'Workflow has been reverted to deployed state',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about workflow revert: ${workflowId}`)
}
async handleWorkflowUpdate(workflowId: string): Promise<void> {
logger.info(`Handling workflow update notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for updated workflow ${workflowId}`)
return
}
const timestamp = Date.now()
this._io.to(workflowId).emit('workflow-updated', {
workflowId,
message: 'Workflow has been updated externally',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about workflow update: ${workflowId}`)
}
async handleCopilotWorkflowEdit(workflowId: string, description?: string): Promise<void> {
logger.info(`Handling copilot workflow edit notification for ${workflowId}`)
const room = this.workflowRooms.get(workflowId)
if (!room) {
logger.debug(`No active room found for copilot workflow edit ${workflowId}`)
return
}
const timestamp = Date.now()
this._io.to(workflowId).emit('copilot-workflow-edit', {
workflowId,
description,
message: 'Copilot has edited the workflow - rehydrating from database',
timestamp,
})
room.lastModified = timestamp
logger.info(`Notified ${room.users.size} users about copilot workflow edit: ${workflowId}`)
}
}
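Either manager is driven through the same IRoomManager calls on join and disconnect; a rough usage sketch, where the handler names and the 'member' role are illustrative assumptions:

import type { IRoomManager, UserPresence } from '@/socket/rooms'

async function joinWorkflow(rm: IRoomManager, socketId: string, userId: string, userName: string, workflowId: string) {
  const presence: UserPresence = {
    userId,
    workflowId,
    userName,
    socketId,
    joinedAt: Date.now(),
    lastActivity: Date.now(),
    role: 'member', // illustrative; the real role comes from permission checks
  }
  await rm.addUserToRoom(workflowId, socketId, presence)
  await rm.broadcastPresenceUpdate(workflowId)
}

async function leaveWorkflow(rm: IRoomManager, socketId: string) {
  const workflowId = await rm.removeUserFromRoom(socketId)
  if (workflowId) await rm.broadcastPresenceUpdate(workflowId)
}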

View File

@@ -0,0 +1,434 @@
import { createLogger } from '@sim/logger'
import { createClient, type RedisClientType } from 'redis'
import type { Server } from 'socket.io'
import type { IRoomManager, UserPresence, UserSession } from '@/socket/rooms/types'
const logger = createLogger('RedisRoomManager')
const KEYS = {
workflowUsers: (wfId: string) => `workflow:${wfId}:users`,
workflowMeta: (wfId: string) => `workflow:${wfId}:meta`,
socketWorkflow: (socketId: string) => `socket:${socketId}:workflow`,
socketSession: (socketId: string) => `socket:${socketId}:session`,
} as const
const SOCKET_KEY_TTL = 3600
/**
* Lua script for atomic user removal from room.
* Returns workflowId if user was removed, null otherwise.
* Handles room cleanup atomically to prevent race conditions.
*/
const REMOVE_USER_SCRIPT = `
local socketWorkflowKey = KEYS[1]
local socketSessionKey = KEYS[2]
local workflowUsersPrefix = ARGV[1]
local workflowMetaPrefix = ARGV[2]
local socketId = ARGV[3]
local workflowId = redis.call('GET', socketWorkflowKey)
if not workflowId then
return nil
end
local workflowUsersKey = workflowUsersPrefix .. workflowId .. ':users'
local workflowMetaKey = workflowMetaPrefix .. workflowId .. ':meta'
redis.call('HDEL', workflowUsersKey, socketId)
redis.call('DEL', socketWorkflowKey, socketSessionKey)
local remaining = redis.call('HLEN', workflowUsersKey)
if remaining == 0 then
redis.call('DEL', workflowUsersKey, workflowMetaKey)
end
return workflowId
`
/**
* Lua script for atomic user activity update.
* Performs read-modify-write atomically to prevent lost updates.
* Also refreshes TTL on socket keys to prevent expiry during long sessions.
*/
const UPDATE_ACTIVITY_SCRIPT = `
local workflowUsersKey = KEYS[1]
local socketWorkflowKey = KEYS[2]
local socketSessionKey = KEYS[3]
local socketId = ARGV[1]
local cursorJson = ARGV[2]
local selectionJson = ARGV[3]
local lastActivity = ARGV[4]
local ttl = tonumber(ARGV[5])
local existingJson = redis.call('HGET', workflowUsersKey, socketId)
if not existingJson then
return 0
end
local existing = cjson.decode(existingJson)
if cursorJson ~= '' then
existing.cursor = cjson.decode(cursorJson)
end
if selectionJson ~= '' then
existing.selection = cjson.decode(selectionJson)
end
existing.lastActivity = tonumber(lastActivity)
redis.call('HSET', workflowUsersKey, socketId, cjson.encode(existing))
redis.call('EXPIRE', socketWorkflowKey, ttl)
redis.call('EXPIRE', socketSessionKey, ttl)
return 1
`
/**
* Redis-backed room manager for multi-pod deployments.
* Uses Lua scripts for atomic operations to prevent race conditions.
*/
export class RedisRoomManager implements IRoomManager {
private redis: RedisClientType
private _io: Server
private isConnected = false
private removeUserScriptSha: string | null = null
private updateActivityScriptSha: string | null = null
constructor(io: Server, redisUrl: string) {
this._io = io
this.redis = createClient({
url: redisUrl,
socket: {
reconnectStrategy: (retries) => {
if (retries > 10) {
logger.error('Redis reconnection failed after 10 attempts')
return new Error('Redis reconnection failed')
}
const delay = Math.min(retries * 100, 3000)
logger.warn(`Redis reconnecting in ${delay}ms (attempt ${retries})`)
return delay
},
},
})
this.redis.on('error', (err) => {
logger.error('Redis client error:', err)
})
this.redis.on('reconnecting', () => {
logger.warn('Redis client reconnecting...')
this.isConnected = false
})
this.redis.on('ready', () => {
logger.info('Redis client ready')
this.isConnected = true
})
}
get io(): Server {
return this._io
}
async initialize(): Promise<void> {
if (this.isConnected) return
try {
await this.redis.connect()
this.isConnected = true
// Pre-load Lua scripts for better performance
this.removeUserScriptSha = await this.redis.scriptLoad(REMOVE_USER_SCRIPT)
this.updateActivityScriptSha = await this.redis.scriptLoad(UPDATE_ACTIVITY_SCRIPT)
logger.info('RedisRoomManager connected to Redis and scripts loaded')
} catch (error) {
logger.error('Failed to connect to Redis:', error)
throw error
}
}
async shutdown(): Promise<void> {
if (!this.isConnected) return
try {
await this.redis.quit()
this.isConnected = false
logger.info('RedisRoomManager disconnected from Redis')
} catch (error) {
logger.error('Error during Redis shutdown:', error)
}
}
async addUserToRoom(workflowId: string, socketId: string, presence: UserPresence): Promise<void> {
try {
const pipeline = this.redis.multi()
pipeline.hSet(KEYS.workflowUsers(workflowId), socketId, JSON.stringify(presence))
pipeline.hSet(KEYS.workflowMeta(workflowId), 'lastModified', Date.now().toString())
pipeline.set(KEYS.socketWorkflow(socketId), workflowId)
pipeline.expire(KEYS.socketWorkflow(socketId), SOCKET_KEY_TTL)
pipeline.hSet(KEYS.socketSession(socketId), {
userId: presence.userId,
userName: presence.userName,
avatarUrl: presence.avatarUrl || '',
})
pipeline.expire(KEYS.socketSession(socketId), SOCKET_KEY_TTL)
const results = await pipeline.exec()
// Check if any command failed
const failed = results.some((result) => result instanceof Error)
if (failed) {
logger.error(`Pipeline partially failed when adding user to room`, { workflowId, socketId })
throw new Error('Failed to store user session data in Redis')
}
logger.debug(`Added user ${presence.userId} to workflow ${workflowId} (socket: ${socketId})`)
} catch (error) {
logger.error(`Failed to add user to room: ${socketId} -> ${workflowId}`, error)
throw error
}
}
async removeUserFromRoom(socketId: string, retried = false): Promise<string | null> {
if (!this.removeUserScriptSha) {
logger.error('removeUserFromRoom called before initialize()')
return null
}
try {
const workflowId = await this.redis.evalSha(this.removeUserScriptSha, {
keys: [KEYS.socketWorkflow(socketId), KEYS.socketSession(socketId)],
arguments: ['workflow:', 'workflow:', socketId],
})
if (workflowId) {
logger.debug(`Removed socket ${socketId} from workflow ${workflowId}`)
}
return workflowId as string | null
} catch (error) {
if ((error as Error).message?.includes('NOSCRIPT') && !retried) {
logger.warn('Lua script not found, reloading...')
this.removeUserScriptSha = await this.redis.scriptLoad(REMOVE_USER_SCRIPT)
return this.removeUserFromRoom(socketId, true)
}
logger.error(`Failed to remove user from room: ${socketId}`, error)
return null
}
}
async getWorkflowIdForSocket(socketId: string): Promise<string | null> {
return this.redis.get(KEYS.socketWorkflow(socketId))
}
async getUserSession(socketId: string): Promise<UserSession | null> {
try {
const session = await this.redis.hGetAll(KEYS.socketSession(socketId))
if (!session.userId) {
return null
}
return {
userId: session.userId,
userName: session.userName,
avatarUrl: session.avatarUrl || undefined,
}
} catch (error) {
logger.error(`Failed to get user session for ${socketId}:`, error)
return null
}
}
async getWorkflowUsers(workflowId: string): Promise<UserPresence[]> {
try {
const users = await this.redis.hGetAll(KEYS.workflowUsers(workflowId))
return Object.entries(users)
.map(([socketId, json]) => {
try {
return JSON.parse(json) as UserPresence
} catch {
logger.warn(`Corrupted user data for socket ${socketId}, skipping`)
return null
}
})
.filter((u): u is UserPresence => u !== null)
} catch (error) {
logger.error(`Failed to get workflow users for ${workflowId}:`, error)
return []
}
}
async hasWorkflowRoom(workflowId: string): Promise<boolean> {
const exists = await this.redis.exists(KEYS.workflowUsers(workflowId))
return exists > 0
}
async updateUserActivity(
workflowId: string,
socketId: string,
updates: Partial<Pick<UserPresence, 'cursor' | 'selection' | 'lastActivity'>>,
retried = false
): Promise<void> {
if (!this.updateActivityScriptSha) {
logger.error('updateUserActivity called before initialize()')
return
}
try {
await this.redis.evalSha(this.updateActivityScriptSha, {
keys: [
KEYS.workflowUsers(workflowId),
KEYS.socketWorkflow(socketId),
KEYS.socketSession(socketId),
],
arguments: [
socketId,
updates.cursor !== undefined ? JSON.stringify(updates.cursor) : '',
updates.selection !== undefined ? JSON.stringify(updates.selection) : '',
(updates.lastActivity ?? Date.now()).toString(),
SOCKET_KEY_TTL.toString(),
],
})
} catch (error) {
if ((error as Error).message?.includes('NOSCRIPT') && !retried) {
logger.warn('Lua script not found, reloading...')
this.updateActivityScriptSha = await this.redis.scriptLoad(UPDATE_ACTIVITY_SCRIPT)
return this.updateUserActivity(workflowId, socketId, updates, true)
}
logger.error(`Failed to update user activity: ${socketId}`, error)
}
}
async updateRoomLastModified(workflowId: string): Promise<void> {
await this.redis.hSet(KEYS.workflowMeta(workflowId), 'lastModified', Date.now().toString())
}
async broadcastPresenceUpdate(workflowId: string): Promise<void> {
const users = await this.getWorkflowUsers(workflowId)
// io.to() with Redis adapter broadcasts to all pods
this._io.to(workflowId).emit('presence-update', users)
}
emitToWorkflow<T = unknown>(workflowId: string, event: string, payload: T): void {
this._io.to(workflowId).emit(event, payload)
}
async getUniqueUserCount(workflowId: string): Promise<number> {
const users = await this.getWorkflowUsers(workflowId)
const uniqueUserIds = new Set(users.map((u) => u.userId))
return uniqueUserIds.size
}
async getTotalActiveConnections(): Promise<number> {
// This is more complex with Redis - we'd need to scan all workflow:*:users keys
// For now, just count sockets in this server instance
// The true count would require aggregating across all pods
return this._io.sockets.sockets.size
}
async handleWorkflowDeletion(workflowId: string): Promise<void> {
logger.info(`Handling workflow deletion notification for ${workflowId}`)
try {
const users = await this.getWorkflowUsers(workflowId)
if (users.length === 0) {
logger.debug(`No active users found for deleted workflow ${workflowId}`)
return
}
// Notify all clients across all pods via Redis adapter
this._io.to(workflowId).emit('workflow-deleted', {
workflowId,
message: 'This workflow has been deleted',
timestamp: Date.now(),
})
// Use Socket.IO's cross-pod socketsLeave() to remove all sockets from the room
// This works across all pods when using the Redis adapter
await this._io.in(workflowId).socketsLeave(workflowId)
logger.debug(`All sockets left workflow room ${workflowId} via socketsLeave()`)
// Remove all users from Redis state
for (const user of users) {
await this.removeUserFromRoom(user.socketId)
}
// Clean up room data
await this.redis.del([KEYS.workflowUsers(workflowId), KEYS.workflowMeta(workflowId)])
logger.info(
`Cleaned up workflow room ${workflowId} after deletion (${users.length} users disconnected)`
)
} catch (error) {
logger.error(`Failed to handle workflow deletion for ${workflowId}:`, error)
}
}
async handleWorkflowRevert(workflowId: string, timestamp: number): Promise<void> {
logger.info(`Handling workflow revert notification for ${workflowId}`)
const hasRoom = await this.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`No active room found for reverted workflow ${workflowId}`)
return
}
this._io.to(workflowId).emit('workflow-reverted', {
workflowId,
message: 'Workflow has been reverted to deployed state',
timestamp,
})
await this.updateRoomLastModified(workflowId)
const userCount = await this.getUniqueUserCount(workflowId)
logger.info(`Notified ${userCount} users about workflow revert: ${workflowId}`)
}
async handleWorkflowUpdate(workflowId: string): Promise<void> {
logger.info(`Handling workflow update notification for ${workflowId}`)
const hasRoom = await this.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`No active room found for updated workflow ${workflowId}`)
return
}
const timestamp = Date.now()
this._io.to(workflowId).emit('workflow-updated', {
workflowId,
message: 'Workflow has been updated externally',
timestamp,
})
await this.updateRoomLastModified(workflowId)
const userCount = await this.getUniqueUserCount(workflowId)
logger.info(`Notified ${userCount} users about workflow update: ${workflowId}`)
}
async handleCopilotWorkflowEdit(workflowId: string, description?: string): Promise<void> {
logger.info(`Handling copilot workflow edit notification for ${workflowId}`)
const hasRoom = await this.hasWorkflowRoom(workflowId)
if (!hasRoom) {
logger.debug(`No active room found for copilot workflow edit ${workflowId}`)
return
}
const timestamp = Date.now()
this._io.to(workflowId).emit('copilot-workflow-edit', {
workflowId,
description,
message: 'Copilot has edited the workflow - rehydrating from database',
timestamp,
})
await this.updateRoomLastModified(workflowId)
const userCount = await this.getUniqueUserCount(workflowId)
logger.info(`Notified ${userCount} users about copilot workflow edit: ${workflowId}`)
}
}
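As a worked example of the key layout the helpers and Lua scripts above operate on (ids made up, no new behavior implied):

// For socket 'abc' joined to workflow 'wf_123', the KEYS helpers produce:
const workflowId = 'wf_123'
const socketId = 'abc'
const usersKey = `workflow:${workflowId}:users`         // hash: socketId -> JSON-encoded UserPresence
const metaKey = `workflow:${workflowId}:meta`           // hash: { lastModified: <ms timestamp> }
const socketWorkflowKey = `socket:${socketId}:workflow` // string: 'wf_123', TTL 3600s, refreshed on activity
const socketSessionKey = `socket:${socketId}:session`   // hash: { userId, userName, avatarUrl }, TTL 3600s

// removeUserFromRoom(socketId) then runs REMOVE_USER_SCRIPT with
//   KEYS = [socketWorkflowKey, socketSessionKey]
//   ARGV = ['workflow:', 'workflow:', socketId]
// The script reads the workflowId from KEYS[1], rebuilds usersKey/metaKey from the prefix,
// deletes this socket's hash field, and drops both workflow keys if no users remain,
// all in a single atomic EVALSHA call.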

View File

@@ -0,0 +1,140 @@
import type { Server } from 'socket.io'
/**
* User presence data stored in room state
*/
export interface UserPresence {
userId: string
workflowId: string
userName: string
socketId: string
tabSessionId?: string
joinedAt: number
lastActivity: number
role: string
cursor?: { x: number; y: number }
selection?: { type: 'block' | 'edge' | 'none'; id?: string }
avatarUrl?: string | null
}
/**
* User session data (minimal info for quick lookups)
*/
export interface UserSession {
userId: string
userName: string
avatarUrl?: string | null
}
/**
* Workflow room state
*/
export interface WorkflowRoom {
workflowId: string
users: Map<string, UserPresence>
lastModified: number
activeConnections: number
}
/**
* Common interface for room managers (in-memory and Redis)
* All methods that access state are async to support Redis operations
*/
export interface IRoomManager {
readonly io: Server
/**
* Initialize the room manager (connect to Redis, etc.)
*/
initialize(): Promise<void>
/**
* Clean shutdown
*/
shutdown(): Promise<void>
/**
* Add a user to a workflow room
*/
addUserToRoom(workflowId: string, socketId: string, presence: UserPresence): Promise<void>
/**
* Remove a user from their current room
* Returns the workflowId they were in, or null if not in any room
*/
removeUserFromRoom(socketId: string): Promise<string | null>
/**
* Get the workflow ID for a socket
*/
getWorkflowIdForSocket(socketId: string): Promise<string | null>
/**
* Get user session data for a socket
*/
getUserSession(socketId: string): Promise<UserSession | null>
/**
* Get all users in a workflow room
*/
getWorkflowUsers(workflowId: string): Promise<UserPresence[]>
/**
* Check if a workflow room exists
*/
hasWorkflowRoom(workflowId: string): Promise<boolean>
/**
* Update user activity (cursor, selection, lastActivity)
*/
updateUserActivity(
workflowId: string,
socketId: string,
updates: Partial<Pick<UserPresence, 'cursor' | 'selection' | 'lastActivity'>>
): Promise<void>
/**
* Update room's lastModified timestamp
*/
updateRoomLastModified(workflowId: string): Promise<void>
/**
* Broadcast presence update to all clients in a workflow room
*/
broadcastPresenceUpdate(workflowId: string): Promise<void>
/**
* Emit an event to all clients in a workflow room
*/
emitToWorkflow<T = unknown>(workflowId: string, event: string, payload: T): void
/**
* Get the number of unique users in a workflow room
*/
getUniqueUserCount(workflowId: string): Promise<number>
/**
* Get total active connections across all rooms
*/
getTotalActiveConnections(): Promise<number>
/**
* Handle workflow deletion - notify users and clean up room
*/
handleWorkflowDeletion(workflowId: string): Promise<void>
/**
* Handle workflow revert - notify users
*/
handleWorkflowRevert(workflowId: string, timestamp: number): Promise<void>
/**
* Handle workflow update - notify users
*/
handleWorkflowUpdate(workflowId: string): Promise<void>
/**
* Handle copilot workflow edit - notify users to rehydrate
*/
handleCopilotWorkflowEdit(workflowId: string, description?: string): Promise<void>
}

View File

@@ -1,11 +1,52 @@
import type { IncomingMessage, ServerResponse } from 'http'
import type { RoomManager } from '@/socket/rooms/manager'
import { env } from '@/lib/core/config/env'
import type { IRoomManager } from '@/socket/rooms'
interface Logger {
info: (message: string, ...args: any[]) => void
error: (message: string, ...args: any[]) => void
debug: (message: string, ...args: any[]) => void
warn: (message: string, ...args: any[]) => void
info: (message: string, ...args: unknown[]) => void
error: (message: string, ...args: unknown[]) => void
debug: (message: string, ...args: unknown[]) => void
warn: (message: string, ...args: unknown[]) => void
}
function checkInternalApiKey(req: IncomingMessage): { success: boolean; error?: string } {
const apiKey = req.headers['x-api-key']
const expectedApiKey = env.INTERNAL_API_SECRET
if (!expectedApiKey) {
return { success: false, error: 'Internal API key not configured' }
}
if (!apiKey) {
return { success: false, error: 'API key required' }
}
if (apiKey !== expectedApiKey) {
return { success: false, error: 'Invalid API key' }
}
return { success: true }
}
function readRequestBody(req: IncomingMessage): Promise<string> {
return new Promise((resolve, reject) => {
let body = ''
req.on('data', (chunk) => {
body += chunk.toString()
})
req.on('end', () => resolve(body))
req.on('error', reject)
})
}
function sendSuccess(res: ServerResponse): void {
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ success: true }))
}
function sendError(res: ServerResponse, message: string, status = 500): void {
res.writeHead(status, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: message }))
}
/**
@@ -14,101 +55,91 @@ interface Logger {
* @param logger - Logger instance for logging requests and errors
* @returns HTTP request handler function
*/
export function createHttpHandler(roomManager: RoomManager, logger: Logger) {
export function createHttpHandler(roomManager: IRoomManager, logger: Logger) {
return (req: IncomingMessage, res: ServerResponse) => {
return async (req: IncomingMessage, res: ServerResponse) => {
// Health check doesn't require auth
if (req.method === 'GET' && req.url === '/health') {
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(
JSON.stringify({
status: 'ok',
timestamp: new Date().toISOString(),
connections: roomManager.getTotalActiveConnections(),
})
)
try {
const connections = await roomManager.getTotalActiveConnections()
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(
JSON.stringify({
status: 'ok',
timestamp: new Date().toISOString(),
connections,
})
)
} catch (error) {
logger.error('Error in health check:', error)
res.writeHead(503, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ status: 'error', message: 'Health check failed' }))
}
return
}
// All POST endpoints require internal API key authentication
if (req.method === 'POST') {
const authResult = checkInternalApiKey(req)
if (!authResult.success) {
res.writeHead(401, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: authResult.error }))
return
}
}
// Handle workflow deletion notifications from the main API
if (req.method === 'POST' && req.url === '/api/workflow-deleted') {
let body = ''
req.on('data', (chunk) => {
body += chunk.toString()
})
req.on('end', () => {
try {
const { workflowId } = JSON.parse(body)
roomManager.handleWorkflowDeletion(workflowId)
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ success: true }))
} catch (error) {
logger.error('Error handling workflow deletion notification:', error)
res.writeHead(500, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: 'Failed to process deletion notification' }))
}
})
try {
const body = await readRequestBody(req)
const { workflowId } = JSON.parse(body)
await roomManager.handleWorkflowDeletion(workflowId)
sendSuccess(res)
} catch (error) {
logger.error('Error handling workflow deletion notification:', error)
sendError(res, 'Failed to process deletion notification')
}
return
}
// Handle workflow update notifications from the main API
if (req.method === 'POST' && req.url === '/api/workflow-updated') {
let body = ''
req.on('data', (chunk) => {
body += chunk.toString()
})
req.on('end', () => {
try {
const { workflowId } = JSON.parse(body)
roomManager.handleWorkflowUpdate(workflowId)
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ success: true }))
} catch (error) {
logger.error('Error handling workflow update notification:', error)
res.writeHead(500, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: 'Failed to process update notification' }))
}
})
try {
const body = await readRequestBody(req)
const { workflowId } = JSON.parse(body)
await roomManager.handleWorkflowUpdate(workflowId)
sendSuccess(res)
} catch (error) {
logger.error('Error handling workflow update notification:', error)
sendError(res, 'Failed to process update notification')
}
return
}
// Handle copilot workflow edit notifications from the main API
if (req.method === 'POST' && req.url === '/api/copilot-workflow-edit') {
let body = ''
req.on('data', (chunk) => {
body += chunk.toString()
})
req.on('end', () => {
try {
const { workflowId, description } = JSON.parse(body)
roomManager.handleCopilotWorkflowEdit(workflowId, description)
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ success: true }))
} catch (error) {
logger.error('Error handling copilot workflow edit notification:', error)
res.writeHead(500, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: 'Failed to process copilot edit notification' }))
}
})
try {
const body = await readRequestBody(req)
const { workflowId, description } = JSON.parse(body)
await roomManager.handleCopilotWorkflowEdit(workflowId, description)
sendSuccess(res)
} catch (error) {
logger.error('Error handling copilot workflow edit notification:', error)
sendError(res, 'Failed to process copilot edit notification')
}
return
}
// Handle workflow revert notifications from the main API
if (req.method === 'POST' && req.url === '/api/workflow-reverted') {
let body = ''
req.on('data', (chunk) => {
body += chunk.toString()
})
req.on('end', () => {
try {
const { workflowId, timestamp } = JSON.parse(body)
roomManager.handleWorkflowRevert(workflowId, timestamp)
res.writeHead(200, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ success: true }))
} catch (error) {
logger.error('Error handling workflow revert notification:', error)
res.writeHead(500, { 'Content-Type': 'application/json' })
res.end(JSON.stringify({ error: 'Failed to process revert notification' }))
}
})
try {
const body = await readRequestBody(req)
const { workflowId, timestamp } = JSON.parse(body)
await roomManager.handleWorkflowRevert(workflowId, timestamp)
sendSuccess(res)
} catch (error) {
logger.error('Error handling workflow revert notification:', error)
sendError(res, 'Failed to process revert notification')
}
return
}
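On the calling side, the main app has to send the shared secret with every notification; a sketch of such a caller, with the environment variable names and the default URL treated as assumptions:

// Notifies the socket server that a workflow changed; mirrors the contract enforced by
// checkInternalApiKey above (JSON body plus an x-api-key header matching INTERNAL_API_SECRET).
async function notifySocketServer(workflowId: string): Promise<void> {
  const socketServerUrl = process.env.SOCKET_SERVER_URL ?? 'http://localhost:3002' // assumed default
  await fetch(`${socketServerUrl}/api/workflow-updated`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': process.env.INTERNAL_API_SECRET ?? '',
    },
    body: JSON.stringify({ workflowId }),
  })
}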

View File

@@ -239,5 +239,3 @@ export const WorkflowOperationSchema = z.union([
VariableOperationSchema,
WorkflowStateOperationSchema,
])
export { PositionSchema, AutoConnectEdgeSchema }

View File

@@ -4,6 +4,19 @@ import type { OperationQueueState, QueuedOperation } from './types'
const logger = createLogger('OperationQueue')
/** Timeout for subblock/variable operations before considering them failed */
const SUBBLOCK_VARIABLE_TIMEOUT_MS = 15000
/** Timeout for structural operations before considering them failed */
const STRUCTURAL_TIMEOUT_MS = 5000
/** Maximum retry attempts for subblock/variable operations */
const SUBBLOCK_VARIABLE_MAX_RETRIES = 5
/** Maximum retry attempts for structural operations */
const STRUCTURAL_MAX_RETRIES = 3
/** Maximum retry delay cap for subblock/variable operations */
const SUBBLOCK_VARIABLE_MAX_RETRY_DELAY_MS = 3000
/** Base retry delay multiplier (1s, 2s, 3s for linear) */
const RETRY_DELAY_BASE_MS = 1000
const retryTimeouts = new Map<string, NodeJS.Timeout>()
const operationTimeouts = new Map<string, NodeJS.Timeout>()
@@ -200,14 +213,14 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
(operation.operation.operation === 'variable-update' &&
operation.operation.target === 'variable')
const maxRetries = isSubblockOrVariable ? 5 : 3 // 5 retries for text, 3 for structural
const maxRetries = isSubblockOrVariable ? SUBBLOCK_VARIABLE_MAX_RETRIES : STRUCTURAL_MAX_RETRIES
if (operation.retryCount < maxRetries) {
const newRetryCount = operation.retryCount + 1
// Faster retries for subblock/variable, exponential for structural
const delay = isSubblockOrVariable
? Math.min(1000 * newRetryCount, 3000) // 1s, 2s, 3s, 3s, 3s (cap at 3s)
: 2 ** newRetryCount * 1000 // 2s, 4s, 8s (exponential for structural)
? Math.min(RETRY_DELAY_BASE_MS * newRetryCount, SUBBLOCK_VARIABLE_MAX_RETRY_DELAY_MS)
: 2 ** newRetryCount * RETRY_DELAY_BASE_MS
logger.warn(
`Operation failed, retrying in ${delay}ms (attempt ${newRetryCount}/${maxRetries})`,
@@ -309,7 +322,9 @@ export const useOperationQueueStore = create<OperationQueueState>((set, get) =>
nextOperation.operation.target === 'subblock') ||
(nextOperation.operation.operation === 'variable-update' &&
nextOperation.operation.target === 'variable')
const timeoutDuration = isSubblockOrVariable ? 15000 : 5000 // 15s for text edits, 5s for structural ops
const timeoutDuration = isSubblockOrVariable
? SUBBLOCK_VARIABLE_TIMEOUT_MS
: STRUCTURAL_TIMEOUT_MS
const timeoutId = setTimeout(() => {
logger.warn(`Operation timeout - no server response after ${timeoutDuration}ms`, {
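For reference, the retry schedule these constants encode, as a small standalone calculation using the constants defined above:

// Mirrors the delay logic: linear with a 3s cap for subblock/variable edits, exponential for structural ops.
function retryDelayMs(isSubblockOrVariable: boolean, retryCount: number): number {
  return isSubblockOrVariable
    ? Math.min(RETRY_DELAY_BASE_MS * retryCount, SUBBLOCK_VARIABLE_MAX_RETRY_DELAY_MS) // 1s, 2s, 3s, 3s, 3s
    : 2 ** retryCount * RETRY_DELAY_BASE_MS // 2s, 4s, 8s
}
// retryDelayMs(true, 5)  -> 3000 (fifth and final retry for a subblock/variable op)
// retryDelayMs(false, 3) -> 8000 (third and final retry for a structural op)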

View File

@@ -104,6 +104,7 @@
"@react-email/components": "^0.0.34",
"@react-email/render": "2.0.0",
"@sim/logger": "workspace:*",
"@socket.io/redis-adapter": "8.3.0",
"@t3-oss/env-nextjs": "0.13.4",
"@tanstack/react-query": "5.90.8",
"@tanstack/react-query-devtools": "5.90.2",
@@ -174,6 +175,7 @@
"react-simple-code-editor": "^0.14.1",
"react-window": "2.2.3",
"reactflow": "^11.11.4",
"redis": "5.10.0",
"rehype-autolink-headings": "^7.1.0",
"rehype-slug": "^6.0.0",
"remark-gfm": "4.0.1",
@@ -1146,6 +1148,16 @@
"@reactflow/node-toolbar": ["@reactflow/node-toolbar@1.3.14", "", { "dependencies": { "@reactflow/core": "11.11.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, "peerDependencies": { "react": ">=17", "react-dom": ">=17" } }, "sha512-rbynXQnH/xFNu4P9H+hVqlEUafDCkEoCy0Dg9mG22Sg+rY/0ck6KkrAQrYrTgXusd+cEJOMK0uOOFCK2/5rSGQ=="], "@reactflow/node-toolbar": ["@reactflow/node-toolbar@1.3.14", "", { "dependencies": { "@reactflow/core": "11.11.4", "classcat": "^5.0.3", "zustand": "^4.4.1" }, "peerDependencies": { "react": ">=17", "react-dom": ">=17" } }, "sha512-rbynXQnH/xFNu4P9H+hVqlEUafDCkEoCy0Dg9mG22Sg+rY/0ck6KkrAQrYrTgXusd+cEJOMK0uOOFCK2/5rSGQ=="],
"@redis/bloom": ["@redis/bloom@5.10.0", "", { "peerDependencies": { "@redis/client": "^5.10.0" } }, "sha512-doIF37ob+l47n0rkpRNgU8n4iacBlKM9xLiP1LtTZTvz8TloJB8qx/MgvhMhKdYG+CvCY2aPBnN2706izFn/4A=="],
"@redis/client": ["@redis/client@5.10.0", "", { "dependencies": { "cluster-key-slot": "1.1.2" } }, "sha512-JXmM4XCoso6C75Mr3lhKA3eNxSzkYi3nCzxDIKY+YOszYsJjuKbFgVtguVPbLMOttN4iu2fXoc2BGhdnYhIOxA=="],
"@redis/json": ["@redis/json@5.10.0", "", { "peerDependencies": { "@redis/client": "^5.10.0" } }, "sha512-B2G8XlOmTPUuZtD44EMGbtoepQG34RCDXLZbjrtON1Djet0t5Ri7/YPXvL9aomXqP8lLTreaprtyLKF4tmXEEA=="],
"@redis/search": ["@redis/search@5.10.0", "", { "peerDependencies": { "@redis/client": "^5.10.0" } }, "sha512-3SVcPswoSfp2HnmWbAGUzlbUPn7fOohVu2weUQ0S+EMiQi8jwjL+aN2p6V3TI65eNfVsJ8vyPvqWklm6H6esmg=="],
"@redis/time-series": ["@redis/time-series@5.10.0", "", { "peerDependencies": { "@redis/client": "^5.10.0" } }, "sha512-cPkpddXH5kc/SdRhF0YG0qtjL+noqFT0AcHbQ6axhsPsO7iqPi1cjxgdkE9TNeKiBUUdCaU1DbqkR/LzbzPBhg=="],
"@resvg/resvg-wasm": ["@resvg/resvg-wasm@2.4.0", "", {}, "sha512-C7c51Nn4yTxXFKvgh2txJFNweaVcfUPQxwEUFw4aWsCmfiBDJsTSwviIF8EcwjQ6k8bPyMWCl1vw4BdxE569Cg=="], "@resvg/resvg-wasm": ["@resvg/resvg-wasm@2.4.0", "", {}, "sha512-C7c51Nn4yTxXFKvgh2txJFNweaVcfUPQxwEUFw4aWsCmfiBDJsTSwviIF8EcwjQ6k8bPyMWCl1vw4BdxE569Cg=="],
"@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.27", "", {}, "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA=="], "@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.27", "", {}, "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA=="],
@@ -1340,6 +1352,8 @@
"@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="], "@socket.io/component-emitter": ["@socket.io/component-emitter@3.1.2", "", {}, "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA=="],
"@socket.io/redis-adapter": ["@socket.io/redis-adapter@8.3.0", "", { "dependencies": { "debug": "~4.3.1", "notepack.io": "~3.0.1", "uid2": "1.0.0" }, "peerDependencies": { "socket.io-adapter": "^2.5.4" } }, "sha512-ly0cra+48hDmChxmIpnESKrc94LjRL80TEmZVscuQ/WWkRP81nNj8W8cCGMqbI4L6NCuAaPRSzZF1a9GlAxxnA=="],
"@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="], "@standard-schema/spec": ["@standard-schema/spec@1.1.0", "", {}, "sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w=="],
"@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="], "@standard-schema/utils": ["@standard-schema/utils@0.3.0", "", {}, "sha512-e7Mew686owMaPJVNNLs55PUvgz371nKgwsc4vxE49zsODpJEnxgxRo2y/OKrqueavXgZNMDVj3DdHFlaSAeU8g=="],
@@ -2802,6 +2816,8 @@
"normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="], "normalize-range": ["normalize-range@0.1.2", "", {}, "sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA=="],
"notepack.io": ["notepack.io@3.0.1", "", {}, "sha512-TKC/8zH5pXIAMVQio2TvVDTtPRX+DJPHDqjRbxogtFiByHyzKmy96RA0JtCQJ+WouyyL4A10xomQzgbUT+1jCg=="],
"npm-run-path": ["npm-run-path@5.3.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ=="], "npm-run-path": ["npm-run-path@5.3.0", "", { "dependencies": { "path-key": "^4.0.0" } }, "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ=="],
"npm-to-yarn": ["npm-to-yarn@3.0.1", "", {}, "sha512-tt6PvKu4WyzPwWUzy/hvPFqn+uwXO0K1ZHka8az3NnrhWJDmSqI8ncWq0fkL0k/lmmi5tAC11FXwXuh0rFbt1A=="], "npm-to-yarn": ["npm-to-yarn@3.0.1", "", {}, "sha512-tt6PvKu4WyzPwWUzy/hvPFqn+uwXO0K1ZHka8az3NnrhWJDmSqI8ncWq0fkL0k/lmmi5tAC11FXwXuh0rFbt1A=="],
@@ -3072,6 +3088,8 @@
"redent": ["redent@3.0.0", "", { "dependencies": { "indent-string": "^4.0.0", "strip-indent": "^3.0.0" } }, "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg=="], "redent": ["redent@3.0.0", "", { "dependencies": { "indent-string": "^4.0.0", "strip-indent": "^3.0.0" } }, "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg=="],
"redis": ["redis@5.10.0", "", { "dependencies": { "@redis/bloom": "5.10.0", "@redis/client": "5.10.0", "@redis/json": "5.10.0", "@redis/search": "5.10.0", "@redis/time-series": "5.10.0" } }, "sha512-0/Y+7IEiTgVGPrLFKy8oAEArSyEJkU0zvgV5xyi9NzNQ+SLZmyFbUsWIbgPcd4UdUh00opXGKlXJwMmsis5Byw=="],
"redis-errors": ["redis-errors@1.2.0", "", {}, "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="], "redis-errors": ["redis-errors@1.2.0", "", {}, "sha512-1qny3OExCf0UvUV/5wpYKf2YwPcOqXzkwKKSmKHiE6ZMQs5heeE/c8eXK+PNllPvmjgAbfnsbpkGZWy8cBpn9w=="],
"redis-parser": ["redis-parser@3.0.0", "", { "dependencies": { "redis-errors": "^1.0.0" } }, "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A=="], "redis-parser": ["redis-parser@3.0.0", "", { "dependencies": { "redis-errors": "^1.0.0" } }, "sha512-DJnGAeenTdpMEH6uAJRK/uiyEIH9WVsUmoLwzudwGJUwZPp80PDBWPHXSAGNPwNvIXAbe7MSUB1zQFugFml66A=="],
@@ -3434,6 +3452,8 @@
"ufo": ["ufo@1.6.3", "", {}, "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q=="], "ufo": ["ufo@1.6.3", "", {}, "sha512-yDJTmhydvl5lJzBmy/hyOAA0d+aqCBuwl818haVdYCRrWV84o7YyeVm4QlVHStqNrrJSTb6jKuFAVqAFsr+K3Q=="],
"uid2": ["uid2@1.0.0", "", {}, "sha512-+I6aJUv63YAcY9n4mQreLUt0d4lvwkkopDNmpomkAUz0fAkEMV9pRWxN0EjhW1YfRhcuyHg2v3mwddCDW1+LFQ=="],
"ulid": ["ulid@2.4.0", "", { "bin": { "ulid": "bin/cli.js" } }, "sha512-fIRiVTJNcSRmXKPZtGzFQv9WRrZ3M9eoptl/teFJvjOzmpU+/K/JH6HZ8deBfb5vMEpicJcLn7JmvdknlMq7Zg=="], "ulid": ["ulid@2.4.0", "", { "bin": { "ulid": "bin/cli.js" } }, "sha512-fIRiVTJNcSRmXKPZtGzFQv9WRrZ3M9eoptl/teFJvjOzmpU+/K/JH6HZ8deBfb5vMEpicJcLn7JmvdknlMq7Zg=="],
"unbzip2-stream": ["unbzip2-stream@1.4.3", "", { "dependencies": { "buffer": "^5.2.1", "through": "^2.3.8" } }, "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg=="], "unbzip2-stream": ["unbzip2-stream@1.4.3", "", { "dependencies": { "buffer": "^5.2.1", "through": "^2.3.8" } }, "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg=="],
@@ -3852,6 +3872,8 @@
"@shuding/opentype.js/fflate": ["fflate@0.7.4", "", {}, "sha512-5u2V/CDW15QM1XbbgS+0DfPxVB+jUKhWEKuuFuHncbk3tEEqzmoXL+2KyOFuKGqOnmdIy0/davWF1CkuwtibCw=="], "@shuding/opentype.js/fflate": ["fflate@0.7.4", "", {}, "sha512-5u2V/CDW15QM1XbbgS+0DfPxVB+jUKhWEKuuFuHncbk3tEEqzmoXL+2KyOFuKGqOnmdIy0/davWF1CkuwtibCw=="],
"@socket.io/redis-adapter/debug": ["debug@4.3.7", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ=="],
"@tailwindcss/node/jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="], "@tailwindcss/node/jiti": ["jiti@2.6.1", "", { "bin": { "jiti": "lib/jiti-cli.mjs" } }, "sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ=="],
"@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="], "@tailwindcss/oxide-wasm32-wasi/@emnapi/core": ["@emnapi/core@1.8.1", "", { "dependencies": { "@emnapi/wasi-threads": "1.1.0", "tslib": "^2.4.0" }, "bundled": true }, "sha512-AvT9QFpxK0Zd8J0jopedNm+w/2fIzvtPKPjqyw9jwvBaReTTqPBk9Hixaz7KbjimP+QNz605/XnjFcDAL2pqBg=="],

View File

@@ -44,6 +44,10 @@ spec:
env:
- name: DATABASE_URL
value: {{ include "sim.databaseUrl" . | quote }}
{{- if .Values.app.env.REDIS_URL }}
- name: REDIS_URL
value: {{ .Values.app.env.REDIS_URL | quote }}
{{- end }}
{{- range $key, $value := .Values.realtime.env }}
- name: {{ $key }}
value: {{ $value | quote }}