Mirror of https://github.com/simstudioai/sim.git
improvement(auth): added ability to inject secrets to kubernetes, server-side ff to disable email registration (#2728)

* improvement(auth): added ability to inject secrets to kubernetes, server-side ff to disable email registration
* consolidated telemetry events
* comments cleanup
* ack PR comment
* refactor to use createEnvMock helper instead of local mocks
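Most of the hunks below are one mechanical change: ad-hoc `trackPlatformEvent(...)` calls with stringly-typed attribute keys are replaced by typed `PlatformEvents.*` helpers that own the event names and the attribute mapping. A representative before/after, taken from the workflow-undeploy hunk further down:

    // Before: event name and attribute keys repeated at every call site
    const { trackPlatformEvent } = await import('@/lib/core/telemetry')
    trackPlatformEvent('platform.workflow.undeployed', {
      'workflow.id': id,
    })

    // After: a typed helper; the attribute mapping lives in one place (telemetry.ts)
    const { PlatformEvents } = await import('@/lib/core/telemetry')
    PlatformEvents.workflowUndeployed({ workflowId: id })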
@@ -212,6 +212,18 @@ export async function POST(request: NextRequest) {

    logger.info(`Chat "${title}" deployed successfully at ${chatUrl}`)

    try {
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.chatDeployed({
        chatId: id,
        workflowId,
        authType,
        hasOutputConfigs: outputConfigs.length > 0,
      })
    } catch (_e) {
      // Silently fail
    }

    return createSuccessResponse({
      id,
      chatUrl,
@@ -198,15 +198,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      `[${requestId}] Starting controlled async processing of ${createdDocuments.length} documents`
    )

    // Track bulk document upload
    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.knowledge_base.documents_uploaded', {
        'knowledge_base.id': knowledgeBaseId,
        'documents.count': createdDocuments.length,
        'documents.upload_type': 'bulk',
        'processing.chunk_size': validatedData.processingOptions.chunkSize,
        'processing.recipe': validatedData.processingOptions.recipe,
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.knowledgeBaseDocumentsUploaded({
        knowledgeBaseId,
        documentsCount: createdDocuments.length,
        uploadType: 'bulk',
        chunkSize: validatedData.processingOptions.chunkSize,
        recipe: validatedData.processingOptions.recipe,
      })
    } catch (_e) {
      // Silently fail
@@ -262,15 +261,14 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      userId
    )

    // Track single document upload
    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.knowledge_base.documents_uploaded', {
        'knowledge_base.id': knowledgeBaseId,
        'documents.count': 1,
        'documents.upload_type': 'single',
        'document.mime_type': validatedData.mimeType,
        'document.file_size': validatedData.fileSize,
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.knowledgeBaseDocumentsUploaded({
        knowledgeBaseId,
        documentsCount: 1,
        uploadType: 'single',
        mimeType: validatedData.mimeType,
        fileSize: validatedData.fileSize,
      })
    } catch (_e) {
      // Silently fail
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import {
  deleteKnowledgeBase,

@@ -183,6 +184,14 @@ export async function DELETE(

    await deleteKnowledgeBase(id, requestId)

    try {
      PlatformEvents.knowledgeBaseDeleted({
        knowledgeBaseId: id,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    logger.info(`[${requestId}] Knowledge base deleted: ${id} for user ${session.user.id}`)

    return NextResponse.json({
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { createKnowledgeBase, getKnowledgeBases } from '@/lib/knowledge/service'

@@ -94,6 +95,16 @@ export async function POST(req: NextRequest) {

    const newKnowledgeBase = await createKnowledgeBase(createData, requestId)

    try {
      PlatformEvents.knowledgeBaseCreated({
        knowledgeBaseId: newKnowledgeBase.id,
        name: validatedData.name,
        workspaceId: validatedData.workspaceId,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    logger.info(
      `[${requestId}] Knowledge base created: ${newKnowledgeBase.id} for user ${session.user.id}`
    )
@@ -5,6 +5,7 @@
 *
 * @vitest-environment node
 */
import { createEnvMock } from '@sim/testing'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import {
  createMockRequest,

@@ -26,13 +27,7 @@ vi.mock('drizzle-orm', () => ({

mockKnowledgeSchemas()

vi.mock('@/lib/core/config/env', () => ({
  env: {
    OPENAI_API_KEY: 'test-api-key',
  },
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))
vi.mock('@/lib/core/config/env', () => createEnvMock({ OPENAI_API_KEY: 'test-api-key' }))

vi.mock('@/lib/core/utils/request', () => ({
  generateRequestId: vi.fn(() => 'test-request-id'),
@@ -1,6 +1,7 @@
import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { ALL_TAG_SLOTS } from '@/lib/knowledge/constants'
import { getDocumentTagDefinitions } from '@/lib/knowledge/tags/service'

@@ -294,6 +295,16 @@ export async function POST(request: NextRequest) {
    const documentIds = results.map((result) => result.documentId)
    const documentNameMap = await getDocumentNamesByIds(documentIds)

    try {
      PlatformEvents.knowledgeBaseSearched({
        knowledgeBaseId: accessibleKbIds[0],
        resultsCount: results.length,
        workspaceId: workspaceId || undefined,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    return NextResponse.json({
      success: true,
      data: {
@@ -4,6 +4,7 @@
 *
 * @vitest-environment node
 */
import { createEnvMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('drizzle-orm')

@@ -30,12 +31,7 @@ vi.stubGlobal(
  })
)

vi.mock('@/lib/core/config/env', () => ({
  env: {},
  getEnv: (key: string) => process.env[key],
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))
vi.mock('@/lib/core/config/env', () => createEnvMock())

import {
  generateSearchEmbedding,
@@ -6,6 +6,7 @@
 * This file contains unit tests for the knowledge base utility functions,
 * including access checks, document processing, and embedding generation.
 */
import { createEnvMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'

vi.mock('drizzle-orm', () => ({
@@ -15,12 +16,7 @@ vi.mock('drizzle-orm', () => ({
  sql: (strings: TemplateStringsArray, ...expr: any[]) => ({ strings, expr }),
}))

vi.mock('@/lib/core/config/env', () => ({
  env: { OPENAI_API_KEY: 'test-key' },
  getEnv: (key: string) => process.env[key],
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
}))
vi.mock('@/lib/core/config/env', () => createEnvMock({ OPENAI_API_KEY: 'test-key' }))

vi.mock('@/lib/knowledge/documents/utils', () => ({
  retryWithExponentialBackoff: (fn: any) => fn(),
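The test hunks above swap hand-rolled `vi.mock` factories for `createEnvMock` from `@sim/testing`. Its implementation is not part of this diff; a minimal sketch consistent with the inline mocks it replaces (the exact export shape is an assumption):

    // Hypothetical reconstruction - inferred from the inline mocks deleted above
    export function createEnvMock(overrides: Record<string, unknown> = {}) {
      return {
        env: { ...overrides },
        getEnv: (key: string) => process.env[key],
        isTruthy: (value: string | boolean | number | undefined) =>
          typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
        isFalsy: (value: string | boolean | number | undefined) =>
          typeof value === 'string' ? value === 'false' || value === '0' : value === false,
      }
    }

Centralizing the mock means a change to the real env module's surface (such as the new `isFalsy` used later in this diff) only has to be mirrored in one helper rather than in every test file.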
@@ -140,12 +140,12 @@ export const POST = withMcpAuth('write')(
    )

    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.mcp.server_added', {
        'mcp.server_id': serverId,
        'mcp.server_name': body.name,
        'mcp.transport': body.transport,
        'workspace.id': workspaceId,
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.mcpServerAdded({
        serverId,
        serverName: body.name,
        transport: body.transport,
        workspaceId,
      })
    } catch (_e) {
      // Silently fail
@@ -194,12 +194,12 @@ export const POST = withMcpAuth('read')(
    logger.info(`[${requestId}] Successfully executed tool ${toolName} on server ${serverId}`)

    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.mcp.tool_executed', {
        'mcp.server_id': serverId,
        'mcp.tool_name': toolName,
        'mcp.execution_status': 'success',
        'workspace.id': workspaceId,
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.mcpToolExecuted({
        serverId,
        toolName,
        status: 'success',
        workspaceId,
      })
    } catch {
      // Telemetry failure is non-critical
@@ -168,18 +168,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      `[${requestId}] Successfully used template: ${id}, created workflow: ${newWorkflowId}`
    )

    // Track template usage
    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      const templateState = templateData.state as any
      trackPlatformEvent('platform.template.used', {
        'template.id': id,
        'template.name': templateData.name,
        'workflow.created_id': newWorkflowId,
        'workflow.blocks_count': templateState?.blocks
          ? Object.keys(templateState.blocks).length
          : 0,
        'workspace.id': workspaceId,
      PlatformEvents.templateUsed({
        templateId: id,
        templateName: templateData.name,
        newWorkflowId,
        blocksCount: templateState?.blocks ? Object.keys(templateState.blocks).length : 0,
        workspaceId,
      })
    } catch (_e) {
      // Silently fail
@@ -5,6 +5,7 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { validateInteger } from '@/lib/core/security/input-validation'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

@@ -314,6 +315,17 @@ export async function DELETE(
        await db.delete(webhook).where(eq(webhook.id, wId))
      }

      try {
        for (const wId of idsToDelete) {
          PlatformEvents.webhookDeleted({
            webhookId: wId,
            workflowId: webhookData.workflow.id,
          })
        }
      } catch {
        // Telemetry should not fail the operation
      }

      logger.info(
        `[${requestId}] Successfully deleted ${idsToDelete.length} webhooks for credential set`,
        {
@@ -325,6 +337,16 @@ export async function DELETE(
    } else {
      await cleanupExternalWebhook(foundWebhook, webhookData.workflow, requestId)
      await db.delete(webhook).where(eq(webhook.id, id))

      try {
        PlatformEvents.webhookDeleted({
          webhookId: id,
          workflowId: webhookData.workflow.id,
        })
      } catch {
        // Telemetry should not fail the operation
      }

      logger.info(`[${requestId}] Successfully deleted webhook: ${id}`)
    }
@@ -5,6 +5,7 @@ import { and, desc, eq } from 'drizzle-orm'
import { nanoid } from 'nanoid'
import { type NextRequest, NextResponse } from 'next/server'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'
@@ -790,6 +791,19 @@ export async function POST(request: NextRequest) {
    }
    // --- End Grain specific logic ---

    if (!targetWebhookId && savedWebhook) {
      try {
        PlatformEvents.webhookCreated({
          webhookId: savedWebhook.id,
          workflowId: workflowId,
          provider: provider || 'generic',
          workspaceId: workflowRecord.workspaceId || undefined,
        })
      } catch {
        // Telemetry should not fail the operation
      }
    }

    const status = targetWebhookId ? 200 : 201
    return NextResponse.json({ webhook: savedWebhook }, { status })
  } catch (error: any) {
@@ -217,10 +217,8 @@ export async function DELETE(
    logger.info(`[${requestId}] Workflow undeployed successfully: ${id}`)

    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.workflow.undeployed', {
        'workflow.id': id,
      })
      const { PlatformEvents } = await import('@/lib/core/telemetry')
      PlatformEvents.workflowUndeployed({ workflowId: id })
    } catch (_e) {
      // Silently fail
    }
@@ -2,6 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { duplicateWorkflow } from '@/lib/workflows/persistence/duplicate'

@@ -46,6 +47,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      requestId,
    })

    try {
      PlatformEvents.workflowDuplicated({
        sourceWorkflowId,
        newWorkflowId: result.id,
        workspaceId,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    const elapsed = Date.now() - startTime
    logger.info(
      `[${requestId}] Successfully duplicated workflow ${sourceWorkflowId} to ${result.id} in ${elapsed}ms`
@@ -8,6 +8,7 @@ import { authenticateApiKeyFromHeader, updateApiKeyLastUsed } from '@/lib/api-ke
import { getSession } from '@/lib/auth'
import { verifyInternalToken } from '@/lib/auth/internal'
import { env } from '@/lib/core/config/env'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { loadWorkflowFromNormalizedTables } from '@/lib/workflows/persistence/utils'
import { getWorkflowAccessContext, getWorkflowById } from '@/lib/workflows/utils'
@@ -335,6 +336,15 @@ export async function DELETE(

    await db.delete(workflow).where(eq(workflow.id, workflowId))

    try {
      PlatformEvents.workflowDeleted({
        workflowId,
        workspaceId: workflowData.workspaceId || undefined,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    const elapsed = Date.now() - startTime
    logger.info(`[${requestId}] Successfully deleted workflow ${workflowId} in ${elapsed}ms`)
@@ -119,12 +119,12 @@ export async function POST(req: NextRequest) {
    logger.info(`[${requestId}] Creating workflow ${workflowId} for user ${session.user.id}`)

    import('@/lib/core/telemetry')
      .then(({ trackPlatformEvent }) => {
        trackPlatformEvent('platform.workflow.created', {
          'workflow.id': workflowId,
          'workflow.name': name,
          'workflow.has_workspace': !!workspaceId,
          'workflow.has_folder': !!folderId,
      .then(({ PlatformEvents }) => {
        PlatformEvents.workflowCreated({
          workflowId,
          name,
          workspaceId: workspaceId || undefined,
          folderId: folderId || undefined,
        })
      })
      .catch(() => {
@@ -7,6 +7,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { createApiKey, getApiKeyDisplayFormat } from '@/lib/api-key/auth'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { generateRequestId } from '@/lib/core/utils/request'
import { getUserEntityPermissions } from '@/lib/workspaces/permissions/utils'

@@ -147,6 +148,15 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
      createdAt: apiKey.createdAt,
    })

    try {
      PlatformEvents.apiKeyGenerated({
        userId: userId,
        keyName: name,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    logger.info(`[${requestId}] Created workspace API key: ${name} in workspace ${workspaceId}`)

    return NextResponse.json({
@@ -198,6 +208,17 @@ export async function DELETE(
      )
    )

    try {
      for (const keyId of keys) {
        PlatformEvents.apiKeyRevoked({
          userId: userId,
          keyId: keyId,
        })
      }
    } catch {
      // Telemetry should not fail the operation
    }

    logger.info(
      `[${requestId}] Deleted ${deletedCount} workspace API keys from workspace ${workspaceId}`
    )
@@ -14,6 +14,7 @@ import { and, eq, inArray } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { WorkspaceInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getFromEmailAddress } from '@/lib/messaging/email/utils'
@@ -81,7 +82,6 @@ export async function POST(req: NextRequest) {
      return NextResponse.json({ error: 'Workspace ID and email are required' }, { status: 400 })
    }

    // Validate permission type
    const validPermissions: PermissionType[] = ['admin', 'write', 'read']
    if (!validPermissions.includes(permission)) {
      return NextResponse.json(
@@ -90,7 +90,6 @@ export async function POST(req: NextRequest) {
      )
    }

    // Check if user has admin permissions for this workspace
    const userPermission = await db
      .select()
      .from(permissions)
@@ -111,7 +110,6 @@ export async function POST(req: NextRequest) {
      )
    }

    // Get the workspace details for the email
    const workspaceDetails = await db
      .select()
      .from(workspace)
@@ -122,8 +120,6 @@ export async function POST(req: NextRequest) {
      return NextResponse.json({ error: 'Workspace not found' }, { status: 404 })
    }

    // Check if the user is already a member
    // First find if a user with this email exists
    const existingUser = await db
      .select()
      .from(user)
@@ -131,7 +127,6 @@ export async function POST(req: NextRequest) {
      .then((rows) => rows[0])

    if (existingUser) {
      // Check if the user already has permissions for this workspace
      const existingPermission = await db
        .select()
        .from(permissions)
@@ -155,7 +150,6 @@ export async function POST(req: NextRequest) {
      }
    }

    // Check if there's already a pending invitation
    const existingInvitation = await db
      .select()
      .from(workspaceInvitation)
@@ -178,12 +172,10 @@ export async function POST(req: NextRequest) {
      )
    }

    // Generate a unique token and set expiry date (1 week from now)
    const token = randomUUID()
    const expiresAt = new Date()
    expiresAt.setDate(expiresAt.getDate() + 7) // 7 days expiry

    // Create the invitation
    const invitationData = {
      id: randomUUID(),
      workspaceId,
@@ -198,10 +190,19 @@ export async function POST(req: NextRequest) {
      updatedAt: new Date(),
    }

    // Create invitation
    await db.insert(workspaceInvitation).values(invitationData)

    // Send the invitation email
    try {
      PlatformEvents.workspaceMemberInvited({
        workspaceId,
        invitedBy: session.user.id,
        inviteeEmail: email,
        role: permission,
      })
    } catch {
      // Telemetry should not fail the operation
    }

    await sendInvitationEmail({
      to: email,
      inviterName: session.user.name || session.user.email || 'A user',
@@ -217,7 +218,6 @@ export async function POST(req: NextRequest) {
  }
}

// Helper function to send invitation email using the Resend API
async function sendInvitationEmail({
  to,
  inviterName,
@@ -233,7 +233,6 @@ async function sendInvitationEmail({
}) {
  try {
    const baseUrl = getBaseUrl()
    // Use invitation ID in path, token in query parameter for security
    const invitationLink = `${baseUrl}/invite/${invitationId}?token=${token}`

    const emailHtml = await render(
@@ -263,6 +262,5 @@ async function sendInvitationEmail({
    }
  } catch (error) {
    logger.error('Error sending invitation email:', error)
    // Continue even if email fails - the invitation is still created
  }
}
@@ -5,6 +5,7 @@ import { and, desc, eq, isNull } from 'drizzle-orm'
import { NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
import { PlatformEvents } from '@/lib/core/telemetry'
import { buildDefaultWorkflowArtifacts } from '@/lib/workflows/defaults'
import { saveWorkflowToNormalizedTables } from '@/lib/workflows/persistence/utils'

@@ -22,7 +23,6 @@ export async function GET() {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 })
    }

    // Get all workspaces where the user has permissions
    const userWorkspaces = await db
      .select({
        workspace: workspace,
@@ -34,19 +34,15 @@ export async function GET() {
      .orderBy(desc(workspace.createdAt))

    if (userWorkspaces.length === 0) {
      // Create a default workspace for the user
      const defaultWorkspace = await createDefaultWorkspace(session.user.id, session.user.name)

      // Migrate existing workflows to the default workspace
      await migrateExistingWorkflows(session.user.id, defaultWorkspace.id)

      return NextResponse.json({ workspaces: [defaultWorkspace] })
    }

    // If user has workspaces but might have orphaned workflows, migrate them
    await ensureWorkflowsHaveWorkspace(session.user.id, userWorkspaces[0].workspace.id)

    // Format the response with permission information
    const workspacesWithPermissions = userWorkspaces.map(
      ({ workspace: workspaceDetails, permissionType }) => ({
        ...workspaceDetails,
@@ -78,24 +74,19 @@ export async function POST(req: Request) {
  }
}

// Helper function to create a default workspace
async function createDefaultWorkspace(userId: string, userName?: string | null) {
  // Extract first name only by splitting on spaces and taking the first part
  const firstName = userName?.split(' ')[0] || null
  const workspaceName = firstName ? `${firstName}'s Workspace` : 'My Workspace'
  return createWorkspace(userId, workspaceName)
}

// Helper function to create a workspace
async function createWorkspace(userId: string, name: string) {
  const workspaceId = crypto.randomUUID()
  const workflowId = crypto.randomUUID()
  const now = new Date()

  // Create the workspace and initial workflow in a transaction
  try {
    await db.transaction(async (tx) => {
      // Create the workspace
      await tx.insert(workspace).values({
        id: workspaceId,
        name,
@@ -135,8 +126,6 @@ async function createWorkspace(userId: string, name: string) {
        variables: {},
      })

      // No blocks are inserted - empty canvas

      logger.info(
        `Created workspace ${workspaceId} with initial workflow ${workflowId} for user ${userId}`
      )
@@ -153,7 +142,16 @@ async function createWorkspace(userId: string, name: string) {
    throw error
  }

  // Return the workspace data directly instead of querying again
  try {
    PlatformEvents.workspaceCreated({
      workspaceId,
      userId,
      name,
    })
  } catch {
    // Telemetry should not fail the operation
  }

  return {
    id: workspaceId,
    name,
@@ -166,9 +164,7 @@ async function createWorkspace(userId: string, name: string) {
  }
}

// Helper function to migrate existing workflows to a workspace
async function migrateExistingWorkflows(userId: string, workspaceId: string) {
  // Find all workflows that have no workspace ID
  const orphanedWorkflows = await db
    .select({ id: workflow.id })
    .from(workflow)
@@ -182,7 +178,6 @@ async function migrateExistingWorkflows(userId: string, workspaceId: string) {
    `Migrating ${orphanedWorkflows.length} workflows to workspace ${workspaceId} for user ${userId}`
  )

  // Bulk update all orphaned workflows at once
  await db
    .update(workflow)
    .set({
@@ -192,16 +187,13 @@ async function migrateExistingWorkflows(userId: string, workspaceId: string) {
    .where(and(eq(workflow.userId, userId), isNull(workflow.workspaceId)))
}

// Helper function to ensure all workflows have a workspace
async function ensureWorkflowsHaveWorkspace(userId: string, defaultWorkspaceId: string) {
  // First check if there are any orphaned workflows
  const orphanedWorkflows = await db
    .select()
    .from(workflow)
    .where(and(eq(workflow.userId, userId), isNull(workflow.workspaceId)))

  if (orphanedWorkflows.length > 0) {
    // Directly update any workflows that don't have a workspace ID in a single query
    await db
      .update(workflow)
      .set({
@@ -31,7 +31,6 @@ export async function executeWorkflowWithFullLogging(
  const { setActiveBlocks, setBlockRunStatus, setEdgeRunStatus } = useExecutionStore.getState()
  const workflowEdges = useWorkflowStore.getState().edges

  // Track active blocks for pulsing animation
  const activeBlocksSet = new Set<string>()

  const payload: any = {
@@ -59,7 +58,6 @@ export async function executeWorkflowWithFullLogging(
      throw new Error('No response body')
    }

    // Parse SSE stream
    const reader = response.body.getReader()
    const decoder = new TextDecoder()
    let buffer = ''
@@ -89,11 +87,9 @@ export async function executeWorkflowWithFullLogging(

          switch (event.type) {
            case 'block:started': {
              // Add block to active set for pulsing animation
              activeBlocksSet.add(event.data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              // Track edges that led to this block as soon as execution starts
              const incomingEdges = workflowEdges.filter(
                (edge) => edge.target === event.data.blockId
              )
@@ -104,11 +100,9 @@ export async function executeWorkflowWithFullLogging(
            }

            case 'block:completed':
              // Remove block from active set
              activeBlocksSet.delete(event.data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              // Track successful block execution in run path
              setBlockRunStatus(event.data.blockId, 'success')

              addConsole({
@@ -134,11 +128,9 @@ export async function executeWorkflowWithFullLogging(
              break

            case 'block:error':
              // Remove block from active set
              activeBlocksSet.delete(event.data.blockId)
              setActiveBlocks(new Set(activeBlocksSet))

              // Track failed block execution in run path
              setBlockRunStatus(event.data.blockId, 'error')

              addConsole({
@@ -183,7 +175,6 @@ export async function executeWorkflowWithFullLogging(
      }
    } finally {
      reader.releaseLock()
      // Clear active blocks when execution ends
      setActiveBlocks(new Set())
    }
@@ -2,7 +2,9 @@
 * Sim OpenTelemetry - Server-side Instrumentation
 */

import type { Attributes, Context, Link, SpanKind } from '@opentelemetry/api'
import { DiagConsoleLogger, DiagLogLevel, diag } from '@opentelemetry/api'
import type { Sampler, SamplingResult } from '@opentelemetry/sdk-trace-base'
import { createLogger } from '@sim/logger'
import { env } from './lib/core/config/env'

@@ -24,8 +26,25 @@ const DEFAULT_TELEMETRY_CONFIG = {
}

/**
 * Initialize OpenTelemetry SDK with proper configuration
 * Span name prefixes we want to KEEP
 */
const ALLOWED_SPAN_PREFIXES = [
  'platform.', // Our platform events
  'gen_ai.', // GenAI semantic convention spans
  'workflow.', // Workflow execution spans
  'block.', // Block execution spans
  'http.client.', // Our API block HTTP calls
  'function.', // Function block execution
  'router.', // Router block evaluation
  'condition.', // Condition block evaluation
  'loop.', // Loop block execution
  'parallel.', // Parallel block execution
]

function isBusinessSpan(spanName: string): boolean {
  return ALLOWED_SPAN_PREFIXES.some((prefix) => spanName.startsWith(prefix))
}

async function initializeOpenTelemetry() {
  try {
    if (env.NEXT_TELEMETRY_DISABLED === '1') {
@@ -52,18 +71,43 @@ async function initializeOpenTelemetry() {
    )
    const { OTLPTraceExporter } = await import('@opentelemetry/exporter-trace-otlp-http')
    const { BatchSpanProcessor } = await import('@opentelemetry/sdk-trace-node')
    const { ParentBasedSampler, TraceIdRatioBasedSampler } = await import(
    const { ParentBasedSampler, TraceIdRatioBasedSampler, SamplingDecision } = await import(
      '@opentelemetry/sdk-trace-base'
    )

    const createBusinessSpanSampler = (baseSampler: Sampler): Sampler => ({
      shouldSample(
        context: Context,
        traceId: string,
        spanName: string,
        spanKind: SpanKind,
        attributes: Attributes,
        links: Link[]
      ): SamplingResult {
        if (attributes['next.span_type']) {
          return { decision: SamplingDecision.NOT_RECORD }
        }

        if (isBusinessSpan(spanName)) {
          return baseSampler.shouldSample(context, traceId, spanName, spanKind, attributes, links)
        }

        return { decision: SamplingDecision.NOT_RECORD }
      },

      toString(): string {
        return `BusinessSpanSampler{baseSampler=${baseSampler.toString()}}`
      },
    })

    const exporter = new OTLPTraceExporter({
      url: telemetryConfig.endpoint,
      headers: {},
      timeoutMillis: Math.min(telemetryConfig.batchSettings.exportTimeoutMillis, 10000), // Max 10s
      timeoutMillis: Math.min(telemetryConfig.batchSettings.exportTimeoutMillis, 10000),
      keepAlive: false,
    })

    const spanProcessor = new BatchSpanProcessor(exporter, {
    const batchProcessor = new BatchSpanProcessor(exporter, {
      maxQueueSize: telemetryConfig.batchSettings.maxQueueSize,
      maxExportBatchSize: telemetryConfig.batchSettings.maxExportBatchSize,
      scheduledDelayMillis: telemetryConfig.batchSettings.scheduledDelayMillis,
@@ -82,13 +126,14 @@ async function initializeOpenTelemetry() {
      })
    )

    const sampler = new ParentBasedSampler({
      root: new TraceIdRatioBasedSampler(0.1), // 10% sampling for root spans
    const baseSampler = new ParentBasedSampler({
      root: new TraceIdRatioBasedSampler(0.1),
    })
    const sampler = createBusinessSpanSampler(baseSampler)

    const sdk = new NodeSDK({
      resource,
      spanProcessor,
      spanProcessor: batchProcessor,
      sampler,
      traceExporter: exporter,
    })
@@ -107,7 +152,7 @@ async function initializeOpenTelemetry() {
    process.on('SIGTERM', shutdownHandler)
    process.on('SIGINT', shutdownHandler)

    logger.info('OpenTelemetry instrumentation initialized')
    logger.info('OpenTelemetry instrumentation initialized with business span filtering')
  } catch (error) {
    logger.error('Failed to initialize OpenTelemetry instrumentation', error)
  }
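The wrapper turns sampling into an allow-list: spans carrying Next.js's `next.span_type` attribute are dropped outright, business spans fall through to the parent-based 10% sampler, and everything else is not recorded. Illustrative span names (outcomes follow from `ALLOWED_SPAN_PREFIXES` above; the concrete names are examples, not ones guaranteed to occur):

    isBusinessSpan('platform.user.signed_up') // true  -> defers to the 10% base sampler
    isBusinessSpan('workflow.execution')      // true  -> defers to the 10% base sampler
    isBusinessSpan('GET /api/workflows')      // false -> SamplingDecision.NOT_RECORD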
@@ -47,10 +47,12 @@ import { env } from '@/lib/core/config/env'
import {
  isAuthDisabled,
  isBillingEnabled,
  isEmailPasswordEnabled,
  isEmailVerificationEnabled,
  isHosted,
  isRegistrationDisabled,
} from '@/lib/core/config/feature-flags'
import { PlatformEvents } from '@/lib/core/telemetry'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
import { getFromEmailAddress, getPersonalEmailFrom } from '@/lib/messaging/email/utils'
@@ -97,6 +99,15 @@ export const auth = betterAuth({
        userId: user.id,
      })

      try {
        PlatformEvents.userSignedUp({
          userId: user.id,
          authMethod: 'email',
        })
      } catch {
        // Telemetry should not fail the operation
      }

      try {
        await handleNewUser(user.id)
      } catch (error) {
@@ -319,6 +330,15 @@ export const auth = betterAuth({
          }
        }
      }

      try {
        PlatformEvents.oauthConnected({
          userId: account.userId,
          provider: account.providerId,
        })
      } catch {
        // Telemetry should not fail the operation
      }
    },
  },
},
@@ -461,6 +481,12 @@ export const auth = betterAuth({
      if (ctx.path.startsWith('/sign-up') && isRegistrationDisabled)
        throw new Error('Registration is disabled, please contact your admin.')

      if (!isEmailPasswordEnabled) {
        const emailPasswordPaths = ['/sign-in/email', '/sign-up/email', '/email-otp']
        if (emailPasswordPaths.some((path) => ctx.path.startsWith(path)))
          throw new Error('Email/password authentication is disabled. Please use SSO to sign in.')
      }

      if (
        (ctx.path.startsWith('/sign-in') || ctx.path.startsWith('/sign-up')) &&
        (env.ALLOWED_LOGIN_EMAILS || env.ALLOWED_LOGIN_DOMAINS)
@@ -20,6 +20,7 @@ export const env = createEnv({
    BETTER_AUTH_URL: z.string().url(), // Base URL for Better Auth service
    BETTER_AUTH_SECRET: z.string().min(32), // Secret key for Better Auth JWT signing
    DISABLE_REGISTRATION: z.boolean().optional(), // Flag to disable new user registration
    EMAIL_PASSWORD_SIGNUP_ENABLED: z.boolean().optional().default(true), // Enable email/password authentication (server-side enforcement)
    DISABLE_AUTH: z.boolean().optional(), // Bypass authentication entirely (self-hosted only, creates anonymous session)
    ALLOWED_LOGIN_EMAILS: z.string().optional(), // Comma-separated list of allowed email addresses for login
    ALLOWED_LOGIN_DOMAINS: z.string().optional(), // Comma-separated list of allowed email domains for login
@@ -1,7 +1,7 @@
/**
 * Environment utility functions for consistent environment detection across the application
 */
import { env, getEnv, isTruthy } from './env'
import { env, getEnv, isFalsy, isTruthy } from './env'

/**
 * Is the application running in production mode
@@ -65,6 +65,11 @@ if (isTruthy(env.DISABLE_AUTH)) {
 */
export const isRegistrationDisabled = isTruthy(env.DISABLE_REGISTRATION)

/**
 * Is email/password authentication enabled (defaults to true)
 */
export const isEmailPasswordEnabled = !isFalsy(env.EMAIL_PASSWORD_SIGNUP_ENABLED)

/**
 * Is Trigger.dev enabled for async job processing
 */
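Because the schema defaults the flag to true and the check is `!isFalsy(...)` rather than `isTruthy(...)`, only an explicit negative value disables email/password auth; an unset variable leaves it on. A sketch of the resulting behavior (assuming `isFalsy` treats the strings 'false' and '0' as negative, as the test mock later in this diff does):

    // Illustrative truth table for isEmailPasswordEnabled
    // EMAIL_PASSWORD_SIGNUP_ENABLED=false -> false (email/password sign-in, sign-up, and email-otp paths throw)
    // EMAIL_PASSWORD_SIGNUP_ENABLED=0     -> false, same as above
    // EMAIL_PASSWORD_SIGNUP_ENABLED=true  -> true
    // unset                               -> true (the zod schema defaults to true)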
@@ -1,7 +1,8 @@
import { createEnvMock } from '@sim/testing'
import { afterEach, describe, expect, it, vi } from 'vitest'

vi.mock('@/lib/core/config/env', () => ({
  env: {
vi.mock('@/lib/core/config/env', () =>
  createEnvMock({
    NEXT_PUBLIC_APP_URL: 'https://example.com',
    NEXT_PUBLIC_SOCKET_URL: 'https://socket.example.com',
    OLLAMA_URL: 'http://localhost:11434',
@@ -13,20 +14,8 @@ vi.mock('@/lib/core/config/env', () => ({
    NEXT_PUBLIC_BRAND_FAVICON_URL: 'https://brand.example.com/favicon.ico',
    NEXT_PUBLIC_PRIVACY_URL: 'https://legal.example.com/privacy',
    NEXT_PUBLIC_TERMS_URL: 'https://legal.example.com/terms',
  },
  getEnv: vi.fn((key: string) => {
    const envMap: Record<string, string> = {
      NEXT_PUBLIC_APP_URL: 'https://example.com',
      NEXT_PUBLIC_SOCKET_URL: 'https://socket.example.com',
      OLLAMA_URL: 'http://localhost:11434',
      NEXT_PUBLIC_BRAND_LOGO_URL: 'https://brand.example.com/logo.png',
      NEXT_PUBLIC_BRAND_FAVICON_URL: 'https://brand.example.com/favicon.ico',
      NEXT_PUBLIC_PRIVACY_URL: 'https://legal.example.com/privacy',
      NEXT_PUBLIC_TERMS_URL: 'https://legal.example.com/terms',
    }
    return envMap[key] || ''
  }),
}))
  })
)

vi.mock('@/lib/core/config/feature-flags', () => ({
  isDev: false,
@@ -6,6 +6,10 @@ const mockEnv = vi.hoisted(() => ({

vi.mock('@/lib/core/config/env', () => ({
  env: mockEnv,
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value.toLowerCase() === 'true' || value === '1' : Boolean(value),
  isFalsy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value.toLowerCase() === 'false' || value === '0' : value === false,
}))

vi.mock('@sim/logger', () => ({
@@ -449,3 +449,505 @@ export function trackPlatformEvent(
    // Silently fail
  }
}

// ============================================================================
// PLATFORM TELEMETRY EVENTS
// ============================================================================
//
// Naming Convention:
//   Event: platform.{resource}.{past_tense_action}
//   Attribute: {resource}.{attribute_name}
//
// Examples:
//   Event: platform.user.signed_up
//   Attribute: user.id, user.auth_method, workspace.id
//
// Categories:
//   - User/Auth: platform.user.*
//   - Workspace: platform.workspace.*
//   - Workflow: platform.workflow.*
//   - Knowledge Base: platform.knowledge_base.*
//   - MCP: platform.mcp.*
//   - API Keys: platform.api_key.*
//   - OAuth: platform.oauth.*
//   - Webhook: platform.webhook.*
//   - Billing: platform.billing.*
//   - Template: platform.template.*
// ============================================================================

/**
 * Platform Events - Typed event tracking helpers
 * These provide type-safe, consistent telemetry across the platform
 */
export const PlatformEvents = {
  /**
   * Track user sign up
   */
  userSignedUp: (attrs: {
    userId: string
    authMethod: 'email' | 'oauth' | 'sso'
    provider?: string
  }) => {
    trackPlatformEvent('platform.user.signed_up', {
      'user.id': attrs.userId,
      'user.auth_method': attrs.authMethod,
      ...(attrs.provider && { 'user.auth_provider': attrs.provider }),
    })
  },

  /**
   * Track user sign in
   */
  userSignedIn: (attrs: {
    userId: string
    authMethod: 'email' | 'oauth' | 'sso'
    provider?: string
  }) => {
    trackPlatformEvent('platform.user.signed_in', {
      'user.id': attrs.userId,
      'user.auth_method': attrs.authMethod,
      ...(attrs.provider && { 'user.auth_provider': attrs.provider }),
    })
  },

  /**
   * Track password reset requested
   */
  passwordResetRequested: (attrs: { userId: string }) => {
    trackPlatformEvent('platform.user.password_reset_requested', {
      'user.id': attrs.userId,
    })
  },

  /**
   * Track workspace created
   */
  workspaceCreated: (attrs: { workspaceId: string; userId: string; name: string }) => {
    trackPlatformEvent('platform.workspace.created', {
      'workspace.id': attrs.workspaceId,
      'workspace.name': attrs.name,
      'user.id': attrs.userId,
    })
  },

  /**
   * Track member invited to workspace
   */
  workspaceMemberInvited: (attrs: {
    workspaceId: string
    invitedBy: string
    inviteeEmail: string
    role: string
  }) => {
    trackPlatformEvent('platform.workspace.member_invited', {
      'workspace.id': attrs.workspaceId,
      'user.id': attrs.invitedBy,
      'invitation.role': attrs.role,
    })
  },

  /**
   * Track member joined workspace
   */
  workspaceMemberJoined: (attrs: { workspaceId: string; userId: string; role: string }) => {
    trackPlatformEvent('platform.workspace.member_joined', {
      'workspace.id': attrs.workspaceId,
      'user.id': attrs.userId,
      'member.role': attrs.role,
    })
  },

  /**
   * Track workflow created
   */
  workflowCreated: (attrs: {
    workflowId: string
    name: string
    workspaceId?: string
    folderId?: string
  }) => {
    trackPlatformEvent('platform.workflow.created', {
      'workflow.id': attrs.workflowId,
      'workflow.name': attrs.name,
      'workflow.has_workspace': !!attrs.workspaceId,
      'workflow.has_folder': !!attrs.folderId,
    })
  },

  /**
   * Track workflow deleted
   */
  workflowDeleted: (attrs: { workflowId: string; workspaceId?: string }) => {
    trackPlatformEvent('platform.workflow.deleted', {
      'workflow.id': attrs.workflowId,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track workflow duplicated
   */
  workflowDuplicated: (attrs: {
    sourceWorkflowId: string
    newWorkflowId: string
    workspaceId?: string
  }) => {
    trackPlatformEvent('platform.workflow.duplicated', {
      'workflow.source_id': attrs.sourceWorkflowId,
      'workflow.new_id': attrs.newWorkflowId,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track workflow deployed
   */
  workflowDeployed: (attrs: {
    workflowId: string
    workflowName: string
    blocksCount: number
    edgesCount: number
    version: number
    loopsCount?: number
    parallelsCount?: number
    blockTypes?: string
  }) => {
    trackPlatformEvent('platform.workflow.deployed', {
      'workflow.id': attrs.workflowId,
      'workflow.name': attrs.workflowName,
      'workflow.blocks_count': attrs.blocksCount,
      'workflow.edges_count': attrs.edgesCount,
      'deployment.version': attrs.version,
      ...(attrs.loopsCount !== undefined && { 'workflow.loops_count': attrs.loopsCount }),
      ...(attrs.parallelsCount !== undefined && {
        'workflow.parallels_count': attrs.parallelsCount,
      }),
      ...(attrs.blockTypes && { 'workflow.block_types': attrs.blockTypes }),
    })
  },

  /**
   * Track workflow undeployed
   */
  workflowUndeployed: (attrs: { workflowId: string }) => {
    trackPlatformEvent('platform.workflow.undeployed', {
      'workflow.id': attrs.workflowId,
    })
  },

  /**
   * Track workflow executed
   */
  workflowExecuted: (attrs: {
    workflowId: string
    durationMs: number
    status: 'success' | 'error' | 'cancelled' | 'paused'
    trigger: string
    blocksExecuted: number
    hasErrors: boolean
    totalCost?: number
    errorMessage?: string
  }) => {
    trackPlatformEvent('platform.workflow.executed', {
      'workflow.id': attrs.workflowId,
      'execution.duration_ms': attrs.durationMs,
      'execution.status': attrs.status,
      'execution.trigger': attrs.trigger,
      'execution.blocks_executed': attrs.blocksExecuted,
      'execution.has_errors': attrs.hasErrors,
      ...(attrs.totalCost !== undefined && { 'execution.total_cost': attrs.totalCost }),
      ...(attrs.errorMessage && { 'execution.error_message': attrs.errorMessage }),
    })
  },

  /**
   * Track knowledge base created
   */
  knowledgeBaseCreated: (attrs: {
    knowledgeBaseId: string
    name: string
    workspaceId?: string
  }) => {
    trackPlatformEvent('platform.knowledge_base.created', {
      'knowledge_base.id': attrs.knowledgeBaseId,
      'knowledge_base.name': attrs.name,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track knowledge base deleted
   */
  knowledgeBaseDeleted: (attrs: { knowledgeBaseId: string }) => {
    trackPlatformEvent('platform.knowledge_base.deleted', {
      'knowledge_base.id': attrs.knowledgeBaseId,
    })
  },

  /**
   * Track documents uploaded to knowledge base
   */
  knowledgeBaseDocumentsUploaded: (attrs: {
    knowledgeBaseId: string
    documentsCount: number
    uploadType: 'single' | 'bulk'
    chunkSize?: number
    recipe?: string
    mimeType?: string
    fileSize?: number
  }) => {
    trackPlatformEvent('platform.knowledge_base.documents_uploaded', {
      'knowledge_base.id': attrs.knowledgeBaseId,
      'documents.count': attrs.documentsCount,
      'documents.upload_type': attrs.uploadType,
      ...(attrs.chunkSize !== undefined && { 'processing.chunk_size': attrs.chunkSize }),
      ...(attrs.recipe && { 'processing.recipe': attrs.recipe }),
      ...(attrs.mimeType && { 'document.mime_type': attrs.mimeType }),
      ...(attrs.fileSize !== undefined && { 'document.file_size': attrs.fileSize }),
    })
  },

  /**
   * Track knowledge base searched
   */
  knowledgeBaseSearched: (attrs: {
    knowledgeBaseId: string
    resultsCount: number
    workspaceId?: string
  }) => {
    trackPlatformEvent('platform.knowledge_base.searched', {
      'knowledge_base.id': attrs.knowledgeBaseId,
      'search.results_count': attrs.resultsCount,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track API key generated
   */
  apiKeyGenerated: (attrs: { userId: string; keyName?: string }) => {
    trackPlatformEvent('platform.api_key.generated', {
      'user.id': attrs.userId,
      ...(attrs.keyName && { 'api_key.name': attrs.keyName }),
    })
  },

  /**
   * Track API key revoked
   */
  apiKeyRevoked: (attrs: { userId: string; keyId: string }) => {
    trackPlatformEvent('platform.api_key.revoked', {
      'user.id': attrs.userId,
      'api_key.id': attrs.keyId,
    })
  },

  /**
   * Track OAuth provider connected
   */
  oauthConnected: (attrs: { userId: string; provider: string; workspaceId?: string }) => {
    trackPlatformEvent('platform.oauth.connected', {
      'user.id': attrs.userId,
      'oauth.provider': attrs.provider,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track OAuth provider disconnected
   */
  oauthDisconnected: (attrs: { userId: string; provider: string }) => {
    trackPlatformEvent('platform.oauth.disconnected', {
      'user.id': attrs.userId,
      'oauth.provider': attrs.provider,
    })
  },

  /**
   * Track credential set created
   */
  credentialSetCreated: (attrs: { credentialSetId: string; userId: string; name: string }) => {
    trackPlatformEvent('platform.credential_set.created', {
      'credential_set.id': attrs.credentialSetId,
      'credential_set.name': attrs.name,
      'user.id': attrs.userId,
    })
  },

  /**
   * Track webhook created
   */
  webhookCreated: (attrs: {
    webhookId: string
    workflowId: string
    provider: string
    workspaceId?: string
  }) => {
    trackPlatformEvent('platform.webhook.created', {
      'webhook.id': attrs.webhookId,
      'workflow.id': attrs.workflowId,
      'webhook.provider': attrs.provider,
      ...(attrs.workspaceId && { 'workspace.id': attrs.workspaceId }),
    })
  },

  /**
   * Track webhook deleted
   */
  webhookDeleted: (attrs: { webhookId: string; workflowId: string }) => {
    trackPlatformEvent('platform.webhook.deleted', {
      'webhook.id': attrs.webhookId,
      'workflow.id': attrs.workflowId,
    })
  },

  /**
   * Track webhook triggered
   */
  webhookTriggered: (attrs: {
    webhookId: string
    workflowId: string
    provider: string
    success: boolean
  }) => {
    trackPlatformEvent('platform.webhook.triggered', {
      'webhook.id': attrs.webhookId,
      'workflow.id': attrs.workflowId,
      'webhook.provider': attrs.provider,
      'webhook.trigger_success': attrs.success,
    })
  },

  /**
   * Track MCP server added
   */
  mcpServerAdded: (attrs: {
    serverId: string
    serverName: string
    transport: string
    workspaceId: string
  }) => {
    trackPlatformEvent('platform.mcp.server_added', {
      'mcp.server_id': attrs.serverId,
      'mcp.server_name': attrs.serverName,
      'mcp.transport': attrs.transport,
      'workspace.id': attrs.workspaceId,
    })
  },

  /**
   * Track MCP tool executed
   */
  mcpToolExecuted: (attrs: {
    serverId: string
    toolName: string
    status: 'success' | 'error'
    workspaceId: string
  }) => {
    trackPlatformEvent('platform.mcp.tool_executed', {
      'mcp.server_id': attrs.serverId,
      'mcp.tool_name': attrs.toolName,
      'mcp.execution_status': attrs.status,
      'workspace.id': attrs.workspaceId,
    })
  },

  /**
   * Track template used
   */
  templateUsed: (attrs: {
    templateId: string
    templateName: string
    newWorkflowId: string
    blocksCount: number
    workspaceId: string
  }) => {
    trackPlatformEvent('platform.template.used', {
      'template.id': attrs.templateId,
      'template.name': attrs.templateName,
      'workflow.created_id': attrs.newWorkflowId,
      'workflow.blocks_count': attrs.blocksCount,
      'workspace.id': attrs.workspaceId,
    })
  },

  /**
   * Track subscription created
   */
  subscriptionCreated: (attrs: {
    userId: string
    plan: string
    interval: 'monthly' | 'yearly'
  }) => {
    trackPlatformEvent('platform.billing.subscription_created', {
      'user.id': attrs.userId,
      'billing.plan': attrs.plan,
      'billing.interval': attrs.interval,
    })
  },

  /**
   * Track subscription changed
   */
  subscriptionChanged: (attrs: {
    userId: string
    previousPlan: string
    newPlan: string
    changeType: 'upgrade' | 'downgrade'
  }) => {
    trackPlatformEvent('platform.billing.subscription_changed', {
      'user.id': attrs.userId,
      'billing.previous_plan': attrs.previousPlan,
      'billing.new_plan': attrs.newPlan,
      'billing.change_type': attrs.changeType,
    })
  },

  /**
   * Track subscription cancelled
   */
  subscriptionCancelled: (attrs: { userId: string; plan: string }) => {
    trackPlatformEvent('platform.billing.subscription_cancelled', {
      'user.id': attrs.userId,
      'billing.plan': attrs.plan,
    })
  },

  /**
   * Track folder created
   */
  folderCreated: (attrs: { folderId: string; name: string; workspaceId: string }) => {
    trackPlatformEvent('platform.folder.created', {
      'folder.id': attrs.folderId,
      'folder.name': attrs.name,
      'workspace.id': attrs.workspaceId,
    })
  },

  /**
   * Track folder deleted
   */
  folderDeleted: (attrs: { folderId: string; workspaceId: string }) => {
    trackPlatformEvent('platform.folder.deleted', {
      'folder.id': attrs.folderId,
      'workspace.id': attrs.workspaceId,
    })
  },

  /**
   * Track chat deployed (workflow deployed as chat interface)
   */
  chatDeployed: (attrs: {
    chatId: string
    workflowId: string
    authType: 'public' | 'password' | 'email' | 'sso'
    hasOutputConfigs: boolean
  }) => {
    trackPlatformEvent('platform.chat.deployed', {
      'chat.id': attrs.chatId,
      'workflow.id': attrs.workflowId,
      'chat.auth_type': attrs.authType,
      'chat.has_output_configs': attrs.hasOutputConfigs,
    })
  },
}
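Every call site in this PR wraps the helper in a try/catch (or a `.catch` on the dynamic import) so a telemetry failure can never fail the request. The convention, condensed from the hunks above:

    import { PlatformEvents } from '@/lib/core/telemetry'

    try {
      PlatformEvents.workflowDeleted({
        workflowId,
        workspaceId: workspaceId || undefined,
      })
    } catch {
      // Telemetry should not fail the operation
    }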
@@ -289,12 +289,12 @@ export class LoggingSession {
|
||||
|
||||
this.completed = true
|
||||
|
||||
// Track workflow execution outcome
|
||||
if (traceSpans && traceSpans.length > 0) {
|
||||
try {
|
||||
const { trackPlatformEvent } = await import('@/lib/core/telemetry')
|
||||
const { PlatformEvents, createOTelSpansForWorkflowExecution } = await import(
|
||||
'@/lib/core/telemetry'
|
||||
)
|
||||
|
||||
// Determine status from trace spans
|
||||
const hasErrors = traceSpans.some((span: any) => {
|
||||
const checkForErrors = (s: any): boolean => {
|
||||
if (s.status === 'error') return true
|
||||
@@ -306,14 +306,27 @@ export class LoggingSession {
|
||||
return checkForErrors(span)
|
||||
})
|
||||
|
||||
trackPlatformEvent('platform.workflow.executed', {
|
||||
'workflow.id': this.workflowId,
|
||||
'execution.duration_ms': duration,
|
||||
'execution.status': hasErrors ? 'error' : 'success',
|
||||
'execution.trigger': this.triggerType,
|
||||
'execution.blocks_executed': traceSpans.length,
|
||||
'execution.has_errors': hasErrors,
|
||||
'execution.total_cost': costSummary.totalCost || 0,
|
||||
PlatformEvents.workflowExecuted({
|
||||
workflowId: this.workflowId,
|
||||
durationMs: duration,
|
||||
status: hasErrors ? 'error' : 'success',
|
||||
trigger: this.triggerType,
|
||||
blocksExecuted: traceSpans.length,
|
||||
hasErrors,
|
||||
totalCost: costSummary.totalCost || 0,
|
||||
})
|
||||
|
||||
const startTime = new Date(new Date(endTime).getTime() - duration).toISOString()
|
||||
createOTelSpansForWorkflowExecution({
|
||||
workflowId: this.workflowId,
|
||||
workflowName: this.workflowState?.metadata?.name,
|
||||
executionId: this.executionId,
|
||||
traceSpans,
|
||||
trigger: this.triggerType,
|
||||
startTime,
|
||||
endTime,
|
||||
totalDurationMs: duration,
|
||||
status: hasErrors ? 'error' : 'success',
|
||||
})
|
||||
} catch (_e) {
|
||||
// Silently fail
|
||||
@@ -324,7 +337,6 @@ export class LoggingSession {
|
||||
logger.debug(`[${this.requestId}] Completed logging for execution ${this.executionId}`)
|
||||
}
|
||||
} catch (error) {
|
||||
// Always log completion failures with full details - these should not be silent
|
||||
logger.error(`Failed to complete logging for execution ${this.executionId}:`, {
|
||||
requestId: this.requestId,
|
||||
workflowId: this.workflowId,
|
||||
@@ -332,7 +344,6 @@ export class LoggingSession {
|
||||
error: error instanceof Error ? error.message : String(error),
|
||||
stack: error instanceof Error ? error.stack : undefined,
|
||||
})
|
||||
// Rethrow so safeComplete can decide what to do
|
||||
throw error
|
||||
}
|
||||
}
|
||||
@@ -404,17 +415,31 @@ export class LoggingSession {

    this.completed = true

    // Track workflow execution error outcome
    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.workflow.executed', {
        'workflow.id': this.workflowId,
        'execution.duration_ms': Math.max(1, durationMs),
        'execution.status': 'error',
        'execution.trigger': this.triggerType,
        'execution.blocks_executed': spans.length,
        'execution.has_errors': true,
        'execution.error_message': message,
      const { PlatformEvents, createOTelSpansForWorkflowExecution } = await import(
        '@/lib/core/telemetry'
      )
      PlatformEvents.workflowExecuted({
        workflowId: this.workflowId,
        durationMs: Math.max(1, durationMs),
        status: 'error',
        trigger: this.triggerType,
        blocksExecuted: spans.length,
        hasErrors: true,
        errorMessage: message,
      })

      createOTelSpansForWorkflowExecution({
        workflowId: this.workflowId,
        workflowName: this.workflowState?.metadata?.name,
        executionId: this.executionId,
        traceSpans: spans,
        trigger: this.triggerType,
        startTime: startTime.toISOString(),
        endTime: endTime.toISOString(),
        totalDurationMs: Math.max(1, durationMs),
        status: 'error',
        error: message,
      })
    } catch (_e) {
      // Silently fail
@@ -426,7 +451,6 @@ export class LoggingSession {
      )
    }
  } catch (enhancedError) {
    // Always log completion failures with full details
    logger.error(`Failed to complete error logging for execution ${this.executionId}:`, {
      requestId: this.requestId,
      workflowId: this.workflowId,
@@ -434,7 +458,6 @@ export class LoggingSession {
      error: enhancedError instanceof Error ? enhancedError.message : String(enhancedError),
      stack: enhancedError instanceof Error ? enhancedError.stack : undefined,
    })
    // Rethrow so safeCompleteWithError can decide what to do
    throw enhancedError
  }
}
@@ -477,15 +500,32 @@ export class LoggingSession {
    this.completed = true

    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.workflow.executed', {
        'workflow.id': this.workflowId,
        'execution.duration_ms': Math.max(1, durationMs),
        'execution.status': 'cancelled',
        'execution.trigger': this.triggerType,
        'execution.blocks_executed': traceSpans?.length || 0,
        'execution.has_errors': false,
      const { PlatformEvents, createOTelSpansForWorkflowExecution } = await import(
        '@/lib/core/telemetry'
      )
      PlatformEvents.workflowExecuted({
        workflowId: this.workflowId,
        durationMs: Math.max(1, durationMs),
        status: 'cancelled',
        trigger: this.triggerType,
        blocksExecuted: traceSpans?.length || 0,
        hasErrors: false,
      })

      if (traceSpans && traceSpans.length > 0) {
        const startTime = new Date(endTime.getTime() - Math.max(1, durationMs))
        createOTelSpansForWorkflowExecution({
          workflowId: this.workflowId,
          workflowName: this.workflowState?.metadata?.name,
          executionId: this.executionId,
          traceSpans,
          trigger: this.triggerType,
          startTime: startTime.toISOString(),
          endTime: endTime.toISOString(),
          totalDurationMs: Math.max(1, durationMs),
          status: 'success', // Cancelled executions are not errors
        })
      }
    } catch (_e) {
      // Silently fail
    }
@@ -540,16 +580,33 @@ export class LoggingSession {
    })

    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      trackPlatformEvent('platform.workflow.executed', {
        'workflow.id': this.workflowId,
        'execution.duration_ms': Math.max(1, durationMs),
        'execution.status': 'paused',
        'execution.trigger': this.triggerType,
        'execution.blocks_executed': traceSpans?.length || 0,
        'execution.has_errors': false,
        'execution.total_cost': costSummary.totalCost || 0,
      const { PlatformEvents, createOTelSpansForWorkflowExecution } = await import(
        '@/lib/core/telemetry'
      )
      PlatformEvents.workflowExecuted({
        workflowId: this.workflowId,
        durationMs: Math.max(1, durationMs),
        status: 'paused',
        trigger: this.triggerType,
        blocksExecuted: traceSpans?.length || 0,
        hasErrors: false,
        totalCost: costSummary.totalCost || 0,
      })

      if (traceSpans && traceSpans.length > 0) {
        const startTime = new Date(endTime.getTime() - Math.max(1, durationMs))
        createOTelSpansForWorkflowExecution({
          workflowId: this.workflowId,
          workflowName: this.workflowState?.metadata?.name,
          executionId: this.executionId,
          traceSpans,
          trigger: this.triggerType,
          startTime: startTime.toISOString(),
          endTime: endTime.toISOString(),
          totalDurationMs: Math.max(1, durationMs),
          status: 'success', // Paused executions are not errors
        })
      }
    } catch (_e) {}

    if (this.requestId) {

@@ -1,3 +1,4 @@
import { createEnvMock } from '@sim/testing'
import { beforeEach, describe, expect, it, type Mock, vi } from 'vitest'

/**
@@ -44,15 +45,15 @@ vi.mock('@/lib/messaging/email/unsubscribe', () => ({
}))

// Mock env with valid API keys so the clients get initialized
vi.mock('@/lib/core/config/env', () => ({
  env: {
vi.mock('@/lib/core/config/env', () =>
  createEnvMock({
    RESEND_API_KEY: 'test-api-key',
    AZURE_ACS_CONNECTION_STRING: 'test-azure-connection-string',
    AZURE_COMMUNICATION_EMAIL_DOMAIN: 'test.azurecomm.net',
    NEXT_PUBLIC_APP_URL: 'https://test.sim.ai',
    FROM_EMAIL_ADDRESS: 'Sim <noreply@sim.ai>',
  },
}))
  })
)

// Mock URL utilities
vi.mock('@/lib/core/utils/urls', () => ({

@@ -1,3 +1,4 @@
import { createEnvMock } from '@sim/testing'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type { EmailType } from '@/lib/messaging/email/mailer'

@@ -25,14 +26,7 @@ vi.mock('drizzle-orm', () => ({
  eq: vi.fn((a, b) => ({ type: 'eq', left: a, right: b })),
}))

vi.mock('@/lib/core/config/env', () => ({
  env: {
    BETTER_AUTH_SECRET: 'test-secret-key',
  },
  isTruthy: (value: string | boolean | number | undefined) =>
    typeof value === 'string' ? value === 'true' || value === '1' : Boolean(value),
  getEnv: (variable: string) => process.env[variable],
}))
vi.mock('@/lib/core/config/env', () => createEnvMock({ BETTER_AUTH_SECRET: 'test-secret-key' }))

vi.mock('@sim/logger', () => ({
  createLogger: () => ({

@@ -1,3 +1,4 @@
import { createEnvMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'

/**
@@ -8,12 +9,12 @@ import { describe, expect, it, vi } from 'vitest'
 */

// Set up mocks at module level - these will be used for all tests in this file
vi.mock('@/lib/core/config/env', () => ({
  env: {
vi.mock('@/lib/core/config/env', () =>
  createEnvMock({
    FROM_EMAIL_ADDRESS: 'Sim <noreply@sim.ai>',
    EMAIL_DOMAIN: 'example.com',
  },
}))
  })
)

vi.mock('@/lib/core/utils/urls', () => ({
  getEmailDomain: vi.fn().mockReturnValue('fallback.com'),

@@ -1,8 +1,8 @@
import { createMockFetch, loggerMock } from '@sim/testing'
import { createEnvMock, createMockFetch, loggerMock } from '@sim/testing'
import { describe, expect, it, vi } from 'vitest'

vi.mock('@/lib/core/config/env', () => ({
  env: {
vi.mock('@/lib/core/config/env', () =>
  createEnvMock({
    GOOGLE_CLIENT_ID: 'google_client_id',
    GOOGLE_CLIENT_SECRET: 'google_client_secret',
    GITHUB_CLIENT_ID: 'github_client_id',
@@ -49,8 +49,8 @@ vi.mock('@/lib/core/config/env', () => ({
    WORDPRESS_CLIENT_SECRET: 'wordpress_client_secret',
    SPOTIFY_CLIENT_ID: 'spotify_client_id',
    SPOTIFY_CLIENT_SECRET: 'spotify_client_secret',
  },
}))
  })
)

vi.mock('@sim/logger', () => loggerMock)

@@ -565,10 +565,9 @@ export async function deployWorkflow(params: {

  logger.info(`Deployed workflow ${workflowId} as v${deployedVersion}`)

  // Track deployment telemetry if workflow name is provided
  if (workflowName) {
    try {
      const { trackPlatformEvent } = await import('@/lib/core/telemetry')
      const { PlatformEvents } = await import('@/lib/core/telemetry')

      const blockTypeCounts: Record<string, number> = {}
      for (const block of Object.values(currentState.blocks)) {
@@ -576,15 +575,15 @@ export async function deployWorkflow(params: {
        blockTypeCounts[blockType] = (blockTypeCounts[blockType] || 0) + 1
      }

      trackPlatformEvent('platform.workflow.deployed', {
        'workflow.id': workflowId,
        'workflow.name': workflowName,
        'workflow.blocks_count': Object.keys(currentState.blocks).length,
        'workflow.edges_count': currentState.edges.length,
        'workflow.loops_count': Object.keys(currentState.loops).length,
        'workflow.parallels_count': Object.keys(currentState.parallels).length,
        'workflow.block_types': JSON.stringify(blockTypeCounts),
        'deployment.version': deployedVersion,
      PlatformEvents.workflowDeployed({
        workflowId,
        workflowName,
        blocksCount: Object.keys(currentState.blocks).length,
        edgesCount: currentState.edges.length,
        version: deployedVersion,
        loopsCount: Object.keys(currentState.loops).length,
        parallelsCount: Object.keys(currentState.parallels).length,
        blockTypes: JSON.stringify(blockTypeCounts),
      })
    } catch (telemetryError) {
      logger.warn(`Failed to track deployment telemetry for ${workflowId}`, telemetryError)
@@ -39,6 +39,8 @@ The chart includes several pre-configured values files for different scenarios:
| `values-azure.yaml` | Azure AKS optimized | Azure Kubernetes Service |
| `values-aws.yaml` | AWS EKS optimized | Amazon Elastic Kubernetes Service |
| `values-gcp.yaml` | GCP GKE optimized | Google Kubernetes Engine |
| `values-external-secrets.yaml` | External Secrets Operator integration | Using Azure Key Vault, AWS Secrets Manager, Vault |
| `values-existing-secret.yaml` | Pre-existing Kubernetes secrets | GitOps, Sealed Secrets, manual secret management |
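
For example, installing with one of the new values files could look like this (release name, namespace, and chart path are illustrative):

```bash
# Hypothetical install using the External Secrets Operator values file
helm install sim ./helm/sim \
  --namespace sim --create-namespace \
  -f helm/sim/examples/values-external-secrets.yaml
```
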
### Development Environment

@@ -623,6 +625,111 @@ To uninstall/delete the release:
helm uninstall sim
```

## External Secret Management

The chart supports integration with external secret management systems for production-grade secret handling. This enables you to store secrets in secure vaults and have them automatically synced to Kubernetes.

### Option 1: External Secrets Operator (Recommended)

[External Secrets Operator](https://external-secrets.io/) is the industry-standard solution for syncing secrets from external stores like Azure Key Vault, AWS Secrets Manager, HashiCorp Vault, and GCP Secret Manager.

**Prerequisites:**
```bash
# Install External Secrets Operator
helm repo add external-secrets https://charts.external-secrets.io
helm install external-secrets external-secrets/external-secrets \
  -n external-secrets --create-namespace
```

**Configuration:**
```yaml
externalSecrets:
  enabled: true
  refreshInterval: "1h"
  secretStoreRef:
    name: "my-secret-store"
    kind: "ClusterSecretStore"
  remoteRefs:
    app:
      BETTER_AUTH_SECRET: "sim/app/better-auth-secret"
      ENCRYPTION_KEY: "sim/app/encryption-key"
      INTERNAL_API_SECRET: "sim/app/internal-api-secret"
    postgresql:
      password: "sim/postgresql/password"
```

See `examples/values-external-secrets.yaml` for complete examples including SecretStore configurations for Azure, AWS, GCP, and Vault.

### Option 2: Pre-Existing Kubernetes Secrets

Reference secrets you've created manually, via GitOps (Sealed Secrets, SOPS), or through other automation.

**Configuration:**
```yaml
app:
  secrets:
    existingSecret:
      enabled: true
      name: "my-app-secrets"

postgresql:
  auth:
    existingSecret:
      enabled: true
      name: "my-postgresql-secret"
      passwordKey: "POSTGRES_PASSWORD"

externalDatabase:
  existingSecret:
    enabled: true
    name: "my-external-db-secret"
    passwordKey: "password"
```

**Create secrets manually:**
```bash
# Generate secure values
BETTER_AUTH_SECRET=$(openssl rand -hex 32)
ENCRYPTION_KEY=$(openssl rand -hex 32)
INTERNAL_API_SECRET=$(openssl rand -hex 32)
POSTGRES_PASSWORD=$(openssl rand -base64 16 | tr -d '/+=')

# Create app secrets
kubectl create secret generic my-app-secrets \
  --namespace sim \
  --from-literal=BETTER_AUTH_SECRET="$BETTER_AUTH_SECRET" \
  --from-literal=ENCRYPTION_KEY="$ENCRYPTION_KEY" \
  --from-literal=INTERNAL_API_SECRET="$INTERNAL_API_SECRET"

# Create PostgreSQL secret
kubectl create secret generic my-postgresql-secret \
  --namespace sim \
  --from-literal=POSTGRES_PASSWORD="$POSTGRES_PASSWORD"
```

See `examples/values-existing-secret.yaml` for more details.
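
Before installing, it is worth confirming that the referenced secrets exist and expose the expected keys (names and namespace follow the example above):

```bash
# Verify the secrets exist and inspect their keys
kubectl get secret my-app-secrets my-postgresql-secret --namespace sim
kubectl describe secret my-app-secrets --namespace sim
```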

### External Secrets Parameters

| Parameter | Description | Default |
|-----------|-------------|---------|
| `app.secrets.existingSecret.enabled` | Use existing secret for app credentials | `false` |
| `app.secrets.existingSecret.name` | Name of existing secret | `""` |
| `app.secrets.existingSecret.keys` | Key name mappings | See values.yaml |
| `postgresql.auth.existingSecret.enabled` | Use existing secret for PostgreSQL | `false` |
| `postgresql.auth.existingSecret.name` | Name of existing secret | `""` |
| `postgresql.auth.existingSecret.passwordKey` | Key containing password | `"POSTGRES_PASSWORD"` |
| `externalDatabase.existingSecret.enabled` | Use existing secret for external DB | `false` |
| `externalDatabase.existingSecret.name` | Name of existing secret | `""` |
| `externalDatabase.existingSecret.passwordKey` | Key containing password | `"EXTERNAL_DB_PASSWORD"` |
| `externalSecrets.enabled` | Enable External Secrets Operator integration | `false` |
| `externalSecrets.refreshInterval` | How often to sync secrets | `"1h"` |
| `externalSecrets.secretStoreRef.name` | Name of SecretStore/ClusterSecretStore | `""` |
| `externalSecrets.secretStoreRef.kind` | Kind of store | `"ClusterSecretStore"` |
| `externalSecrets.remoteRefs.app.*` | Remote paths for app secrets | See values.yaml |
| `externalSecrets.remoteRefs.postgresql.password` | Remote path for PostgreSQL password | `""` |
| `externalSecrets.remoteRefs.externalDatabase.password` | Remote path for external DB password | `""` |
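
These parameters can also be supplied on the command line rather than in a values file, for instance (store name, paths, and chart path are illustrative):

```bash
# Point the chart at an existing ClusterSecretStore via --set flags
helm upgrade --install sim ./helm/sim --namespace sim \
  --set externalSecrets.enabled=true \
  --set externalSecrets.secretStoreRef.name=sim-secret-store \
  --set externalSecrets.remoteRefs.app.BETTER_AUTH_SECRET=sim/app/better-auth-secret \
  --set externalSecrets.remoteRefs.postgresql.password=sim/postgresql/password
```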

## Security Considerations

### Production Secrets
helm/sim/examples/values-existing-secret.yaml (new file, 54 lines)
@@ -0,0 +1,54 @@
# Using pre-existing Kubernetes secrets for Sim
# For GitOps, Sealed Secrets, or manual secret management

# Prerequisites:
# Create your secrets before installing (see examples at bottom of file)

app:
  enabled: true
  replicaCount: 2
  secrets:
    existingSecret:
      enabled: true
      name: "sim-app-secrets"
  env:
    NEXT_PUBLIC_APP_URL: "https://sim.example.com"
    BETTER_AUTH_URL: "https://sim.example.com"
    NEXT_PUBLIC_SOCKET_URL: "wss://sim-ws.example.com"
    NODE_ENV: "production"

realtime:
  enabled: true
  replicaCount: 2
  env:
    NEXT_PUBLIC_APP_URL: "https://sim.example.com"
    BETTER_AUTH_URL: "https://sim.example.com"
    NEXT_PUBLIC_SOCKET_URL: "wss://sim-ws.example.com"
    ALLOWED_ORIGINS: "https://sim.example.com"
    NODE_ENV: "production"

postgresql:
  enabled: true
  auth:
    username: postgres
    database: sim
    existingSecret:
      enabled: true
      name: "sim-postgresql-secret"
      passwordKey: "POSTGRES_PASSWORD"

# ---
# Create secrets before installing:
# ---

# kubectl create secret generic sim-app-secrets \
#   --namespace sim \
#   --from-literal=BETTER_AUTH_SECRET="$(openssl rand -hex 32)" \
#   --from-literal=ENCRYPTION_KEY="$(openssl rand -hex 32)" \
#   --from-literal=INTERNAL_API_SECRET="$(openssl rand -hex 32)" \
#   --from-literal=CRON_SECRET="$(openssl rand -hex 32)" \
#   --from-literal=API_ENCRYPTION_KEY="$(openssl rand -hex 32)"

# kubectl create secret generic sim-postgresql-secret \
#   --namespace sim \
#   --from-literal=POSTGRES_PASSWORD="$(openssl rand -base64 16 | tr -d '/+=')"
helm/sim/examples/values-external-secrets.yaml (new file, 94 lines)
@@ -0,0 +1,94 @@
# External Secrets Operator integration for Sim
# Syncs secrets from Azure Key Vault, AWS Secrets Manager, HashiCorp Vault, etc.

# Prerequisites:
# 1. Install ESO: helm install external-secrets external-secrets/external-secrets -n external-secrets --create-namespace
# 2. Create a SecretStore/ClusterSecretStore for your provider (see examples at bottom of file)

externalSecrets:
  enabled: true
  apiVersion: "v1"
  refreshInterval: "1h"
  secretStoreRef:
    name: "sim-secret-store"
    kind: "ClusterSecretStore"
  remoteRefs:
    app:
      BETTER_AUTH_SECRET: "sim/app/better-auth-secret"
      ENCRYPTION_KEY: "sim/app/encryption-key"
      INTERNAL_API_SECRET: "sim/app/internal-api-secret"
      CRON_SECRET: "sim/app/cron-secret"
      API_ENCRYPTION_KEY: "sim/app/api-encryption-key"
    postgresql:
      password: "sim/postgresql/password"

app:
  enabled: true
  replicaCount: 2
  env:
    NEXT_PUBLIC_APP_URL: "https://sim.example.com"
    BETTER_AUTH_URL: "https://sim.example.com"
    NEXT_PUBLIC_SOCKET_URL: "wss://sim-ws.example.com"
    NODE_ENV: "production"

realtime:
  enabled: true
  replicaCount: 2
  env:
    NEXT_PUBLIC_APP_URL: "https://sim.example.com"
    BETTER_AUTH_URL: "https://sim.example.com"
    NEXT_PUBLIC_SOCKET_URL: "wss://sim-ws.example.com"
    ALLOWED_ORIGINS: "https://sim.example.com"
    NODE_ENV: "production"

postgresql:
  enabled: true
  auth:
    username: postgres
    database: sim

# ---
# SecretStore Examples (apply one of these to your cluster before installing)
# ---

# Azure Key Vault (Workload Identity):
# apiVersion: external-secrets.io/v1beta1
# kind: ClusterSecretStore
# metadata:
#   name: sim-secret-store
# spec:
#   provider:
#     azurekv:
#       authType: WorkloadIdentity
#       vaultUrl: "https://your-keyvault.vault.azure.net"
#       serviceAccountRef:
#         name: external-secrets-sa
#         namespace: external-secrets

# AWS Secrets Manager (IRSA):
# apiVersion: external-secrets.io/v1beta1
# kind: ClusterSecretStore
# metadata:
#   name: sim-secret-store
# spec:
#   provider:
#     aws:
#       service: SecretsManager
#       region: us-east-1
#       role: arn:aws:iam::123456789012:role/external-secrets-role

# HashiCorp Vault (Kubernetes Auth):
# apiVersion: external-secrets.io/v1beta1
# kind: ClusterSecretStore
# metadata:
#   name: sim-secret-store
# spec:
#   provider:
#     vault:
#       server: "https://vault.example.com"
#       path: "secret"
#       version: "v2"
#       auth:
#         kubernetes:
#           mountPath: "kubernetes"
#           role: "external-secrets"
@@ -181,8 +181,15 @@ Database URL for internal PostgreSQL

{{/*
Validate required secrets and reject default placeholder values
Skip validation when using existing secrets or External Secrets Operator
*/}}
{{- define "sim.validateSecrets" -}}
{{- $useExistingAppSecret := and .Values.app.secrets .Values.app.secrets.existingSecret .Values.app.secrets.existingSecret.enabled }}
{{- $useExternalSecrets := and .Values.externalSecrets .Values.externalSecrets.enabled }}
{{- $useExistingPostgresSecret := and .Values.postgresql.auth.existingSecret .Values.postgresql.auth.existingSecret.enabled }}
{{- $useExistingExternalDbSecret := and .Values.externalDatabase.existingSecret .Values.externalDatabase.existingSecret.enabled }}
{{- /* App secrets validation - skip if using existing secret or ESO */ -}}
{{- if not (or $useExistingAppSecret $useExternalSecrets) }}
{{- if and .Values.app.enabled (not .Values.app.env.BETTER_AUTH_SECRET) }}
{{- fail "app.env.BETTER_AUTH_SECRET is required for production deployment" }}
{{- end }}
@@ -198,15 +205,21 @@ Validate required secrets and reject default placeholder values
{{- if and .Values.realtime.enabled (eq .Values.realtime.env.BETTER_AUTH_SECRET "CHANGE-ME-32-CHAR-SECRET-FOR-PRODUCTION-USE") }}
{{- fail "realtime.env.BETTER_AUTH_SECRET must not use the default placeholder value. Generate a secure secret with: openssl rand -hex 32" }}
{{- end }}
{{- end }}
{{- /* PostgreSQL password validation - skip if using existing secret or ESO */ -}}
{{- if not (or $useExistingPostgresSecret $useExternalSecrets) }}
{{- if and .Values.postgresql.enabled (not .Values.postgresql.auth.password) }}
{{- fail "postgresql.auth.password is required when using internal PostgreSQL" }}
{{- end }}
{{- if and .Values.postgresql.enabled (eq .Values.postgresql.auth.password "CHANGE-ME-SECURE-PASSWORD") }}
{{- fail "postgresql.auth.password must not use the default placeholder value. Set a secure password for production" }}
{{- end }}
{{- if and .Values.postgresql.enabled (not (regexMatch "^[a-zA-Z0-9._-]+$" .Values.postgresql.auth.password)) }}
{{- if and .Values.postgresql.enabled .Values.postgresql.auth.password (not (regexMatch "^[a-zA-Z0-9._-]+$" .Values.postgresql.auth.password)) }}
{{- fail "postgresql.auth.password must only contain alphanumeric characters, hyphens, underscores, or periods to ensure DATABASE_URL compatibility. Generate with: openssl rand -base64 16 | tr -d '/+='" }}
{{- end }}
{{- end }}
{{- /* External database password validation - skip if using existing secret or ESO */ -}}
{{- if not (or $useExistingExternalDbSecret $useExternalSecrets) }}
{{- if and .Values.externalDatabase.enabled (not .Values.externalDatabase.password) }}
{{- fail "externalDatabase.password is required when using external database" }}
{{- end }}
@@ -214,6 +227,103 @@ Validate required secrets and reject default placeholder values
{{- fail "externalDatabase.password must only contain alphanumeric characters, hyphens, underscores, or periods to ensure DATABASE_URL compatibility." }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Get the app secrets name
Returns the name of the secret containing app credentials (auth, encryption keys)
*/}}
{{- define "sim.appSecretName" -}}
{{- if and .Values.app.secrets .Values.app.secrets.existingSecret .Values.app.secrets.existingSecret.enabled -}}
{{- .Values.app.secrets.existingSecret.name -}}
{{- else -}}
{{- printf "%s-app-secrets" (include "sim.fullname" .) -}}
{{- end -}}
{{- end }}

{{/*
Get the PostgreSQL secret name
Returns the name of the secret containing PostgreSQL password
*/}}
{{- define "sim.postgresqlSecretName" -}}
{{- if and .Values.postgresql.auth.existingSecret .Values.postgresql.auth.existingSecret.enabled -}}
{{- .Values.postgresql.auth.existingSecret.name -}}
{{- else -}}
{{- printf "%s-postgresql-secret" (include "sim.fullname" .) -}}
{{- end -}}
{{- end }}

{{/*
Get the PostgreSQL password key name
Returns the key name in the secret that contains the password
*/}}
{{- define "sim.postgresqlPasswordKey" -}}
{{- if and .Values.postgresql.auth.existingSecret .Values.postgresql.auth.existingSecret.enabled -}}
{{- .Values.postgresql.auth.existingSecret.passwordKey | default "POSTGRES_PASSWORD" -}}
{{- else -}}
{{- print "POSTGRES_PASSWORD" -}}
{{- end -}}
{{- end }}

{{/*
Get the external database secret name
Returns the name of the secret containing external database password
*/}}
{{- define "sim.externalDbSecretName" -}}
{{- if and .Values.externalDatabase.existingSecret .Values.externalDatabase.existingSecret.enabled -}}
{{- .Values.externalDatabase.existingSecret.name -}}
{{- else -}}
{{- printf "%s-external-db-secret" (include "sim.fullname" .) -}}
{{- end -}}
{{- end }}

{{/*
Get the external database password key name
Returns the key name in the secret that contains the password
*/}}
{{- define "sim.externalDbPasswordKey" -}}
{{- if and .Values.externalDatabase.existingSecret .Values.externalDatabase.existingSecret.enabled -}}
{{- .Values.externalDatabase.existingSecret.passwordKey | default "EXTERNAL_DB_PASSWORD" -}}
{{- else -}}
{{- print "EXTERNAL_DB_PASSWORD" -}}
{{- end -}}
{{- end }}

{{/*
Check if app secrets should be created by the chart
Returns true if we should create the app secrets (not using existing or ESO)
*/}}
{{- define "sim.createAppSecrets" -}}
{{- $useExistingAppSecret := and .Values.app.secrets .Values.app.secrets.existingSecret .Values.app.secrets.existingSecret.enabled }}
{{- $useExternalSecrets := and .Values.externalSecrets .Values.externalSecrets.enabled }}
{{- if not (or $useExistingAppSecret $useExternalSecrets) -}}
true
{{- end -}}
{{- end }}

{{/*
Check if PostgreSQL secret should be created by the chart
Returns true if we should create the PostgreSQL secret (not using existing or ESO)
*/}}
{{- define "sim.createPostgresqlSecret" -}}
{{- $useExistingSecret := and .Values.postgresql.auth.existingSecret .Values.postgresql.auth.existingSecret.enabled }}
{{- $useExternalSecrets := and .Values.externalSecrets .Values.externalSecrets.enabled }}
{{- if not (or $useExistingSecret $useExternalSecrets) -}}
true
{{- end -}}
{{- end }}

{{/*
Check if external database secret should be created by the chart
Returns true if we should create the external database secret (not using existing or ESO)
*/}}
{{- define "sim.createExternalDbSecret" -}}
{{- $useExistingSecret := and .Values.externalDatabase.existingSecret .Values.externalDatabase.existingSecret.enabled }}
{{- $useExternalSecrets := and .Values.externalSecrets .Values.externalSecrets.enabled }}
{{- if not (or $useExistingSecret $useExternalSecrets) -}}
true
{{- end -}}
{{- end }}

{{/*
Ollama URL
@@ -44,15 +44,14 @@ spec:
              cd /app/packages/db
              export DATABASE_URL="{{ include "sim.databaseUrl" . }}"
              bun run db:migrate
          {{- if .Values.postgresql.enabled }}
          envFrom:
          {{- if .Values.postgresql.enabled }}
            - secretRef:
                name: {{ include "sim.fullname" . }}-postgresql-secret
          {{- else if .Values.externalDatabase.enabled }}
          envFrom:
                name: {{ include "sim.postgresqlSecretName" . }}
          {{- else if .Values.externalDatabase.enabled }}
            - secretRef:
                name: {{ include "sim.fullname" . }}-external-db-secret
          {{- end }}
                name: {{ include "sim.externalDbSecretName" . }}
          {{- end }}
          {{- include "sim.resources" .Values.migrations | nindent 10 }}
          {{- include "sim.securityContext" .Values.migrations | nindent 10 }}
{{- end }}

@@ -89,15 +88,18 @@ spec:
            {{- with .Values.extraEnvVars }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- if .Values.postgresql.enabled }}
          envFrom:
            # App secrets (authentication, encryption keys)
            - secretRef:
                name: {{ include "sim.fullname" . }}-postgresql-secret
          {{- else if .Values.externalDatabase.enabled }}
          envFrom:
                name: {{ include "sim.appSecretName" . }}
            # Database secrets
            {{- if .Values.postgresql.enabled }}
            - secretRef:
                name: {{ include "sim.fullname" . }}-external-db-secret
          {{- end }}
                name: {{ include "sim.postgresqlSecretName" . }}
            {{- else if .Values.externalDatabase.enabled }}
            - secretRef:
                name: {{ include "sim.externalDbSecretName" . }}
            {{- end }}
          {{- if .Values.app.livenessProbe }}
          livenessProbe:
            {{- toYaml .Values.app.livenessProbe | nindent 12 }}

@@ -62,15 +62,18 @@ spec:
            {{- with .Values.extraEnvVars }}
            {{- toYaml . | nindent 12 }}
            {{- end }}
          {{- if .Values.postgresql.enabled }}
          envFrom:
            # App secrets (authentication keys shared with main app)
            - secretRef:
                name: {{ include "sim.fullname" . }}-postgresql-secret
          {{- else if .Values.externalDatabase.enabled }}
          envFrom:
                name: {{ include "sim.appSecretName" . }}
            # Database secrets
            {{- if .Values.postgresql.enabled }}
            - secretRef:
                name: {{ include "sim.fullname" . }}-external-db-secret
          {{- end }}
                name: {{ include "sim.postgresqlSecretName" . }}
            {{- else if .Values.externalDatabase.enabled }}
            - secretRef:
                name: {{ include "sim.externalDbSecretName" . }}
            {{- end }}
          {{- if .Values.realtime.livenessProbe }}
          livenessProbe:
            {{- toYaml .Values.realtime.livenessProbe | nindent 12 }}

@@ -1,4 +1,4 @@
{{- if .Values.externalDatabase.enabled }}
{{- if and .Values.externalDatabase.enabled (include "sim.createExternalDbSecret" .) }}
---
# Secret for external database credentials
apiVersion: v1
helm/sim/templates/external-secret-app.yaml (new file, 44 lines)
@@ -0,0 +1,44 @@
{{- if and .Values.externalSecrets.enabled .Values.app.enabled }}
# ExternalSecret for app credentials (syncs from external secret managers)
apiVersion: external-secrets.io/{{ .Values.externalSecrets.apiVersion | default "v1" }}
kind: ExternalSecret
metadata:
  name: {{ include "sim.fullname" . }}-app-secrets
  namespace: {{ .Release.Namespace }}
  labels:
    {{- include "sim.app.labels" . | nindent 4 }}
spec:
  refreshInterval: {{ .Values.externalSecrets.refreshInterval | quote }}
  secretStoreRef:
    name: {{ required "externalSecrets.secretStoreRef.name is required when externalSecrets.enabled=true" .Values.externalSecrets.secretStoreRef.name }}
    kind: {{ .Values.externalSecrets.secretStoreRef.kind | default "ClusterSecretStore" }}
  target:
    name: {{ include "sim.fullname" . }}-app-secrets
    creationPolicy: Owner
  data:
    {{- if .Values.externalSecrets.remoteRefs.app.BETTER_AUTH_SECRET }}
    - secretKey: BETTER_AUTH_SECRET
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.app.BETTER_AUTH_SECRET }}
    {{- end }}
    {{- if .Values.externalSecrets.remoteRefs.app.ENCRYPTION_KEY }}
    - secretKey: ENCRYPTION_KEY
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.app.ENCRYPTION_KEY }}
    {{- end }}
    {{- if .Values.externalSecrets.remoteRefs.app.INTERNAL_API_SECRET }}
    - secretKey: INTERNAL_API_SECRET
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.app.INTERNAL_API_SECRET }}
    {{- end }}
    {{- if .Values.externalSecrets.remoteRefs.app.CRON_SECRET }}
    - secretKey: CRON_SECRET
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.app.CRON_SECRET }}
    {{- end }}
    {{- if .Values.externalSecrets.remoteRefs.app.API_ENCRYPTION_KEY }}
    - secretKey: API_ENCRYPTION_KEY
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.app.API_ENCRYPTION_KEY }}
    {{- end }}
{{- end }}
helm/sim/templates/external-secret-external-db.yaml (new file, 23 lines)
@@ -0,0 +1,23 @@
{{- if and .Values.externalSecrets.enabled .Values.externalDatabase.enabled .Values.externalSecrets.remoteRefs.externalDatabase.password }}
# ExternalSecret for external database password (syncs from external secret managers)
apiVersion: external-secrets.io/{{ .Values.externalSecrets.apiVersion | default "v1" }}
kind: ExternalSecret
metadata:
  name: {{ include "sim.fullname" . }}-external-db-secret
  namespace: {{ .Release.Namespace }}
  labels:
    {{- include "sim.labels" . | nindent 4 }}
    app.kubernetes.io/component: external-database
spec:
  refreshInterval: {{ .Values.externalSecrets.refreshInterval | quote }}
  secretStoreRef:
    name: {{ required "externalSecrets.secretStoreRef.name is required when externalSecrets.enabled=true" .Values.externalSecrets.secretStoreRef.name }}
    kind: {{ .Values.externalSecrets.secretStoreRef.kind | default "ClusterSecretStore" }}
  target:
    name: {{ include "sim.fullname" . }}-external-db-secret
    creationPolicy: Owner
  data:
    - secretKey: EXTERNAL_DB_PASSWORD
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.externalDatabase.password }}
{{- end }}
helm/sim/templates/external-secret-postgresql.yaml (new file, 22 lines)
@@ -0,0 +1,22 @@
{{- if and .Values.externalSecrets.enabled .Values.postgresql.enabled .Values.externalSecrets.remoteRefs.postgresql.password }}
# ExternalSecret for PostgreSQL password (syncs from external secret managers)
apiVersion: external-secrets.io/{{ .Values.externalSecrets.apiVersion | default "v1" }}
kind: ExternalSecret
metadata:
  name: {{ include "sim.fullname" . }}-postgresql-secret
  namespace: {{ .Release.Namespace }}
  labels:
    {{- include "sim.postgresql.labels" . | nindent 4 }}
spec:
  refreshInterval: {{ .Values.externalSecrets.refreshInterval | quote }}
  secretStoreRef:
    name: {{ required "externalSecrets.secretStoreRef.name is required when externalSecrets.enabled=true" .Values.externalSecrets.secretStoreRef.name }}
    kind: {{ .Values.externalSecrets.secretStoreRef.kind | default "ClusterSecretStore" }}
  target:
    name: {{ include "sim.fullname" . }}-postgresql-secret
    creationPolicy: Owner
  data:
    - secretKey: POSTGRES_PASSWORD
      remoteRef:
        key: {{ .Values.externalSecrets.remoteRefs.postgresql.password }}
{{- end }}
helm/sim/templates/secrets-app.yaml (new file, 27 lines)
@@ -0,0 +1,27 @@
{{- if and .Values.app.enabled (include "sim.createAppSecrets" .) }}
# Secret for app credentials (authentication, encryption keys)
apiVersion: v1
kind: Secret
metadata:
  name: {{ include "sim.fullname" . }}-app-secrets
  namespace: {{ .Release.Namespace }}
  labels:
    {{- include "sim.app.labels" . | nindent 4 }}
type: Opaque
stringData:
  {{- if .Values.app.env.BETTER_AUTH_SECRET }}
  BETTER_AUTH_SECRET: {{ .Values.app.env.BETTER_AUTH_SECRET | quote }}
  {{- end }}
  {{- if .Values.app.env.ENCRYPTION_KEY }}
  ENCRYPTION_KEY: {{ .Values.app.env.ENCRYPTION_KEY | quote }}
  {{- end }}
  {{- if .Values.app.env.INTERNAL_API_SECRET }}
  INTERNAL_API_SECRET: {{ .Values.app.env.INTERNAL_API_SECRET | quote }}
  {{- end }}
  {{- if .Values.app.env.CRON_SECRET }}
  CRON_SECRET: {{ .Values.app.env.CRON_SECRET | quote }}
  {{- end }}
  {{- if .Values.app.env.API_ENCRYPTION_KEY }}
  API_ENCRYPTION_KEY: {{ .Values.app.env.API_ENCRYPTION_KEY | quote }}
  {{- end }}
{{- end }}
@@ -65,6 +65,7 @@ data:
  POSTGRES_USER: {{ .Values.postgresql.auth.username | quote }}
  PGDATA: "/var/lib/postgresql/data/pgdata"

{{- if (include "sim.createPostgresqlSecret" .) }}
---
# Secret for PostgreSQL password
apiVersion: v1
@@ -77,6 +78,7 @@ metadata:
type: Opaque
data:
  POSTGRES_PASSWORD: {{ .Values.postgresql.auth.password | b64enc }}
{{- end }}

---
# StatefulSet for PostgreSQL
@@ -128,7 +130,7 @@ spec:
            - configMapRef:
                name: {{ include "sim.fullname" . }}-postgresql-env
            - secretRef:
                name: {{ include "sim.fullname" . }}-postgresql-secret
                name: {{ include "sim.postgresqlSecretName" . }}
          {{- if .Values.postgresql.livenessProbe }}
          livenessProbe:
            {{- toYaml .Values.postgresql.livenessProbe | nindent 12 }}

@@ -81,18 +81,39 @@
          }
        }
      },
      "secrets": {
        "type": "object",
        "description": "Secret management configuration",
        "properties": {
          "existingSecret": {
            "type": "object",
            "properties": {
              "enabled": {
                "type": "boolean",
                "description": "Use an existing secret instead of creating one"
              },
              "name": {
                "type": "string",
                "description": "Name of the existing Kubernetes secret"
              },
              "keys": {
                "type": "object",
                "description": "Key name mappings in the existing secret"
              }
            }
          }
        }
      },
      "env": {
        "type": "object",
        "properties": {
          "BETTER_AUTH_SECRET": {
            "type": "string",
            "minLength": 32,
            "description": "Auth secret (minimum 32 characters required)"
            "description": "Auth secret (minimum 32 characters required when not using existingSecret)"
          },
          "ENCRYPTION_KEY": {
            "type": "string",
            "minLength": 32,
            "description": "Encryption key (minimum 32 characters required)"
            "description": "Encryption key (minimum 32 characters required when not using existingSecret)"
          },
          "NEXT_PUBLIC_APP_URL": {
            "type": "string",
@@ -329,8 +350,7 @@
        "properties": {
          "BETTER_AUTH_SECRET": {
            "type": "string",
            "minLength": 32,
            "description": "Auth secret (minimum 32 characters required)"
            "description": "Auth secret (minimum 32 characters required when not using existingSecret)"
          },
          "NEXT_PUBLIC_APP_URL": {
            "type": "string",
@@ -431,11 +451,25 @@
          },
          "password": {
            "type": "string",
            "minLength": 8,
            "not": {
              "const": "CHANGE-ME-SECURE-PASSWORD"
            },
            "description": "PostgreSQL password (minimum 8 characters, must not be default placeholder)"
            "description": "PostgreSQL password (minimum 8 characters when not using existingSecret)"
          },
          "existingSecret": {
            "type": "object",
            "description": "Use an existing secret for PostgreSQL credentials",
            "properties": {
              "enabled": {
                "type": "boolean",
                "description": "Use an existing secret instead of creating one"
              },
              "name": {
                "type": "string",
                "description": "Name of the existing Kubernetes secret"
              },
              "passwordKey": {
                "type": "string",
                "description": "Key in the secret containing the password"
              }
            }
          }
        }
      }
@@ -475,6 +509,24 @@
          "type": "string",
          "enum": ["disable", "allow", "prefer", "require", "verify-ca", "verify-full"],
          "description": "SSL mode for database connection"
        },
        "existingSecret": {
          "type": "object",
          "description": "Use an existing secret for external database credentials",
          "properties": {
            "enabled": {
              "type": "boolean",
              "description": "Use an existing secret instead of creating one"
            },
            "name": {
              "type": "string",
              "description": "Name of the existing Kubernetes secret"
            },
            "passwordKey": {
              "type": "string",
              "description": "Key in the secret containing the password"
            }
          }
        }
      },
      "if": {
@@ -821,6 +873,61 @@
        }
      }
    },
    "externalSecrets": {
      "type": "object",
      "description": "External Secrets Operator integration",
      "properties": {
        "enabled": {
          "type": "boolean",
          "description": "Enable External Secrets Operator integration"
        },
        "apiVersion": {
          "type": "string",
          "enum": ["v1", "v1beta1"],
          "description": "ESO API version - use v1 for ESO v0.17+ (recommended), v1beta1 for older versions"
        },
        "refreshInterval": {
          "type": "string",
          "description": "How often to sync secrets from external store"
        },
        "secretStoreRef": {
          "type": "object",
          "properties": {
            "name": {
              "type": "string",
              "description": "Name of the SecretStore or ClusterSecretStore"
            },
            "kind": {
              "type": "string",
              "enum": ["SecretStore", "ClusterSecretStore"],
              "description": "Kind of the store"
            }
          }
        },
        "remoteRefs": {
          "type": "object",
          "description": "Remote key paths in external secret store",
          "properties": {
            "app": {
              "type": "object",
              "additionalProperties": { "type": "string" }
            },
            "postgresql": {
              "type": "object",
              "properties": {
                "password": { "type": "string" }
              }
            },
            "externalDatabase": {
              "type": "object",
              "properties": {
                "password": { "type": "string" }
              }
            }
          }
        }
      }
    },
    "ingress": {
      "type": "object",
      "properties": {

@@ -16,16 +16,16 @@ global:
app:
  # Enable/disable the main application
  enabled: true

  # Image configuration
  image:
    repository: simstudioai/simstudio
    tag: latest
    pullPolicy: Always

  # Number of replicas
  replicaCount: 1

  # Resource limits and requests
  resources:
    limits:
@@ -34,19 +34,37 @@ app:
    requests:
      memory: "2Gi"
      cpu: "1000m"

  # Node selector for pod scheduling (leave empty to allow scheduling on any node)
  nodeSelector: {}

  # Pod security context
  podSecurityContext:
    fsGroup: 1001

  # Container security context
  securityContext:
    runAsNonRoot: true
    runAsUser: 1001

  # Secret management configuration
  # Use this to reference pre-existing Kubernetes secrets instead of defining values directly
  # This enables integration with External Secrets Operator, HashiCorp Vault, Azure Key Vault, etc.
  secrets:
    existingSecret:
      # Set to true to use an existing secret instead of creating one from values
      enabled: false
      # Name of the existing Kubernetes secret containing app credentials
      name: ""
      # Key mappings - specify the key names in your existing secret
      # Only needed if your secret uses different key names than the defaults
      keys:
        BETTER_AUTH_SECRET: "BETTER_AUTH_SECRET"
        ENCRYPTION_KEY: "ENCRYPTION_KEY"
        INTERNAL_API_SECRET: "INTERNAL_API_SECRET"
        CRON_SECRET: "CRON_SECRET"
        API_ENCRYPTION_KEY: "API_ENCRYPTION_KEY"

  # Environment variables
  env:
    # Application URLs
@@ -118,7 +136,9 @@ app:

    # Registration Control
    DISABLE_REGISTRATION: "" # Set to "true" to disable new user signups

    EMAIL_PASSWORD_SIGNUP_ENABLED: "" # Set to "false" to disable email/password login (SSO-only mode, server-side enforcement)
    NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED: "" # Set to "false" to hide email/password login form (UI-side)

    # Access Control (leave empty if not restricting login)
    ALLOWED_LOGIN_EMAILS: "" # Comma-separated list of allowed email addresses for login
    ALLOWED_LOGIN_DOMAINS: "" # Comma-separated list of allowed email domains for login
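
For reference, an SSO-only deployment would set both new flags, roughly as follows (illustrative values; per the comments above, the `NEXT_PUBLIC_` variant only hides the form while the plain variant enforces the restriction server-side):

```yaml
# Hypothetical SSO-only settings under app.env
EMAIL_PASSWORD_SIGNUP_ENABLED: "false"              # server-side enforcement
NEXT_PUBLIC_EMAIL_PASSWORD_SIGNUP_ENABLED: "false"  # hide the email/password form
```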

@@ -305,6 +325,12 @@ postgresql:
    username: postgres
    password: "" # REQUIRED - set via --set flag or external secret manager
    database: sim
    # Use an existing secret for PostgreSQL credentials
    # This enables integration with External Secrets Operator, HashiCorp Vault, etc.
    existingSecret:
      enabled: false
      name: "" # Name of existing Kubernetes secret
      passwordKey: "POSTGRES_PASSWORD" # Key in the secret containing the password

  # Node selector for database pod scheduling (leave empty to allow scheduling on any node)
  nodeSelector: {}
@@ -387,17 +413,24 @@ postgresql:
externalDatabase:
  # Enable to use an external database instead of the internal PostgreSQL instance
  enabled: false

  # Database connection details
  host: "external-db.example.com"
  port: 5432
  username: postgres
  password: ""
  database: sim

  # SSL configuration
  sslMode: require

  # Use an existing secret for external database credentials
  # This enables integration with External Secrets Operator, HashiCorp Vault, etc.
  existingSecret:
    enabled: false
    name: "" # Name of existing Kubernetes secret
    passwordKey: "EXTERNAL_DB_PASSWORD" # Key in the secret containing the password

# Ollama local AI models configuration
ollama:
  # Enable/disable Ollama deployment
@@ -1013,4 +1046,51 @@ copilot:

  # Job configuration
  backoffLimit: 3
  restartPolicy: OnFailure
  restartPolicy: OnFailure

# External Secrets Operator integration
# Use this to automatically sync secrets from external secret managers (Azure Key Vault, AWS Secrets Manager, etc.)
# Prerequisites: Install External Secrets Operator in your cluster first
# See: https://external-secrets.io/latest/introduction/getting-started/
externalSecrets:
  # Enable External Secrets Operator integration
  enabled: false

  # ESO API version - use "v1" for ESO v0.17+ (recommended), "v1beta1" for older versions
  apiVersion: "v1"

  # How often to sync secrets from the external store
  refreshInterval: "1h"

  # Reference to the SecretStore or ClusterSecretStore
  secretStoreRef:
    # Name of the SecretStore or ClusterSecretStore resource
    name: ""
    # Kind of the store: "SecretStore" (namespaced) or "ClusterSecretStore" (cluster-wide)
    kind: "ClusterSecretStore"

  # Remote references - paths/keys in your external secret store
  # These map to the secrets that will be created in Kubernetes
  remoteRefs:
    # App secrets (authentication, encryption keys)
    app:
      # Path to BETTER_AUTH_SECRET in external store (e.g., "sim/app/better-auth-secret")
      BETTER_AUTH_SECRET: ""
      # Path to ENCRYPTION_KEY in external store
      ENCRYPTION_KEY: ""
      # Path to INTERNAL_API_SECRET in external store
      INTERNAL_API_SECRET: ""
      # Path to CRON_SECRET in external store (optional)
      CRON_SECRET: ""
      # Path to API_ENCRYPTION_KEY in external store (optional)
      API_ENCRYPTION_KEY: ""

    # PostgreSQL password (for internal PostgreSQL)
    postgresql:
      # Path to PostgreSQL password in external store (e.g., "sim/postgresql/password")
      password: ""

    # External database password (when using managed database services)
    externalDatabase:
      # Path to external database password in external store
      password: ""