Compare commits

..

5 Commits

Author SHA1 Message Date
Waleed
3a1b1a8032 v0.6.43: mothership billing idempotency, env var resolution fixes 2026-04-14 15:22:32 -07:00
Waleed
3d6660ba4d feat(jira): support raw ADF in description and environment fields (#4164)
* fix(security): resolve ReDoS vulnerability in function execute tag pattern

Simplified regex to eliminate overlapping quantifiers that caused exponential
backtracking on malformed input without closing delimiter.

* feat(jira): support raw ADF document objects in description and environment fields

Add toAdf() helper that passes through ADF objects as-is or wraps plain
text in a single-paragraph ADF doc. Update write and update routes to
use it, replacing inline ADF wrapping. Update Zod schema to accept
string or object for description. Fully backward compatible — plain
text still works, but callers can now pass rich ADF with expand nodes,
tables, code blocks, etc.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* fix(jira): handle partial ADF nodes and non-ADF objects in toAdf()

Wrap partial ADF nodes (type + content but not doc) in a doc envelope.
Fall back to JSON.stringify for non-ADF objects instead of String()
which produces [object Object].

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* lint

* fix(jira): handle JSON-stringified ADF in toAdf() for variable resolution

The executor's formatValueForBlock() JSON.stringify's object values when
resolving <Block.output> references. This means an ADF object from an
upstream Agent block arrives at the route as a JSON string. toAdf() now
detects JSON strings containing valid ADF documents or nodes and parses
them back, ensuring rich formatting is preserved through the pipeline.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* lint changes

* fix(jira): update environment Zod schema to accept ADF objects

Match the description field schema change — environment also passes
through toAdf() so its Zod schema must accept objects too.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

* updated lockfile

---------

Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-04-14 15:18:18 -07:00
Waleed
48e174b21f fix(google-drive): add auto export format and validate against Drive API docs (#4161)
* fix(google-drive): add auto export format and Azure storage debug logging

* chore: remove Azure storage debug logging

* fix(google-drive): use status-based fallback instead of string matching for export errors

* fix(google-drive): validate export formats against Drive API docs, remove fallback

* fix(google-drive): use value function for dropdown default

* fix(google-drive): add text/markdown to valid export formats for Google Docs

* fix(google-drive): correct ODS MIME type for Sheets export format
2026-04-14 15:09:36 -07:00
Vikhyath Mondreti
7529a75ac0 fix(triggers): env var resolution in provider configs (#4160)
* fix(triggers): env var resolution in provider configs

* throw on errored resolution
2026-04-14 14:30:26 -07:00
Theodore Li
6b2e83bf58 fix(billing): add idempotency to billing (#4157)
* fix(billing): add idempotency to billing

* Only release redis lock if billed
2026-04-14 17:15:54 -04:00
14 changed files with 388 additions and 73 deletions

View File

@@ -6,6 +6,7 @@ import { recordUsage } from '@/lib/billing/core/usage-log'
import { checkAndBillOverageThreshold } from '@/lib/billing/threshold-billing'
import { checkInternalApiKey } from '@/lib/copilot/request/http'
import { isBillingEnabled } from '@/lib/core/config/feature-flags'
import { type AtomicClaimResult, billingIdempotency } from '@/lib/core/idempotency/service'
import { generateRequestId } from '@/lib/core/utils/request'
const logger = createLogger('BillingUpdateCostAPI')
@@ -19,6 +20,7 @@ const UpdateCostSchema = z.object({
source: z
.enum(['copilot', 'workspace-chat', 'mcp_copilot', 'mothership_block'])
.default('copilot'),
idempotencyKey: z.string().min(1).optional(),
})
/**
@@ -28,6 +30,8 @@ const UpdateCostSchema = z.object({
export async function POST(req: NextRequest) {
const requestId = generateRequestId()
const startTime = Date.now()
let claim: AtomicClaimResult | null = null
let usageCommitted = false
try {
logger.info(`[${requestId}] Update cost request started`)
@@ -75,9 +79,30 @@ export async function POST(req: NextRequest) {
)
}
const { userId, cost, model, inputTokens, outputTokens, source } = validation.data
const { userId, cost, model, inputTokens, outputTokens, source, idempotencyKey } =
validation.data
const isMcp = source === 'mcp_copilot'
claim = idempotencyKey
? await billingIdempotency.atomicallyClaim('update-cost', idempotencyKey)
: null
if (claim && !claim.claimed) {
logger.warn(`[${requestId}] Duplicate billing update rejected`, {
idempotencyKey,
userId,
source,
})
return NextResponse.json(
{
success: false,
error: 'Duplicate request: idempotency key already processed',
requestId,
},
{ status: 409 }
)
}
logger.info(`[${requestId}] Processing cost update`, {
userId,
cost,
@@ -113,6 +138,7 @@ export async function POST(req: NextRequest) {
],
additionalStats,
})
usageCommitted = true
logger.info(`[${requestId}] Recorded usage`, {
userId,
@@ -149,6 +175,22 @@ export async function POST(req: NextRequest) {
duration,
})
if (claim?.claimed && !usageCommitted) {
await billingIdempotency
.release(claim.normalizedKey, claim.storageMethod)
.catch((releaseErr) => {
logger.warn(`[${requestId}] Failed to release idempotency claim`, {
error: releaseErr instanceof Error ? releaseErr.message : String(releaseErr),
normalizedKey: claim?.normalizedKey,
})
})
} else if (claim?.claimed && usageCommitted) {
logger.warn(
`[${requestId}] Error occurred after usage committed; retaining idempotency claim to prevent double-billing`,
{ normalizedKey: claim.normalizedKey }
)
}
return NextResponse.json(
{
success: false,

View File

@@ -13,6 +13,7 @@ import {
ALL_REVISION_FIELDS,
DEFAULT_EXPORT_FORMATS,
GOOGLE_WORKSPACE_MIME_TYPES,
VALID_EXPORT_FORMATS,
} from '@/tools/google_drive/utils'
export const dynamic = 'force-dynamic'
@@ -65,10 +66,12 @@ export async function POST(request: NextRequest) {
const {
accessToken,
fileId,
mimeType: exportMimeType,
mimeType: rawExportMimeType,
fileName,
includeRevisions,
} = validatedData
const exportMimeType =
rawExportMimeType && rawExportMimeType !== 'auto' ? rawExportMimeType : null
const authHeader = `Bearer ${accessToken}`
logger.info(`[${requestId}] Getting file metadata from Google Drive`, { fileId })
@@ -112,6 +115,24 @@ export async function POST(request: NextRequest) {
if (GOOGLE_WORKSPACE_MIME_TYPES.includes(fileMimeType)) {
const exportFormat = exportMimeType || DEFAULT_EXPORT_FORMATS[fileMimeType] || 'text/plain'
const validFormats = VALID_EXPORT_FORMATS[fileMimeType]
if (validFormats && !validFormats.includes(exportFormat)) {
logger.warn(`[${requestId}] Unsupported export format requested`, {
fileId,
fileMimeType,
requestedFormat: exportFormat,
validFormats,
})
return NextResponse.json(
{
success: false,
error: `Export format "${exportFormat}" is not supported for this file type. Supported formats: ${validFormats.join(', ')}`,
},
{ status: 400 }
)
}
finalMimeType = exportFormat
logger.info(`[${requestId}] Exporting Google Workspace file`, {

View File

@@ -3,7 +3,7 @@ import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateJiraCloudId, validateJiraIssueKey } from '@/lib/core/security/input-validation'
import { getJiraCloudId, parseAtlassianErrorMessage } from '@/tools/jira/utils'
import { getJiraCloudId, parseAtlassianErrorMessage, toAdf } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
@@ -15,14 +15,14 @@ const jiraUpdateSchema = z.object({
issueKey: z.string().min(1, 'Issue key is required'),
summary: z.string().optional(),
title: z.string().optional(),
description: z.string().optional(),
description: z.union([z.string(), z.record(z.unknown())]).optional(),
priority: z.string().optional(),
assignee: z.string().optional(),
labels: z.array(z.string()).optional(),
components: z.array(z.string()).optional(),
duedate: z.string().optional(),
fixVersions: z.array(z.string()).optional(),
environment: z.string().optional(),
environment: z.union([z.string(), z.record(z.unknown())]).optional(),
customFieldId: z.string().optional(),
customFieldValue: z.string().optional(),
notifyUsers: z.boolean().optional(),
@@ -91,21 +91,7 @@ export async function PUT(request: NextRequest) {
}
if (description !== undefined && description !== null && description !== '') {
fields.description = {
type: 'doc',
version: 1,
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: description,
},
],
},
],
}
fields.description = toAdf(description)
}
if (priority !== undefined && priority !== null && priority !== '') {
@@ -136,21 +122,7 @@ export async function PUT(request: NextRequest) {
}
if (environment !== undefined && environment !== null && environment !== '') {
fields.environment = {
type: 'doc',
version: 1,
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: environment,
},
],
},
],
}
fields.environment = toAdf(environment)
}
if (

View File

@@ -2,7 +2,7 @@ import { createLogger } from '@sim/logger'
import { type NextRequest, NextResponse } from 'next/server'
import { checkSessionOrInternalAuth } from '@/lib/auth/hybrid'
import { validateAlphanumericId, validateJiraCloudId } from '@/lib/core/security/input-validation'
import { getJiraCloudId, parseAtlassianErrorMessage } from '@/tools/jira/utils'
import { getJiraCloudId, parseAtlassianErrorMessage, toAdf } from '@/tools/jira/utils'
export const dynamic = 'force-dynamic'
@@ -85,21 +85,7 @@ export async function POST(request: NextRequest) {
}
if (description !== undefined && description !== null && description !== '') {
fields.description = {
type: 'doc',
version: 1,
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: description,
},
],
},
],
}
fields.description = toAdf(description)
}
if (parent !== undefined && parent !== null && parent !== '') {
@@ -144,21 +130,7 @@ export async function POST(request: NextRequest) {
}
if (environment !== undefined && environment !== null && environment !== '') {
fields.environment = {
type: 'doc',
version: 1,
content: [
{
type: 'paragraph',
content: [
{
type: 'text',
text: environment,
},
],
},
],
}
fields.environment = toAdf(environment)
}
if (

View File

@@ -0,0 +1,68 @@
/**
 * @vitest-environment node
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Hoisted so the mock function exists before the vi.mock factory below is evaluated.
const { mockResolveWebhookRecordProviderConfig } = vi.hoisted(() => ({
  mockResolveWebhookRecordProviderConfig: vi.fn(),
}))
// Replace the real env resolver so each test controls resolution success/failure.
vi.mock('@/lib/webhooks/env-resolver', () => ({
  resolveWebhookRecordProviderConfig: mockResolveWebhookRecordProviderConfig,
}))
import { resolveWebhookExecutionProviderConfig } from './webhook-execution'
describe('resolveWebhookExecutionProviderConfig', () => {
  beforeEach(() => {
    // Reset call history so assertions only see the current test's calls.
    vi.clearAllMocks()
  })
  it('returns the resolved webhook record when provider config resolution succeeds', async () => {
    const webhookRecord = {
      id: 'webhook-1',
      providerConfig: {
        botToken: '{{SLACK_BOT_TOKEN}}',
      },
    }
    const resolvedWebhookRecord = {
      ...webhookRecord,
      providerConfig: {
        botToken: 'xoxb-resolved',
      },
    }
    mockResolveWebhookRecordProviderConfig.mockResolvedValue(resolvedWebhookRecord)
    await expect(
      resolveWebhookExecutionProviderConfig(webhookRecord, 'slack', 'user-1', 'workspace-1')
    ).resolves.toEqual(resolvedWebhookRecord)
    // The wrapper must forward the record and identity args (not the provider name)
    // to the underlying resolver.
    expect(mockResolveWebhookRecordProviderConfig).toHaveBeenCalledWith(
      webhookRecord,
      'user-1',
      'workspace-1'
    )
  })
  it('throws a contextual error when provider config resolution fails', async () => {
    mockResolveWebhookRecordProviderConfig.mockRejectedValue(new Error('env lookup failed'))
    await expect(
      resolveWebhookExecutionProviderConfig(
        {
          id: 'webhook-1',
          providerConfig: {
            botToken: '{{SLACK_BOT_TOKEN}}',
          },
        },
        'slack',
        'user-1',
        'workspace-1'
      )
      // The wrapper re-throws with provider name, webhook id, and the original
      // error message appended for traceability.
    ).rejects.toThrow(
      'Failed to resolve webhook provider config for slack webhook webhook-1: env lookup failed'
    )
  })
})

View File

@@ -11,6 +11,7 @@ import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { buildTraceSpans } from '@/lib/logs/execution/trace-spans/trace-spans'
import { WebhookAttachmentProcessor } from '@/lib/webhooks/attachment-processor'
import { resolveWebhookRecordProviderConfig } from '@/lib/webhooks/env-resolver'
import { getProviderHandler } from '@/lib/webhooks/providers'
import {
executeWorkflowCore,
@@ -168,6 +169,24 @@ export async function executeWebhookJob(payload: WebhookExecutionPayload) {
)
}
/**
 * Resolves environment-variable references in a webhook record's provider
 * config for execution, wrapping any resolution failure in an error that
 * identifies the provider and webhook involved.
 *
 * @param webhookRecord - Webhook-like record whose providerConfig may contain env references.
 * @param provider - Provider name, used only for error context.
 * @param userId - User whose environment variables are used for resolution.
 * @param workspaceId - Optional workspace scope for environment lookup.
 * @returns The record cloned with a fully resolved providerConfig.
 * @throws Error with provider/webhook context when resolution fails.
 */
export async function resolveWebhookExecutionProviderConfig<
  T extends { id: string; providerConfig?: unknown },
>(
  webhookRecord: T,
  provider: string,
  userId: string,
  workspaceId?: string
): Promise<T & { providerConfig: Record<string, unknown> }> {
  try {
    return await resolveWebhookRecordProviderConfig(webhookRecord, userId, workspaceId)
  } catch (cause) {
    const reason = cause instanceof Error ? cause.message : String(cause)
    throw new Error(
      `Failed to resolve webhook provider config for ${provider} webhook ${webhookRecord.id}: ${reason}`
    )
  }
}
async function resolveCredentialAccountUserId(credentialId: string): Promise<string | undefined> {
const resolved = await resolveOAuthAccountId(credentialId)
if (!resolved) {
@@ -300,9 +319,16 @@ async function executeWebhookJobInternal(
throw new Error(`Webhook record not found: ${payload.webhookId}`)
}
const resolvedWebhookRecord = await resolveWebhookExecutionProviderConfig(
webhookRecord,
payload.provider,
workflowRecord.userId,
workspaceId
)
if (handler.formatInput) {
const result = await handler.formatInput({
webhook: webhookRecord,
webhook: resolvedWebhookRecord,
workflow: { id: payload.workflowId, userId: payload.userId },
body: payload.body,
headers: payload.headers,

View File

@@ -316,6 +316,7 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
title: 'Export Format',
type: 'dropdown',
options: [
{ label: 'Auto (best format for file type)', id: 'auto' },
{ label: 'Plain Text (text/plain)', id: 'text/plain' },
{ label: 'HTML (text/html)', id: 'text/html' },
{ label: 'PDF (application/pdf)', id: 'application/pdf' },
@@ -333,7 +334,8 @@ Return ONLY the query string - no explanations, no quotes around the whole thing
},
{ label: 'CSV (text/csv)', id: 'text/csv' },
],
placeholder: 'Optional: Choose export format for Google Docs/Sheets/Slides',
value: () => 'auto',
placeholder: 'Export format for Google Docs/Sheets/Slides',
condition: { field: 'operation', value: 'download' },
},
{
@@ -867,7 +869,7 @@ Return ONLY the message text - no subject line, no greetings/signatures, no extr
destinationFolderId: effectiveDestinationFolderId,
file: normalizedFile,
pageSize: rest.pageSize ? Number.parseInt(rest.pageSize as string, 10) : undefined,
mimeType: mimeType,
mimeType: mimeType === 'auto' ? undefined : mimeType,
type: shareType, // Map shareType to type for share tool
starred: starredValue,
sendNotification: sendNotificationValue,

View File

@@ -343,6 +343,10 @@ export class IdempotencyService {
logger.debug(`Stored idempotency result in database: ${normalizedKey}`)
}
/**
 * Releases a previously claimed idempotency key — e.g. when the guarded
 * operation failed before committing — so a retry can claim it again.
 * Delegates to deleteKey so redis/database cleanup logic stays in one place.
 */
async release(normalizedKey: string, storageMethod: 'redis' | 'database'): Promise<void> {
  return this.deleteKey(normalizedKey, storageMethod)
}
private async deleteKey(
normalizedKey: string,
storageMethod: 'redis' | 'database'
@@ -482,3 +486,8 @@ export const pollingIdempotency = new IdempotencyService({
ttlSeconds: 60 * 60 * 24 * 3, // 3 days
retryFailures: true,
})
/**
 * Idempotency claims for billing cost updates (namespace 'billing').
 * Claims expire after 1 hour — presumably duplicate billing retries arrive
 * well within that window; confirm against the caller's retry policy.
 */
export const billingIdempotency = new IdempotencyService({
  namespace: 'billing',
  ttlSeconds: 60 * 60, // 1 hour
})

View File

@@ -0,0 +1,74 @@
/**
 * @vitest-environment node
 */
import { beforeEach, describe, expect, it, vi } from 'vitest'
// Hoisted so the mock exists before the vi.mock factory below is evaluated.
const { mockGetEffectiveDecryptedEnv } = vi.hoisted(() => ({
  mockGetEffectiveDecryptedEnv: vi.fn(),
}))
// Stub out env decryption so tests supply a fixed variable map instead of
// hitting real storage.
vi.mock('@/lib/environment/utils', () => ({
  getEffectiveDecryptedEnv: mockGetEffectiveDecryptedEnv,
}))
import {
  resolveWebhookProviderConfig,
  resolveWebhookRecordProviderConfig,
} from '@/lib/webhooks/env-resolver'
describe('webhook env resolver', () => {
  beforeEach(() => {
    vi.clearAllMocks()
    // Default environment returned for every test in this suite.
    mockGetEffectiveDecryptedEnv.mockResolvedValue({
      SLACK_BOT_TOKEN: 'xoxb-resolved',
      SLACK_HOST: 'files.slack.com',
    })
  })
  it('resolves environment variables inside webhook provider config', async () => {
    const result = await resolveWebhookProviderConfig(
      {
        botToken: '{{SLACK_BOT_TOKEN}}',
        includeFiles: true,
        nested: {
          url: 'https://{{SLACK_HOST}}/api/files.info',
        },
      },
      'user-1',
      'workspace-1'
    )
    // {{VAR}} references are substituted, including nested objects and
    // mid-string occurrences; non-string values pass through untouched.
    expect(result).toEqual({
      botToken: 'xoxb-resolved',
      includeFiles: true,
      nested: {
        url: 'https://files.slack.com/api/files.info',
      },
    })
    expect(mockGetEffectiveDecryptedEnv).toHaveBeenCalledWith('user-1', 'workspace-1')
  })
  it('returns a cloned webhook record with resolved provider config', async () => {
    const webhookRecord = {
      id: 'webhook-1',
      provider: 'slack',
      providerConfig: {
        botToken: '{{SLACK_BOT_TOKEN}}',
        includeFiles: true,
      },
    }
    const result = await resolveWebhookRecordProviderConfig(webhookRecord, 'user-1', 'workspace-1')
    expect(result).toEqual({
      ...webhookRecord,
      providerConfig: {
        botToken: 'xoxb-resolved',
        includeFiles: true,
      },
    })
    // Resolution must clone — the original record and its config are not mutated.
    expect(result).not.toBe(webhookRecord)
    expect(result.providerConfig).not.toBe(webhookRecord.providerConfig)
  })
})

View File

@@ -20,3 +20,43 @@ export async function resolveEnvVarsInObject<T extends Record<string, unknown>>(
const envVars = await getEffectiveDecryptedEnv(userId, workspaceId)
return resolveEnvVarReferences(config, envVars, { deep: true }) as T
}
/**
 * Normalizes webhook provider config into a plain object for runtime resolution.
 * Anything that is not a non-null, non-array object (null, undefined, strings,
 * numbers, arrays) collapses to an empty object.
 */
export function normalizeWebhookProviderConfig(providerConfig: unknown): Record<string, unknown> {
  const isPlainObject =
    typeof providerConfig === 'object' && providerConfig !== null && !Array.isArray(providerConfig)
  return isPlainObject ? (providerConfig as Record<string, unknown>) : {}
}
/**
 * Resolves environment variable references inside a webhook provider config
 * object. Non-object configs are normalized to an empty object first, so this
 * always yields a plain record.
 */
export async function resolveWebhookProviderConfig(
  providerConfig: unknown,
  userId: string,
  workspaceId?: string
): Promise<Record<string, unknown>> {
  const normalized = normalizeWebhookProviderConfig(providerConfig)
  return resolveEnvVarsInObject(normalized, userId, workspaceId)
}
/**
 * Clones a webhook-like record with its provider config resolved for runtime
 * use. The input record is never mutated — a new object is returned with the
 * resolved config substituted in.
 */
export async function resolveWebhookRecordProviderConfig<T extends { providerConfig?: unknown }>(
  webhookRecord: T,
  userId: string,
  workspaceId?: string
): Promise<T & { providerConfig: Record<string, unknown> }> {
  const resolvedConfig = await resolveWebhookProviderConfig(
    webhookRecord.providerConfig,
    userId,
    workspaceId
  )
  return { ...webhookRecord, providerConfig: resolvedConfig }
}

View File

@@ -111,8 +111,50 @@ export const DEFAULT_EXPORT_FORMATS: Record<string, string> = {
'application/vnd.google-apps.spreadsheet': 'text/csv',
'application/vnd.google-apps.presentation': 'text/plain',
'application/vnd.google-apps.drawing': 'image/png',
'application/vnd.google-apps.form': 'application/pdf',
'application/vnd.google-apps.script': 'application/json',
'application/vnd.google-apps.form': 'application/zip',
'application/vnd.google-apps.script': 'application/vnd.google-apps.script+json',
}
/**
 * Valid export MIME types per Google Workspace source file type, used to
 * reject unsupported export requests before calling the Drive API.
 * See: https://developers.google.com/drive/api/guides/ref-export-formats
 */
export const VALID_EXPORT_FORMATS: Record<string, string[]> = {
  // Google Docs
  'application/vnd.google-apps.document': [
    'text/plain',
    'text/html',
    'application/pdf',
    'application/vnd.openxmlformats-officedocument.wordprocessingml.document', // .docx
    'application/vnd.oasis.opendocument.text', // .odt
    'application/rtf',
    'application/epub+zip',
    'text/markdown',
  ],
  // Google Sheets
  'application/vnd.google-apps.spreadsheet': [
    'text/csv',
    'text/tab-separated-values',
    'application/pdf',
    'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', // .xlsx
    'application/vnd.oasis.opendocument.spreadsheet', // .ods
    'application/zip', // zipped HTML
  ],
  // Google Slides
  'application/vnd.google-apps.presentation': [
    'text/plain',
    'application/pdf',
    'application/vnd.openxmlformats-officedocument.presentationml.presentation', // .pptx
    'application/vnd.oasis.opendocument.presentation', // .odp
    'image/jpeg',
    'image/png',
    'image/svg+xml',
  ],
  // Google Drawings
  'application/vnd.google-apps.drawing': [
    'application/pdf',
    'image/jpeg',
    'image/png',
    'image/svg+xml',
  ],
  // Google Forms and Apps Script each support a single export format.
  'application/vnd.google-apps.form': ['application/zip'],
  'application/vnd.google-apps.script': ['application/vnd.google-apps.script+json'],
}
export const SOURCE_MIME_TYPES: Record<string, string> = {

View File

@@ -42,7 +42,8 @@ export const jiraUpdateTool: ToolConfig<JiraUpdateParams, JiraUpdateResponse> =
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'New description for the issue',
description:
'New description for the issue. Accepts plain text (auto-wrapped in ADF) or a raw ADF document object',
},
priority: {
type: 'string',

View File

@@ -5,6 +5,51 @@ const logger = createLogger('JiraUtils')
const MAX_ATTACHMENT_SIZE = 50 * 1024 * 1024
/**
 * Converts a value to an ADF (Atlassian Document Format) document.
 *
 * Accepted inputs:
 * - A full ADF document (`type: 'doc'`): returned as-is.
 * - A partial ADF node (has `type` and a `content` array): wrapped in a doc envelope.
 * - A JSON string encoding either of the above (produced when upstream blocks
 *   JSON.stringify object outputs): parsed and handled as the object form.
 * - Any other string: wrapped as plain text in a single-paragraph doc.
 * - Any other object: JSON.stringify'd and wrapped as plain text, avoiding the
 *   "[object Object]" result String() would produce.
 */
export function toAdf(value: string | Record<string, unknown>): Record<string, unknown> {
  // Explicit null guard: typeof null === 'object', so without it a null that
  // slipped past the type system would throw when reading `.type`.
  if (typeof value === 'object' && value !== null) {
    const doc = asAdfDocument(value)
    if (doc) {
      return doc
    }
  }

  // Only strings that could encode a JSON object are worth parsing; this keeps
  // ordinary plain-text values off the exception path entirely.
  if (typeof value === 'string' && value.trimStart().startsWith('{')) {
    try {
      const parsed: unknown = JSON.parse(value)
      if (typeof parsed === 'object' && parsed !== null && !Array.isArray(parsed)) {
        const doc = asAdfDocument(parsed as Record<string, unknown>)
        if (doc) {
          return doc
        }
      }
    } catch {
      // Not valid JSON — fall through and treat as plain text.
    }
  }

  // Fallback: wrap as plain text in a minimal single-paragraph ADF document.
  return {
    type: 'doc',
    version: 1,
    content: [
      {
        type: 'paragraph',
        content: [
          { type: 'text', text: typeof value === 'string' ? value : JSON.stringify(value) },
        ],
      },
    ],
  }
}

/**
 * Classifies an object as ADF: a full document is returned unchanged, a partial
 * ADF node (`type` plus a `content` array) is wrapped in a doc envelope, and
 * anything else yields null so the caller can fall back to plain-text handling.
 */
function asAdfDocument(node: Record<string, unknown>): Record<string, unknown> | null {
  if (node.type === 'doc') {
    return node
  }
  if (node.type && Array.isArray(node.content)) {
    return { type: 'doc', version: 1, content: [node] }
  }
  return null
}
/**
* Extracts plain text from Atlassian Document Format (ADF) content.
* Returns null if content is falsy.

View File

@@ -42,7 +42,8 @@ export const jiraWriteTool: ToolConfig<JiraWriteParams, JiraWriteResponse> = {
type: 'string',
required: false,
visibility: 'user-or-llm',
description: 'Description for the issue',
description:
'Description for the issue. Accepts plain text (auto-wrapped in ADF) or a raw ADF document object',
},
priority: {
type: 'string',