Mirror of https://github.com/simstudioai/sim.git, synced 2026-02-03 03:04:57 -05:00

Compare commits: feat/timeo...feat/api-e (1 commit)

Commit: 2b29c8c258
@@ -14,7 +14,7 @@ All API requests require an API key, passed in the `x-api-key` header:

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -528,7 +528,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -142,7 +142,7 @@ GET /api/users/me/usage-limits
**Example request:**

```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example response:**

@@ -647,7 +647,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -965,7 +965,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -14,7 +14,7 @@ All API requests require an API key passed in the `x-api-key` header:

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -513,7 +513,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -160,7 +160,7 @@ GET /api/users/me/usage-limits

**Example Request:**
```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example Response:**

@@ -82,7 +82,7 @@ Submit forms programmatically:
<Tabs items={['cURL', 'TypeScript']}>
  <Tab value="cURL">
```bash
-curl -X POST https://sim.ai/api/form/your-identifier \
+curl -X POST https://api.sim.ai/api/form/your-identifier \
  -H "Content-Type: application/json" \
  -d '{
    "formData": {
@@ -94,7 +94,7 @@ curl -X POST https://sim.ai/api/form/your-identifier \
  </Tab>
  <Tab value="TypeScript">
```typescript
-const response = await fetch('https://sim.ai/api/form/your-identifier', {
+const response = await fetch('https://api.sim.ai/api/form/your-identifier', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({
@@ -115,14 +115,14 @@ const result = await response.json();

For password-protected forms:
```bash
-curl -X POST https://sim.ai/api/form/your-identifier \
+curl -X POST https://api.sim.ai/api/form/your-identifier \
  -H "Content-Type: application/json" \
  -d '{ "password": "secret", "formData": { "name": "John" } }'
```

For email-protected forms:
```bash
-curl -X POST https://sim.ai/api/form/your-identifier \
+curl -X POST https://api.sim.ai/api/form/your-identifier \
  -H "Content-Type: application/json" \
  -d '{ "email": "allowed@example.com", "formData": { "name": "John" } }'
```

@@ -655,7 +655,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -948,7 +948,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
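For reference, the same logs query against the new `api.sim.ai` base URL can be made from TypeScript as in the minimal sketch below. The workspace ID and API key are placeholders, and nothing is assumed about the response body beyond it being JSON.

```typescript
// Minimal sketch: query the external logs API at the new api.sim.ai base URL.
// YOUR_API_KEY and YOUR_WORKSPACE_ID are placeholders.
async function fetchLogs(): Promise<unknown> {
  const params = new URLSearchParams({ workspaceId: 'YOUR_WORKSPACE_ID' })
  const response = await fetch(`https://api.sim.ai/api/v1/logs?${params}`, {
    headers: { 'x-api-key': 'YOUR_API_KEY' },
  })
  if (!response.ok) {
    throw new Error(`Logs request failed: ${response.status}`)
  }
  return response.json()
}
```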
@@ -14,7 +14,7 @@ All API requests require an API key passed in the `x-api-key` header:

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -528,7 +528,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -142,7 +142,7 @@ GET /api/users/me/usage-limits
**Example request:**

```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example response:**

@@ -656,7 +656,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -965,7 +965,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -14,7 +14,7 @@ All API requests require an API key passed in the `x-api-key` header:

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -528,7 +528,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -142,7 +142,7 @@ GET /api/users/me/usage-limits
**Example request:**

```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example response:**

@@ -656,7 +656,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -965,7 +965,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -14,7 +14,7 @@ Sim lets you query workflow execution logs and workflow...

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -528,7 +528,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -142,7 +142,7 @@ GET /api/users/me/usage-limits
**Example request:**

```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example response:**

@@ -656,7 +656,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -965,7 +965,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -14,7 +14,7 @@ Sim provides a comprehensive external API for querying workflow execution logs and...

```bash
curl -H "x-api-key: YOUR_API_KEY" \
-  https://sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
+  https://api.sim.ai/api/v1/logs?workspaceId=YOUR_WORKSPACE_ID
```

You can generate API keys from your user settings in the Sim dashboard.
@@ -528,7 +528,7 @@ async function pollLogs() {
}

const response = await fetch(
-  `https://sim.ai/api/v1/logs?${params}`,
+  `https://api.sim.ai/api/v1/logs?${params}`,
  {
    headers: {
      'x-api-key': 'YOUR_API_KEY'

@@ -142,7 +142,7 @@ GET /api/users/me/usage-limits
**Example request:**

```bash
-curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://sim.ai/api/users/me/usage-limits
+curl -X GET -H "X-API-Key: YOUR_API_KEY" -H "Content-Type: application/json" https://api.sim.ai/api/users/me/usage-limits
```

**Example response:**

@@ -656,7 +656,7 @@ def stream_workflow():

def generate():
    response = requests.post(
-        'https://sim.ai/api/workflows/WORKFLOW_ID/execute',
+        'https://api.sim.ai/api/workflows/WORKFLOW_ID/execute',
        headers={
            'Content-Type': 'application/json',
            'X-API-Key': os.getenv('SIM_API_KEY')

@@ -965,7 +965,7 @@ function StreamingWorkflow() {

// IMPORTANT: Make this API call from your backend server, not the browser
// Never expose your API key in client-side code
-const response = await fetch('https://sim.ai/api/workflows/WORKFLOW_ID/execute', {
+const response = await fetch('https://api.sim.ai/api/workflows/WORKFLOW_ID/execute', {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -8,7 +8,6 @@ import { verifyCronAuth } from '@/lib/auth/internal'
const logger = createLogger('CleanupStaleExecutions')

const STALE_THRESHOLD_MINUTES = 30
-const MAX_INT32 = 2_147_483_647

export async function GET(request: NextRequest) {
try {
@@ -46,14 +45,13 @@ export async function GET(request: NextRequest) {
try {
  const staleDurationMs = Date.now() - new Date(execution.startedAt).getTime()
  const staleDurationMinutes = Math.round(staleDurationMs / 60000)
-  const totalDurationMs = Math.min(staleDurationMs, MAX_INT32)

  await db
    .update(workflowExecutionLogs)
    .set({
      status: 'failed',
      endedAt: new Date(),
-      totalDurationMs,
+      totalDurationMs: staleDurationMs,
      executionData: sql`jsonb_set(
        COALESCE(execution_data, '{}'::jsonb),
        ARRAY['error'],
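The dropped clamp caps the stored duration at 2,147,483,647, the largest value a signed 32-bit integer column can hold; without it, a sufficiently old stale execution could overflow the column (the column type is an assumption inferred from the constant's name). A standalone sketch of the clamping idea:

```typescript
// Sketch: clamp a millisecond duration so it fits a signed 32-bit integer column.
const MAX_INT32 = 2_147_483_647

function clampDurationMs(startedAt: Date): number {
  const elapsedMs = Date.now() - startedAt.getTime()
  return Math.min(elapsedMs, MAX_INT32)
}
```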
@@ -21,7 +21,6 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { generateInternalToken } from '@/lib/auth/internal'
-import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { getBaseUrl } from '@/lib/core/utils/urls'

const logger = createLogger('WorkflowMcpServeAPI')
@@ -265,7 +264,7 @@ async function handleToolsCall(
method: 'POST',
headers,
body: JSON.stringify({ input: params.arguments || {}, triggerType: 'mcp' }),
-signal: AbortSignal.timeout(getMaxExecutionTimeout()),
+signal: AbortSignal.timeout(600000), // 10 minute timeout
})

const executeResult = await response.json()
@@ -285,7 +284,7 @@ async function handleToolsCall(
content: [
  { type: 'text', text: JSON.stringify(executeResult.output || executeResult, null, 2) },
],
-isError: executeResult.success === false,
+isError: !executeResult.success,
}

return NextResponse.json(createResponse(id, result))
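Both sides of this hunk bound the internal execute call with `AbortSignal.timeout`; only the source of the limit changes (a plan-derived value versus a hardcoded 600000 ms). A minimal sketch of the pattern with a placeholder URL:

```typescript
// Sketch: abort a fetch if it runs longer than a fixed budget.
// The fetch promise rejects with a "TimeoutError" DOMException when the budget is exceeded.
async function executeWithTimeout(url: string, body: unknown, timeoutMs = 600_000): Promise<unknown> {
  const response = await fetch(url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body),
    signal: AbortSignal.timeout(timeoutMs),
  })
  return response.json()
}
```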
@@ -1,8 +1,5 @@
import { createLogger } from '@sim/logger'
import type { NextRequest } from 'next/server'
-import { getHighestPrioritySubscription } from '@/lib/billing/core/plan'
-import { getExecutionTimeout } from '@/lib/core/execution-limits'
-import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { getParsedBody, withMcpAuth } from '@/lib/mcp/middleware'
import { mcpService } from '@/lib/mcp/service'
import type { McpTool, McpToolCall, McpToolResult } from '@/lib/mcp/types'
@@ -10,6 +7,7 @@ import {
categorizeError,
createMcpErrorResponse,
createMcpSuccessResponse,
+MCP_CONSTANTS,
validateStringParam,
} from '@/lib/mcp/utils'

@@ -173,16 +171,13 @@ export const POST = withMcpAuth('read')(
arguments: args,
}

-const userSubscription = await getHighestPrioritySubscription(userId)
-const executionTimeout = getExecutionTimeout(
-  userSubscription?.plan as SubscriptionPlan | undefined,
-  'sync'
-)
-
const result = await Promise.race([
  mcpService.executeTool(userId, serverId, toolCall, workspaceId),
  new Promise<never>((_, reject) =>
-    setTimeout(() => reject(new Error('Tool execution timeout')), executionTimeout)
+    setTimeout(
+      () => reject(new Error('Tool execution timeout')),
+      MCP_CONSTANTS.EXECUTION_TIMEOUT
+    )
  ),
])

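The hunk above swaps where the timeout value comes from but keeps the same racing pattern: the tool call is raced against a timer that rejects. A self-contained sketch of that pattern (the helper name here is illustrative, not the repo's):

```typescript
// Sketch: race a task against a rejection timer so a hung call cannot block forever.
// Note: like the original, the timer is not cleared when the task settles first.
function withTimeout<T>(task: Promise<T>, timeoutMs: number): Promise<T> {
  return Promise.race([
    task,
    new Promise<never>((_, reject) =>
      setTimeout(() => reject(new Error('Tool execution timeout')), timeoutMs)
    ),
  ])
}

// Usage: const result = await withTimeout(mcpService.executeTool(...), MCP_CONSTANTS.EXECUTION_TIMEOUT)
```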
@@ -20,7 +20,6 @@ import { z } from 'zod'
import { getEmailSubject, renderInvitationEmail } from '@/components/emails'
import { getSession } from '@/lib/auth'
import { hasAccessControlAccess } from '@/lib/billing'
-import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -502,18 +501,6 @@ export async function PUT(
}
}

-if (status === 'accepted') {
-  try {
-    await syncUsageLimitsFromSubscription(session.user.id)
-  } catch (syncError) {
-    logger.error('Failed to sync usage limits after joining org', {
-      userId: session.user.id,
-      organizationId,
-      error: syncError,
-    })
-  }
-}
-
logger.info(`Organization invitation ${status}`, {
  organizationId,
  invitationId,
@@ -5,7 +5,6 @@ import { and, eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { z } from 'zod'
import { getSession } from '@/lib/auth'
-import { hasActiveSubscription } from '@/lib/billing'

const logger = createLogger('SubscriptionTransferAPI')

@@ -89,14 +88,6 @@ export async function POST(request: NextRequest, { params }: { params: Promise<{
)
}

-// Check if org already has an active subscription (prevent duplicates)
-if (await hasActiveSubscription(organizationId)) {
-  return NextResponse.json(
-    { error: 'Organization already has an active subscription' },
-    { status: 409 }
-  )
-}
-
await db
  .update(subscription)
  .set({ referenceId: organizationId })
@@ -203,10 +203,6 @@ export const PATCH = withAdminAuthParams<RouteParams>(async (request, context) =
}

updateData.billingBlocked = body.billingBlocked
-// Clear the reason when unblocking
-if (body.billingBlocked === false) {
-  updateData.billingBlockedReason = null
-}
updated.push('billingBlocked')
}

@@ -1,13 +1,13 @@
+import { db, workflow as workflowTable } from '@sim/db'
import { createLogger } from '@sim/logger'
+import { eq } from 'drizzle-orm'
import { type NextRequest, NextResponse } from 'next/server'
import { v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
-import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { markExecutionCancelled } from '@/lib/execution/cancellation'
-import { preprocessExecution } from '@/lib/execution/preprocessing'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { executeWorkflowCore } from '@/lib/workflows/executor/execution-core'
import { createSSECallbacks } from '@/lib/workflows/executor/execution-events'
@@ -75,31 +75,12 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const { startBlockId, sourceSnapshot, input } = validation.data
const executionId = uuidv4()

-// Run preprocessing checks (billing, rate limits, usage limits)
-const preprocessResult = await preprocessExecution({
-  workflowId,
-  userId,
-  triggerType: 'manual',
-  executionId,
-  requestId,
-  checkRateLimit: false, // Manual executions don't rate limit
-  checkDeployment: false, // Run-from-block doesn't require deployment
-})
+const [workflowRecord] = await db
+  .select({ workspaceId: workflowTable.workspaceId, userId: workflowTable.userId })
+  .from(workflowTable)
+  .where(eq(workflowTable.id, workflowId))
+  .limit(1)

-if (!preprocessResult.success) {
-  const { error } = preprocessResult
-  logger.warn(`[${requestId}] Preprocessing failed for run-from-block`, {
-    workflowId,
-    error: error?.message,
-    statusCode: error?.statusCode,
-  })
-  return NextResponse.json(
-    { error: error?.message || 'Execution blocked' },
-    { status: error?.statusCode || 500 }
-  )
-}
-
-const workflowRecord = preprocessResult.workflowRecord
if (!workflowRecord?.workspaceId) {
  return NextResponse.json({ error: 'Workflow not found or has no workspace' }, { status: 404 })
}
@@ -111,22 +92,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
workflowId,
startBlockId,
executedBlocksCount: sourceSnapshot.executedBlocks.length,
-billingActorUserId: preprocessResult.actorUserId,
})

const loggingSession = new LoggingSession(workflowId, executionId, 'manual', requestId)
const abortController = new AbortController()
let isStreamClosed = false
-let isTimedOut = false
-
-const syncTimeout = preprocessResult.executionTimeout?.sync
-let timeoutId: NodeJS.Timeout | undefined
-if (syncTimeout) {
-  timeoutId = setTimeout(() => {
-    isTimedOut = true
-    abortController.abort()
-  }, syncTimeout)
-}

const stream = new ReadableStream<Uint8Array>({
  async start(controller) {
@@ -178,33 +148,13 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
})

if (result.status === 'cancelled') {
-  if (isTimedOut && syncTimeout) {
-    const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout)
-    logger.info(`[${requestId}] Run-from-block execution timed out`, {
-      timeoutMs: syncTimeout,
-    })
-
-    await loggingSession.markAsFailed(timeoutErrorMessage)
-
-    sendEvent({
-      type: 'execution:error',
-      timestamp: new Date().toISOString(),
-      executionId,
-      workflowId,
-      data: {
-        error: timeoutErrorMessage,
-        duration: result.metadata?.duration || 0,
-      },
-    })
-  } else {
-    sendEvent({
-      type: 'execution:cancelled',
-      timestamp: new Date().toISOString(),
-      executionId,
-      workflowId,
-      data: { duration: result.metadata?.duration || 0 },
-    })
-  }
+  sendEvent({
+    type: 'execution:cancelled',
+    timestamp: new Date().toISOString(),
+    executionId,
+    workflowId,
+    data: { duration: result.metadata?.duration || 0 },
+  })
} else {
  sendEvent({
    type: 'execution:completed',
@@ -221,25 +171,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  })
}
} catch (error: unknown) {
-  const isTimeout = isTimeoutError(error) || isTimedOut
-  const errorMessage = isTimeout
-    ? getTimeoutErrorMessage(error, syncTimeout)
-    : error instanceof Error
-      ? error.message
-      : 'Unknown error'
-
-  logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`, {
-    isTimeout,
-  })
+  const errorMessage = error instanceof Error ? error.message : 'Unknown error'
+  logger.error(`[${requestId}] Run-from-block execution failed: ${errorMessage}`)

  const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

  await loggingSession.safeCompleteWithError({
    totalDurationMs: executionResult?.metadata?.duration,
    error: { message: errorMessage },
    traceSpans: executionResult?.logs as any,
  })

  sendEvent({
    type: 'execution:error',
    timestamp: new Date().toISOString(),
@@ -251,7 +187,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  },
})
} finally {
-  if (timeoutId) clearTimeout(timeoutId)
  if (!isStreamClosed) {
    try {
      controller.enqueue(new TextEncoder().encode('data: [DONE]\n\n'))
@@ -262,7 +197,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
},
cancel() {
  isStreamClosed = true
-  if (timeoutId) clearTimeout(timeoutId)
  abortController.abort()
  markExecutionCancelled(executionId).catch(() => {})
},

@@ -5,7 +5,6 @@ import { validate as uuidValidate, v4 as uuidv4 } from 'uuid'
import { z } from 'zod'
import { checkHybridAuth } from '@/lib/auth/hybrid'
import { isTriggerDevEnabled } from '@/lib/core/config/feature-flags'
-import { getTimeoutErrorMessage, isTimeoutError } from '@/lib/core/execution-limits'
import { generateRequestId } from '@/lib/core/utils/request'
import { SSE_HEADERS } from '@/lib/core/utils/sse'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -121,6 +120,10 @@ type AsyncExecutionParams = {
triggerType: CoreTriggerType
}

+/**
+ * Handles async workflow execution by queueing a background job.
+ * Returns immediately with a 202 Accepted response containing the job ID.
+ */
async function handleAsyncExecution(params: AsyncExecutionParams): Promise<NextResponse> {
const { requestId, workflowId, userId, input, triggerType } = params

@@ -402,7 +405,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

if (!enableSSE) {
  logger.info(`[${requestId}] Using non-SSE execution (direct JSON response)`)
-  const syncTimeout = preprocessResult.executionTimeout?.sync
  try {
    const metadata: ExecutionMetadata = {
      requestId,
@@ -436,7 +438,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
      includeFileBase64,
      base64MaxBytes,
      stopAfterBlockId,
-      abortSignal: syncTimeout ? AbortSignal.timeout(syncTimeout) : undefined,
    })

    const outputWithBase64 = includeFileBase64
@@ -472,23 +473,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:

    return NextResponse.json(filteredResult)
  } catch (error: unknown) {
-    const isTimeout = isTimeoutError(error)
-    const errorMessage = isTimeout
-      ? getTimeoutErrorMessage(error, syncTimeout)
-      : error instanceof Error
-        ? error.message
-        : 'Unknown error'
-
-    logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`, { isTimeout })
+    const errorMessage = error instanceof Error ? error.message : 'Unknown error'
+    logger.error(`[${requestId}] Non-SSE execution failed: ${errorMessage}`)

    const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

    await loggingSession.safeCompleteWithError({
      totalDurationMs: executionResult?.metadata?.duration,
      error: { message: errorMessage },
      traceSpans: executionResult?.logs as any,
    })

    return NextResponse.json(
      {
        success: false,
@@ -502,7 +491,7 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
          }
        : undefined,
      },
-      { status: isTimeout ? 408 : 500 }
+      { status: 500 }
    )
  }
}
@@ -548,16 +537,6 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
const encoder = new TextEncoder()
const abortController = new AbortController()
let isStreamClosed = false
-let isTimedOut = false
-
-const syncTimeout = preprocessResult.executionTimeout?.sync
-let timeoutId: NodeJS.Timeout | undefined
-if (syncTimeout) {
-  timeoutId = setTimeout(() => {
-    isTimedOut = true
-    abortController.abort()
-  }, syncTimeout)
-}

const stream = new ReadableStream<Uint8Array>({
  async start(controller) {
@@ -784,35 +763,16 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
}

if (result.status === 'cancelled') {
-  if (isTimedOut && syncTimeout) {
-    const timeoutErrorMessage = getTimeoutErrorMessage(null, syncTimeout)
-    logger.info(`[${requestId}] Workflow execution timed out`, { timeoutMs: syncTimeout })
-
-    await loggingSession.markAsFailed(timeoutErrorMessage)
-
-    sendEvent({
-      type: 'execution:error',
-      timestamp: new Date().toISOString(),
-      executionId,
-      workflowId,
-      data: {
-        error: timeoutErrorMessage,
-        duration: result.metadata?.duration || 0,
-      },
-    })
-  } else {
-    logger.info(`[${requestId}] Workflow execution was cancelled`)
-
-    sendEvent({
-      type: 'execution:cancelled',
-      timestamp: new Date().toISOString(),
-      executionId,
-      workflowId,
-      data: {
-        duration: result.metadata?.duration || 0,
-      },
-    })
-  }
+  logger.info(`[${requestId}] Workflow execution was cancelled`)
+  sendEvent({
+    type: 'execution:cancelled',
+    timestamp: new Date().toISOString(),
+    executionId,
+    workflowId,
+    data: {
+      duration: result.metadata?.duration || 0,
+    },
+  })
  return
}

@@ -839,23 +799,11 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
// Cleanup base64 cache for this execution
await cleanupExecutionBase64Cache(executionId)
} catch (error: unknown) {
-  const isTimeout = isTimeoutError(error) || isTimedOut
-  const errorMessage = isTimeout
-    ? getTimeoutErrorMessage(error, syncTimeout)
-    : error instanceof Error
-      ? error.message
-      : 'Unknown error'
-
-  logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`, { isTimeout })
+  const errorMessage = error instanceof Error ? error.message : 'Unknown error'
+  logger.error(`[${requestId}] SSE execution failed: ${errorMessage}`)

  const executionResult = hasExecutionResult(error) ? error.executionResult : undefined

  await loggingSession.safeCompleteWithError({
    totalDurationMs: executionResult?.metadata?.duration,
    error: { message: errorMessage },
    traceSpans: executionResult?.logs as any,
  })

  sendEvent({
    type: 'execution:error',
    timestamp: new Date().toISOString(),
@@ -867,18 +815,18 @@ export async function POST(req: NextRequest, { params }: { params: Promise<{ id:
  },
})
} finally {
-  if (timeoutId) clearTimeout(timeoutId)
  if (!isStreamClosed) {
    try {
      controller.enqueue(encoder.encode('data: [DONE]\n\n'))
      controller.close()
-    } catch {}
+    } catch {
+      // Stream already closed - nothing to do
+    }
  }
}
},
cancel() {
  isStreamClosed = true
-  if (timeoutId) clearTimeout(timeoutId)
  logger.info(`[${requestId}] Client aborted SSE stream, signalling cancellation`)
  abortController.abort()
  markExecutionCancelled(executionId).catch(() => {})

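Both branches of this route terminate the stream by enqueuing a literal `data: [DONE]` frame before closing. A hedged client-side sketch of consuming such a stream is shown below; event names and payload shapes beyond the `[DONE]` sentinel are assumptions, and multi-line SSE data frames are not handled.

```typescript
// Sketch: read an SSE-style response body until a "data: [DONE]" frame arrives.
async function readExecutionStream(response: Response): Promise<void> {
  const reader = response.body!.getReader()
  const decoder = new TextDecoder()
  let buffer = ''
  while (true) {
    const { value, done } = await reader.read()
    if (done) break
    buffer += decoder.decode(value, { stream: true })
    const frames = buffer.split('\n\n')
    buffer = frames.pop() ?? '' // keep any partial frame for the next chunk
    for (const frame of frames) {
      const data = frame.replace(/^data: /, '')
      if (data === '[DONE]') return
      console.log('event payload:', data)
    }
  }
}
```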
@@ -16,7 +16,7 @@ import {
ModalTabsList,
ModalTabsTrigger,
} from '@/components/emcn'
-import { getBaseUrl } from '@/lib/core/utils/urls'
+import { getApiUrl } from '@/lib/core/utils/urls'
import { getInputFormatExample as getInputFormatExampleUtil } from '@/lib/workflows/operations/deployment-utils'
import { useUserPermissionsContext } from '@/app/workspace/[workspaceId]/providers/workspace-permissions-provider'
import { CreateApiKeyModal } from '@/app/workspace/[workspaceId]/w/components/sidebar/components/settings-modal/components/api-keys/components'
@@ -201,7 +201,7 @@ export function DeployModal({
return null
}

-const endpoint = `${getBaseUrl()}/api/workflows/${workflowId}/execute`
+const endpoint = `${getApiUrl()}/api/workflows/${workflowId}/execute`
const inputFormatExample = getInputFormatExample(selectedStreamingOutputs.length > 0)
const placeholderKey = getApiHeaderPlaceholder()

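`getApiUrl` replaces `getBaseUrl` when building the displayed execute endpoint, but its implementation is not part of this compare. A purely hypothetical sketch of what such a helper could look like, assuming it prefers an API-specific environment variable and falls back to the new API host:

```typescript
// Hypothetical sketch only: the real getApiUrl lives in '@/lib/core/utils/urls'
// and is not shown in this diff. NEXT_PUBLIC_SIM_API_URL is an assumed variable name.
export function getApiUrl(): string {
  return process.env.NEXT_PUBLIC_SIM_API_URL ?? 'https://api.sim.ai'
}
```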
@@ -50,12 +50,6 @@ import { useSubBlockStore } from '@/stores/workflows/subblock/store'
/** Stable empty object to avoid creating new references */
const EMPTY_SUBBLOCK_VALUES = {} as Record<string, any>

-/** Shared style for dashed divider lines */
-const DASHED_DIVIDER_STYLE = {
-  backgroundImage:
-    'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
-} as const
-
/**
 * Icon component for rendering block icons.
 *
@@ -95,23 +89,31 @@ export function Editor() {
const blockConfig = currentBlock ? getBlock(currentBlock.type) : null
const title = currentBlock?.name || 'Editor'

+// Check if selected block is a subflow (loop or parallel)
const isSubflow =
  currentBlock && (currentBlock.type === 'loop' || currentBlock.type === 'parallel')

+// Get subflow display properties from configs
const subflowConfig = isSubflow ? (currentBlock.type === 'loop' ? LoopTool : ParallelTool) : null

+// Check if selected block is a workflow block
const isWorkflowBlock =
  currentBlock && (currentBlock.type === 'workflow' || currentBlock.type === 'workflow_input')

+// Get workspace ID from params
const params = useParams()
const workspaceId = params.workspaceId as string

+// Refs for resize functionality
const subBlocksRef = useRef<HTMLDivElement>(null)

+// Get user permissions
const userPermissions = useUserPermissionsContext()

+// Get active workflow ID
const activeWorkflowId = useWorkflowRegistry((state) => state.activeWorkflowId)

+// Get block properties (advanced/trigger modes)
const { advancedMode, triggerMode } = useEditorBlockProperties(
  currentBlockId,
  currentWorkflow.isSnapshotView
@@ -143,9 +145,10 @@ export function Editor() {
  [subBlocksForCanonical]
)
const canonicalModeOverrides = currentBlock?.data?.canonicalModes
-const advancedValuesPresent = useMemo(
-  () => hasAdvancedValues(subBlocksForCanonical, blockSubBlockValues, canonicalIndex),
-  [subBlocksForCanonical, blockSubBlockValues, canonicalIndex]
+const advancedValuesPresent = hasAdvancedValues(
+  subBlocksForCanonical,
+  blockSubBlockValues,
+  canonicalIndex
)
const displayAdvancedOptions = userPermissions.canEdit
  ? advancedMode
@@ -153,9 +156,11 @@ export function Editor() {

const hasAdvancedOnlyFields = useMemo(() => {
  for (const subBlock of subBlocksForCanonical) {
+    // Must be standalone advanced (mode: 'advanced' without canonicalParamId)
    if (subBlock.mode !== 'advanced') continue
    if (canonicalIndex.canonicalIdBySubBlockId[subBlock.id]) continue

+    // Check condition - skip if condition not met for current values
    if (
      subBlock.condition &&
      !evaluateSubBlockCondition(subBlock.condition, blockSubBlockValues)
@@ -168,6 +173,7 @@ export function Editor() {
  return false
}, [subBlocksForCanonical, canonicalIndex.canonicalIdBySubBlockId, blockSubBlockValues])

+// Get subblock layout using custom hook
const { subBlocks, stateToUse: subBlockState } = useEditorSubblockLayout(
  blockConfig || ({} as any),
  currentBlockId || '',
@@ -200,34 +206,31 @@ export function Editor() {
  return { regularSubBlocks: regular, advancedOnlySubBlocks: advancedOnly }
}, [subBlocks, canonicalIndex.canonicalIdBySubBlockId])

+// Get block connections
const { incomingConnections, hasIncomingConnections } = useBlockConnections(currentBlockId || '')

+// Connections resize hook
const { handleMouseDown: handleConnectionsResizeMouseDown, isResizing } = useConnectionsResize({
  subBlocksRef,
})

+// Collaborative actions
const {
  collaborativeSetBlockCanonicalMode,
  collaborativeUpdateBlockName,
  collaborativeToggleBlockAdvancedMode,
} = useCollaborativeWorkflow()

+// Advanced mode toggle handler
const handleToggleAdvancedMode = useCallback(() => {
  if (!currentBlockId || !userPermissions.canEdit) return
  collaborativeToggleBlockAdvancedMode(currentBlockId)
}, [currentBlockId, userPermissions.canEdit, collaborativeToggleBlockAdvancedMode])

+// Rename state
const [isRenaming, setIsRenaming] = useState(false)
const [editedName, setEditedName] = useState('')

-/**
- * Ref callback that auto-selects the input text when mounted.
- */
-const nameInputRefCallback = useCallback((element: HTMLInputElement | null) => {
-  if (element) {
-    element.select()
-  }
-}, [])
+const nameInputRef = useRef<HTMLInputElement>(null)

/**
 * Handles starting the rename process.
@@ -248,6 +251,7 @@ export function Editor() {
if (trimmedName && trimmedName !== currentBlock?.name) {
  const result = collaborativeUpdateBlockName(currentBlockId, trimmedName)
  if (!result.success) {
+    // Keep rename mode open on error so user can correct the name
    return
  }
}
@@ -262,6 +266,14 @@ export function Editor() {
  setEditedName('')
}, [])

+// Focus input when entering rename mode
+useEffect(() => {
+  if (isRenaming && nameInputRef.current) {
+    nameInputRef.current.select()
+  }
+}, [isRenaming])
+
// Trigger rename mode when signaled from context menu
useEffect(() => {
  if (shouldFocusRename && currentBlock) {
    handleStartRename()
@@ -272,13 +284,17 @@ export function Editor() {
/**
 * Handles opening documentation link in a new secure tab.
 */
-const handleOpenDocs = useCallback(() => {
+const handleOpenDocs = () => {
  const docsLink = isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink
-  window.open(docsLink || 'https://docs.sim.ai/quick-reference', '_blank', 'noopener,noreferrer')
-}, [isSubflow, subflowConfig?.docsLink, blockConfig?.docsLink])
+  if (docsLink) {
+    window.open(docsLink, '_blank', 'noopener,noreferrer')
+  }
+}

+// Get child workflow ID for workflow blocks
const childWorkflowId = isWorkflowBlock ? blockSubBlockValues?.workflowId : null

+// Fetch child workflow state for preview (only for workflow blocks with a selected workflow)
const { data: childWorkflowState, isLoading: isLoadingChildWorkflow } =
  useWorkflowState(childWorkflowId)

@@ -291,6 +307,7 @@ export function Editor() {
  }
}, [childWorkflowId, workspaceId])

+// Determine if connections are at minimum height (collapsed state)
const isConnectionsAtMinHeight = connectionsHeight <= 35

return (
@@ -311,7 +328,7 @@ export function Editor() {
)}
{isRenaming ? (
  <input
-    ref={nameInputRefCallback}
+    ref={nameInputRef}
    type='text'
    value={editedName}
    onChange={(e) => setEditedName(e.target.value)}
@@ -382,21 +399,23 @@ export function Editor() {
  </Tooltip.Content>
</Tooltip.Root>
)} */}
-<Tooltip.Root>
-  <Tooltip.Trigger asChild>
-    <Button
-      variant='ghost'
-      className='p-0'
-      onClick={handleOpenDocs}
-      aria-label='Open documentation'
-    >
-      <BookOpen className='h-[14px] w-[14px]' />
-    </Button>
-  </Tooltip.Trigger>
-  <Tooltip.Content side='top'>
-    <p>Open docs</p>
-  </Tooltip.Content>
-</Tooltip.Root>
+{currentBlock && (isSubflow ? subflowConfig?.docsLink : blockConfig?.docsLink) && (
+  <Tooltip.Root>
+    <Tooltip.Trigger asChild>
+      <Button
+        variant='ghost'
+        className='p-0'
+        onClick={handleOpenDocs}
+        aria-label='Open documentation'
+      >
+        <BookOpen className='h-[14px] w-[14px]' />
+      </Button>
+    </Tooltip.Trigger>
+    <Tooltip.Content side='top'>
+      <p>Open docs</p>
+    </Tooltip.Content>
+  </Tooltip.Root>
+)}
</div>
</div>

@@ -476,7 +495,13 @@ export function Editor() {
</div>
</div>
<div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
-  <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
+  <div
+    className='h-[1.25px]'
+    style={{
+      backgroundImage:
+        'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
+    }}
+  />
</div>
</>
)}
@@ -541,7 +566,13 @@ export function Editor() {
/>
{showDivider && (
  <div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
-    <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
+    <div
+      className='h-[1.25px]'
+      style={{
+        backgroundImage:
+          'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
+      }}
+    />
  </div>
)}
</div>
@@ -550,7 +581,13 @@ export function Editor() {

{hasAdvancedOnlyFields && userPermissions.canEdit && (
  <div className='flex items-center gap-[10px] px-[2px] pt-[14px] pb-[12px]'>
-    <div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
+    <div
+      className='h-[1.25px] flex-1'
+      style={{
+        backgroundImage:
+          'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
+      }}
+    />
    <button
      type='button'
      onClick={handleToggleAdvancedMode}
@@ -563,7 +600,13 @@ export function Editor() {
      className={`h-[14px] w-[14px] transition-transform duration-200 ${displayAdvancedOptions ? 'rotate-180' : ''}`}
    />
    </button>
-    <div className='h-[1.25px] flex-1' style={DASHED_DIVIDER_STYLE} />
+    <div
+      className='h-[1.25px] flex-1'
+      style={{
+        backgroundImage:
+          'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
+      }}
+    />
  </div>
)}

@@ -587,7 +630,13 @@ export function Editor() {
/>
{index < advancedOnlySubBlocks.length - 1 && (
  <div className='subblock-divider px-[2px] pt-[16px] pb-[13px]'>
-    <div className='h-[1.25px]' style={DASHED_DIVIDER_STYLE} />
+    <div
+      className='h-[1.25px]'
+      style={{
+        backgroundImage:
+          'repeating-linear-gradient(to right, var(--border) 0px, var(--border) 6px, transparent 6px, transparent 12px)',
+      }}
+    />
  </div>
)}
</div>

@@ -27,7 +27,7 @@ import { useExecutionStore } from '@/stores/execution'
import { useNotificationStore } from '@/stores/notifications'
import { useVariablesStore } from '@/stores/panel'
import { useEnvironmentStore } from '@/stores/settings/environment'
-import { useTerminalConsoleStore } from '@/stores/terminal'
+import { type ConsoleEntry, useTerminalConsoleStore } from '@/stores/terminal'
import { useWorkflowDiffStore } from '@/stores/workflow-diff'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
import { mergeSubblockState } from '@/stores/workflows/utils'
@@ -1153,29 +1153,30 @@ export function useWorkflowExecution() {
logs: accumulatedBlockLogs,
}

-if (activeWorkflowId) {
-  cancelRunningEntries(activeWorkflowId)
-}
+// Only add workflow-level error if no blocks have executed yet
+// This catches pre-execution errors (validation, serialization, etc.)
+// Block execution errors are already logged via onBlockError callback
+const { entries } = useTerminalConsoleStore.getState()
+const existingLogs = entries.filter(
+  (log: ConsoleEntry) => log.executionId === executionId
+)

-addConsole({
-  input: {},
-  output: {},
-  success: false,
-  error: data.error,
-  durationMs: data.duration || 0,
-  startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
-  endedAt: new Date().toISOString(),
-  workflowId: activeWorkflowId,
-  blockId: 'workflow-error',
-  executionId,
-  blockName: 'Workflow Error',
-  blockType: 'error',
-})
-},
-
-onExecutionCancelled: () => {
-  if (activeWorkflowId) {
-    cancelRunningEntries(activeWorkflowId)
+if (existingLogs.length === 0) {
+  // No blocks executed yet - this is a pre-execution error
+  addConsole({
+    input: {},
+    output: {},
+    success: false,
+    error: data.error,
+    durationMs: data.duration || 0,
+    startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
+    endedAt: new Date().toISOString(),
+    workflowId: activeWorkflowId,
+    blockId: 'validation',
+    executionId,
+    blockName: 'Workflow Validation',
+    blockType: 'validation',
+  })
+}
+},
},
@@ -1717,28 +1718,13 @@ export function useWorkflowExecution() {
'Workflow was modified. Run the workflow again to enable running from block.',
workflowId,
})
} else {
  addNotification({
    level: 'error',
    message: data.error || 'Run from block failed',
    workflowId,
  })
}

-cancelRunningEntries(workflowId)
-
-addConsole({
-  input: {},
-  output: {},
-  success: false,
-  error: data.error,
-  durationMs: data.duration || 0,
-  startedAt: new Date(Date.now() - (data.duration || 0)).toISOString(),
-  endedAt: new Date().toISOString(),
-  workflowId,
-  blockId: 'workflow-error',
-  executionId,
-  blockName: 'Workflow Error',
-  blockType: 'error',
-})
},

onExecutionCancelled: () => {
  cancelRunningEntries(workflowId)
},
},
})

@@ -1,4 +1,5 @@
import { v4 as uuidv4 } from 'uuid'
+import { getApiUrl } from '@/lib/core/utils/urls'
import type { ExecutionResult, StreamingExecution } from '@/executor/types'
import { useExecutionStore } from '@/stores/execution'
import { useTerminalConsoleStore } from '@/stores/terminal'
@@ -41,7 +42,8 @@ export async function executeWorkflowWithFullLogging(
isClientSession: true,
}

-const response = await fetch(`/api/workflows/${activeWorkflowId}/execute`, {
+const apiUrl = getApiUrl()
+const response = await fetch(`${apiUrl}/api/workflows/${activeWorkflowId}/execute`, {
  method: 'POST',
  headers: {
    'Content-Type': 'application/json',
@@ -11,7 +11,7 @@ export const ApiTriggerBlock: BlockConfig = {
bestPractices: `
- Can run the workflow manually to test implementation when this is the trigger point.
- The input format determines variables accesssible in the following blocks. E.g. <api1.paramName>. You can set the value in the input format to test the workflow manually.
-- In production, the curl would come in as e.g. curl -X POST -H "X-API-Key: $SIM_API_KEY" -H "Content-Type: application/json" -d '{"paramName":"example"}' https://www.staging.sim.ai/api/workflows/9e7e4f26-fc5e-4659-b270-7ea474b14f4a/execute -- If user asks to test via API, you might need to clarify the API key.
+- In production, the curl would come in as e.g. curl -X POST -H "X-API-Key: $SIM_API_KEY" -H "Content-Type: application/json" -d '{"paramName":"example"}' https://api.sim.ai/api/workflows/9e7e4f26-fc5e-4659-b270-7ea474b14f4a/execute -- If user asks to test via API, you might need to clarify the API key.
`,
category: 'triggers',
hideFromToolbar: true,
@@ -185,16 +185,10 @@ export const HTTP = {
},
} as const

-import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
-
export const AGENT = {
DEFAULT_MODEL: 'claude-sonnet-4-5',
-get DEFAULT_FUNCTION_TIMEOUT() {
-  return getMaxExecutionTimeout()
-},
-get REQUEST_TIMEOUT() {
-  return getMaxExecutionTimeout()
-},
+DEFAULT_FUNCTION_TIMEOUT: 600000,
+REQUEST_TIMEOUT: 600000,
CUSTOM_TOOL_PREFIX: 'custom_',
} as const

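The practical difference between the two sides is when the timeout is read: the getter form re-evaluates `getMaxExecutionTimeout()` on every property access, while the literal form bakes 600000 ms in at module load. A small sketch of the getter pattern, with an illustrative stand-in for the limits helper:

```typescript
// Sketch: a const object whose timeout is computed lazily on each access.
function getMaxTimeoutMs(): number {
  // Illustrative stand-in; the repo reads this from '@/lib/core/execution-limits'.
  return Number(process.env.MAX_EXECUTION_TIMEOUT_MS ?? 600_000)
}

export const AGENT_SKETCH = {
  get REQUEST_TIMEOUT() {
    return getMaxTimeoutMs()
  },
} as const
```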
@@ -14,7 +14,7 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
const { signal, executionId } = options
const useRedis = isRedisCancellationEnabled() && !!executionId

-if (signal?.aborted) {
+if (!useRedis && signal?.aborted) {
  return false
}

@@ -27,7 +27,7 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
const cleanup = () => {
  if (mainTimeoutId) clearTimeout(mainTimeoutId)
  if (checkIntervalId) clearInterval(checkIntervalId)
-  if (signal) signal.removeEventListener('abort', onAbort)
+  if (!useRedis && signal) signal.removeEventListener('abort', onAbort)
}

const onAbort = () => {
@@ -37,10 +37,6 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
  resolve(false)
}

-if (signal) {
-  signal.addEventListener('abort', onAbort, { once: true })
-}
-
if (useRedis) {
  checkIntervalId = setInterval(async () => {
    if (resolved) return
@@ -53,6 +49,8 @@ const sleep = async (ms: number, options: SleepOptions = {}): Promise<boolean> =
    }
  } catch {}
}, CANCELLATION_CHECK_INTERVAL_MS)
+} else if (signal) {
+  signal.addEventListener('abort', onAbort, { once: true })
}

mainTimeoutId = setTimeout(() => {
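The sleep helper resolves `true` when the delay completes and `false` when it is cancelled, either through an AbortSignal or, when Redis-backed cancellation is enabled, a periodic check. A reduced sketch of just the signal-based path:

```typescript
// Sketch: an abortable sleep that resolves false if the signal fires first.
function abortableSleep(ms: number, signal?: AbortSignal): Promise<boolean> {
  if (signal?.aborted) return Promise.resolve(false)
  return new Promise((resolve) => {
    let settled = false
    const timeoutId = setTimeout(() => {
      if (settled) return
      settled = true
      resolve(true)
    }, ms)
    signal?.addEventListener(
      'abort',
      () => {
        if (settled) return
        settled = true
        clearTimeout(timeoutId)
        resolve(false)
      },
      { once: true }
    )
  })
}
```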
@@ -1,5 +1,6 @@
import { useCallback, useRef } from 'react'
import { createLogger } from '@sim/logger'
+import { getApiUrl } from '@/lib/core/utils/urls'
import type {
  BlockCompletedData,
  BlockErrorData,
@@ -151,7 +152,8 @@ export function useExecutionStream() {
currentExecutionRef.current = null

try {
-  const response = await fetch(`/api/workflows/${workflowId}/execute`, {
+  const apiUrl = getApiUrl()
+  const response = await fetch(`${apiUrl}/api/workflows/${workflowId}/execute`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
@@ -211,7 +213,8 @@ export function useExecutionStream() {
currentExecutionRef.current = null

try {
-  const response = await fetch(`/api/workflows/${workflowId}/execute-from-block`, {
+  const apiUrl = getApiUrl()
+  const response = await fetch(`${apiUrl}/api/workflows/${workflowId}/execute-from-block`, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
@@ -266,9 +269,13 @@ export function useExecutionStream() {
const cancel = useCallback(() => {
  const execution = currentExecutionRef.current
  if (execution) {
-    fetch(`/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`, {
-      method: 'POST',
-    }).catch(() => {})
+    const apiUrl = getApiUrl()
+    fetch(
+      `${apiUrl}/api/workflows/${execution.workflowId}/executions/${execution.executionId}/cancel`,
+      {
+        method: 'POST',
+      }
+    ).catch(() => {})
  }

  if (abortControllerRef.current) {

@@ -1,10 +1,7 @@
export { AGENT_CARD_PATH } from '@a2a-js/sdk'

-import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
-
export const A2A_PROTOCOL_VERSION = '0.3.0'

-export const A2A_DEFAULT_TIMEOUT = DEFAULT_EXECUTION_TIMEOUT_MS
+export const A2A_DEFAULT_TIMEOUT = 300000

/**
 * Maximum number of messages stored per task in the database.
@@ -1,37 +1,20 @@
import { db } from '@sim/db'
import * as schema from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
-import { hasActiveSubscription } from '@/lib/billing'

const logger = createLogger('BillingAuthorization')

/**
 * Check if a user is authorized to manage billing for a given reference ID
 * Reference ID can be either a user ID (individual subscription) or organization ID (team subscription)
- *
- * This function also performs duplicate subscription validation for organizations:
- * - Rejects if an organization already has an active subscription (prevents duplicates)
- * - Personal subscriptions (referenceId === userId) skip this check to allow upgrades
 */
export async function authorizeSubscriptionReference(
  userId: string,
  referenceId: string
): Promise<boolean> {
-  // User can always manage their own subscriptions (Pro upgrades, etc.)
+  // User can always manage their own subscriptions
  if (referenceId === userId) {
    return true
  }

-  // For organizations: check for existing active subscriptions to prevent duplicates
-  if (await hasActiveSubscription(referenceId)) {
-    logger.warn('Blocking checkout - active subscription already exists for organization', {
-      userId,
-      referenceId,
-    })
-    return false
-  }
-
  // Check if referenceId is an organizationId the user has admin rights to
  const members = await db
    .select()
@@ -25,11 +25,9 @@ export function useSubscriptionUpgrade() {
}

let currentSubscriptionId: string | undefined
-let allSubscriptions: any[] = []
try {
  const listResult = await client.subscription.list()
-  allSubscriptions = listResult.data || []
-  const activePersonalSub = allSubscriptions.find(
+  const activePersonalSub = listResult.data?.find(
    (sub: any) => sub.status === 'active' && sub.referenceId === userId
  )
  currentSubscriptionId = activePersonalSub?.id
@@ -52,25 +50,6 @@ export function useSubscriptionUpgrade() {
)

if (existingOrg) {
-  // Check if this org already has an active team subscription
-  const existingTeamSub = allSubscriptions.find(
-    (sub: any) =>
-      sub.status === 'active' &&
-      sub.referenceId === existingOrg.id &&
-      (sub.plan === 'team' || sub.plan === 'enterprise')
-  )
-
-  if (existingTeamSub) {
-    logger.warn('Organization already has an active team subscription', {
-      userId,
-      organizationId: existingOrg.id,
-      existingSubscriptionId: existingTeamSub.id,
-    })
-    throw new Error(
-      'This organization already has an active team subscription. Please manage it from the billing settings.'
-    )
-  }
-
  logger.info('Using existing organization for team plan upgrade', {
    userId,
    organizationId: existingOrg.id,
@@ -1,5 +1,5 @@
import { db } from '@sim/db'
-import { member, organization, subscription } from '@sim/db/schema'
+import { member, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray } from 'drizzle-orm'
import { checkEnterprisePlan, checkProPlan, checkTeamPlan } from '@/lib/billing/subscriptions/utils'
@@ -26,22 +26,10 @@ export async function getHighestPrioritySubscription(userId: string) {

let orgSubs: typeof personalSubs = []
if (orgIds.length > 0) {
-  // Verify orgs exist to filter out orphaned subscriptions
-  const existingOrgs = await db
-    .select({ id: organization.id })
-    .from(organization)
-    .where(inArray(organization.id, orgIds))
-
-  const validOrgIds = existingOrgs.map((o) => o.id)
-
-  if (validOrgIds.length > 0) {
-    orgSubs = await db
-      .select()
-      .from(subscription)
-      .where(
-        and(inArray(subscription.referenceId, validOrgIds), eq(subscription.status, 'active'))
-      )
-  }
+  orgSubs = await db
+    .select()
+    .from(subscription)
+    .where(and(inArray(subscription.referenceId, orgIds), eq(subscription.status, 'active')))
}

const allSubs = [...personalSubs, ...orgSubs]

@@ -25,28 +25,6 @@ const logger = createLogger('SubscriptionCore')
|
||||
|
||||
export { getHighestPrioritySubscription }
|
||||
|
||||
/**
|
||||
* Check if a referenceId (user ID or org ID) has an active subscription
|
||||
* Used for duplicate subscription prevention
|
||||
*
|
||||
* Fails closed: returns true on error to prevent duplicate creation
|
||||
*/
|
||||
export async function hasActiveSubscription(referenceId: string): Promise<boolean> {
|
||||
try {
|
||||
const [activeSub] = await db
|
||||
.select({ id: subscription.id })
|
||||
.from(subscription)
|
||||
.where(and(eq(subscription.referenceId, referenceId), eq(subscription.status, 'active')))
|
||||
.limit(1)
|
||||
|
||||
return !!activeSub
|
||||
} catch (error) {
|
||||
logger.error('Error checking active subscription', { error, referenceId })
|
||||
// Fail closed: assume subscription exists to prevent duplicate creation
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if user is on Pro plan (direct or via organization)
|
||||
*/
|
||||
|
||||
@@ -11,7 +11,6 @@ export {
|
||||
getHighestPrioritySubscription as getActiveSubscription,
|
||||
getUserSubscriptionState as getSubscriptionState,
|
||||
hasAccessControlAccess,
|
||||
hasActiveSubscription,
|
||||
hasCredentialSetsAccess,
|
||||
hasSSOAccess,
|
||||
isEnterpriseOrgAdminOrOwner,
|
||||
@@ -33,11 +32,6 @@ export {
|
||||
} from '@/lib/billing/core/usage'
|
||||
export * from '@/lib/billing/credits/balance'
|
||||
export * from '@/lib/billing/credits/purchase'
|
||||
export {
|
||||
blockOrgMembers,
|
||||
getOrgMemberIds,
|
||||
unblockOrgMembers,
|
||||
} from '@/lib/billing/organizations/membership'
|
||||
export * from '@/lib/billing/subscriptions/utils'
|
||||
export { canEditUsageLimit as canEditLimit } from '@/lib/billing/subscriptions/utils'
|
||||
export * from '@/lib/billing/types'
|
||||
|
||||
@@ -8,7 +8,6 @@ import {
|
||||
} from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq } from 'drizzle-orm'
|
||||
import { hasActiveSubscription } from '@/lib/billing'
|
||||
import { getPlanPricing } from '@/lib/billing/core/billing'
|
||||
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
|
||||
|
||||
@@ -160,16 +159,6 @@ export async function ensureOrganizationForTeamSubscription(
|
||||
if (existingMembership.length > 0) {
|
||||
const membership = existingMembership[0]
|
||||
if (membership.role === 'owner' || membership.role === 'admin') {
|
||||
// Check if org already has an active subscription (prevent duplicates)
|
||||
if (await hasActiveSubscription(membership.organizationId)) {
|
||||
logger.error('Organization already has an active subscription', {
|
||||
userId,
|
||||
organizationId: membership.organizationId,
|
||||
newSubscriptionId: subscription.id,
|
||||
})
|
||||
throw new Error('Organization already has an active subscription')
|
||||
}
|
||||
|
||||
logger.info('User already owns/admins an org, using it', {
|
||||
userId,
|
||||
organizationId: membership.organizationId,
|
||||
|
||||
@@ -15,86 +15,13 @@ import {
|
||||
userStats,
|
||||
} from '@sim/db/schema'
|
||||
import { createLogger } from '@sim/logger'
|
||||
import { and, eq, inArray, isNull, ne, or, sql } from 'drizzle-orm'
|
||||
import { and, eq, sql } from 'drizzle-orm'
|
||||
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
|
||||
import { requireStripeClient } from '@/lib/billing/stripe-client'
|
||||
import { validateSeatAvailability } from '@/lib/billing/validation/seat-management'
|
||||
|
||||
const logger = createLogger('OrganizationMembership')
|
||||
|
||||
export type BillingBlockReason = 'payment_failed' | 'dispute'
|
||||
|
||||
/**
|
||||
* Get all member user IDs for an organization
|
||||
*/
|
||||
export async function getOrgMemberIds(organizationId: string): Promise<string[]> {
|
||||
const members = await db
|
||||
.select({ userId: member.userId })
|
||||
.from(member)
|
||||
.where(eq(member.organizationId, organizationId))
|
||||
|
||||
return members.map((m) => m.userId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Block all members of an organization for billing reasons
|
||||
* Returns the number of members actually blocked
|
||||
*
|
||||
* Reason priority: dispute > payment_failed
|
||||
* A payment_failed block won't overwrite an existing dispute block
|
||||
*/
|
||||
export async function blockOrgMembers(
|
||||
organizationId: string,
|
||||
reason: BillingBlockReason
|
||||
): Promise<number> {
|
||||
const memberIds = await getOrgMemberIds(organizationId)
|
||||
|
||||
if (memberIds.length === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
// Don't overwrite dispute blocks with payment_failed (dispute is higher priority)
|
||||
const whereClause =
|
||||
reason === 'payment_failed'
|
||||
? and(
|
||||
inArray(userStats.userId, memberIds),
|
||||
or(ne(userStats.billingBlockedReason, 'dispute'), isNull(userStats.billingBlockedReason))
|
||||
)
|
||||
: inArray(userStats.userId, memberIds)
|
||||
|
||||
const result = await db
|
||||
.update(userStats)
|
||||
.set({ billingBlocked: true, billingBlockedReason: reason })
|
||||
.where(whereClause)
|
||||
.returning({ userId: userStats.userId })
|
||||
|
||||
return result.length
|
||||
}
|
||||
|
||||
/**
|
||||
* Unblock all members of an organization blocked for a specific reason
|
||||
* Only unblocks members blocked for the specified reason (not other reasons)
|
||||
* Returns the number of members actually unblocked
|
||||
*/
|
||||
export async function unblockOrgMembers(
|
||||
organizationId: string,
|
||||
reason: BillingBlockReason
|
||||
): Promise<number> {
|
||||
const memberIds = await getOrgMemberIds(organizationId)
|
||||
|
||||
if (memberIds.length === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
const result = await db
|
||||
.update(userStats)
|
||||
.set({ billingBlocked: false, billingBlockedReason: null })
|
||||
.where(and(inArray(userStats.userId, memberIds), eq(userStats.billingBlockedReason, reason)))
|
||||
.returning({ userId: userStats.userId })
|
||||
|
||||
return result.length
|
||||
}
|
||||
|
||||
export interface RestoreProResult {
|
||||
restored: boolean
|
||||
usageRestored: boolean
|
||||
|
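As an aside, a minimal usage sketch of the block/unblock helpers shown in the hunk above (illustrative only, not part of either branch); the organization ID is hypothetical and the import path follows the '@/lib/billing/organizations/membership' module referenced elsewhere in this comparison. It walks through the documented reason priority: a payment_failed block never downgrades an existing dispute block, and each reason is cleared independently.

```ts
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing/organizations/membership'

async function billingBlockCycleExample() {
  const orgId = 'org_123' // hypothetical organization ID

  // Block every member for a dispute; returns the number of rows updated.
  await blockOrgMembers(orgId, 'dispute')

  // A later payment failure does not overwrite the dispute reason,
  // so members already blocked for 'dispute' are left untouched.
  await blockOrgMembers(orgId, 'payment_failed')

  // Clearing payment_failed leaves dispute-blocked members blocked.
  await unblockOrgMembers(orgId, 'payment_failed')

  // Only clearing the dispute reason fully unblocks them.
  const unblocked = await unblockOrgMembers(orgId, 'dispute')
  console.log(`unblocked ${unblocked} members`)
}
```
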
@@ -1,9 +1,8 @@
import { db } from '@sim/db'
import { subscription, user, userStats } from '@sim/db/schema'
import { member, subscription, user, userStats } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq } from 'drizzle-orm'
import type Stripe from 'stripe'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing'
import { requireStripeClient } from '@/lib/billing/stripe-client'

const logger = createLogger('DisputeWebhooks')
@@ -58,34 +57,36 @@ export async function handleChargeDispute(event: Stripe.Event): Promise<void> {

if (subs.length > 0) {
const orgId = subs[0].referenceId
const memberCount = await blockOrgMembers(orgId, 'dispute')

if (memberCount > 0) {
logger.warn('Blocked all org members due to dispute', {
const owners = await db
.select({ userId: member.userId })
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)

if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'dispute' })
.where(eq(userStats.userId, owners[0].userId))

logger.warn('Blocked org owner due to dispute', {
disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId,
memberCount,
})
}
}
}

/**
* Handles charge.dispute.closed - unblocks user if dispute was won or warning closed
*
* Status meanings:
* - 'won': Merchant won, customer's chargeback denied → unblock
* - 'lost': Customer won, money refunded → stay blocked (they owe us)
* - 'warning_closed': Pre-dispute inquiry closed without chargeback → unblock (false alarm)
* Handles charge.dispute.closed - unblocks user if dispute was won
*/
export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
const dispute = event.data.object as Stripe.Dispute

// Only unblock if we won or the warning was closed without a full dispute
const shouldUnblock = dispute.status === 'won' || dispute.status === 'warning_closed'

if (!shouldUnblock) {
logger.info('Dispute resolved against us, user remains blocked', {
if (dispute.status !== 'won') {
logger.info('Dispute not won, user remains blocked', {
disputeId: dispute.id,
status: dispute.status,
})
@@ -97,7 +98,7 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
return
}

// Find and unblock user (Pro plans) - only if blocked for dispute, not other reasons
// Find and unblock user (Pro plans)
const users = await db
.select({ id: user.id })
.from(user)
@@ -108,17 +109,16 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(and(eq(userStats.userId, users[0].id), eq(userStats.billingBlockedReason, 'dispute')))
.where(eq(userStats.userId, users[0].id))

logger.info('Unblocked user after dispute resolved in our favor', {
logger.info('Unblocked user after winning dispute', {
disputeId: dispute.id,
userId: users[0].id,
status: dispute.status,
})
return
}

// Find and unblock all org members (Team/Enterprise) - consistent with payment success
// Find and unblock org owner (Team/Enterprise)
const subs = await db
.select({ referenceId: subscription.referenceId })
.from(subscription)
@@ -127,13 +127,24 @@ export async function handleDisputeClosed(event: Stripe.Event): Promise<void> {

if (subs.length > 0) {
const orgId = subs[0].referenceId
const memberCount = await unblockOrgMembers(orgId, 'dispute')

logger.info('Unblocked all org members after dispute resolved in our favor', {
disputeId: dispute.id,
organizationId: orgId,
memberCount,
status: dispute.status,
})
const owners = await db
.select({ userId: member.userId })
.from(member)
.where(and(eq(member.organizationId, orgId), eq(member.role, 'owner')))
.limit(1)

if (owners.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(eq(userStats.userId, owners[0].userId))

logger.info('Unblocked org owner after winning dispute', {
disputeId: dispute.id,
ownerId: owners[0].userId,
organizationId: orgId,
})
}
}
}

@@ -8,13 +8,12 @@ import {
userStats,
} from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, inArray, isNull, ne, or } from 'drizzle-orm'
import { and, eq, inArray } from 'drizzle-orm'
import type Stripe from 'stripe'
import { getEmailSubject, PaymentFailedEmail, renderCreditPurchaseEmail } from '@/components/emails'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { addCredits, getCreditBalance, removeCredits } from '@/lib/billing/credits/balance'
import { setUsageLimitForCredits } from '@/lib/billing/credits/purchase'
import { blockOrgMembers, unblockOrgMembers } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { sendEmail } from '@/lib/messaging/email/mailer'
@@ -503,7 +502,24 @@ export async function handleInvoicePaymentSucceeded(event: Stripe.Event) {
}

if (sub.plan === 'team' || sub.plan === 'enterprise') {
await unblockOrgMembers(sub.referenceId, 'payment_failed')
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)

if (memberIds.length > 0) {
// Only unblock users blocked for payment_failed, not disputes
await db
.update(userStats)
.set({ billingBlocked: false, billingBlockedReason: null })
.where(
and(
inArray(userStats.userId, memberIds),
eq(userStats.billingBlockedReason, 'payment_failed')
)
)
}
} else {
// Only unblock users blocked for payment_failed, not disputes
await db
@@ -600,26 +616,28 @@ export async function handleInvoicePaymentFailed(event: Stripe.Event) {
if (records.length > 0) {
const sub = records[0]
if (sub.plan === 'team' || sub.plan === 'enterprise') {
const memberCount = await blockOrgMembers(sub.referenceId, 'payment_failed')
const members = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, sub.referenceId))
const memberIds = members.map((m) => m.userId)

if (memberIds.length > 0) {
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(inArray(userStats.userId, memberIds))
}
logger.info('Blocked team/enterprise members due to payment failure', {
organizationId: sub.referenceId,
memberCount,
memberCount: members.length,
isOverageInvoice,
})
} else {
// Don't overwrite dispute blocks (dispute > payment_failed priority)
await db
.update(userStats)
.set({ billingBlocked: true, billingBlockedReason: 'payment_failed' })
.where(
and(
eq(userStats.userId, sub.referenceId),
or(
ne(userStats.billingBlockedReason, 'dispute'),
isNull(userStats.billingBlockedReason)
)
)
)
.where(eq(userStats.userId, sub.referenceId))
logger.info('Blocked user due to payment failure', {
userId: sub.referenceId,
isOverageInvoice,

@@ -3,7 +3,6 @@ import { member, organization, subscription } from '@sim/db/schema'
import { createLogger } from '@sim/logger'
import { and, eq, ne } from 'drizzle-orm'
import { calculateSubscriptionOverage } from '@/lib/billing/core/billing'
import { hasActiveSubscription } from '@/lib/billing/core/subscription'
import { syncUsageLimitsFromSubscription } from '@/lib/billing/core/usage'
import { restoreUserProSubscription } from '@/lib/billing/organizations/membership'
import { requireStripeClient } from '@/lib/billing/stripe-client'
@@ -53,37 +52,14 @@ async function restoreMemberProSubscriptions(organizationId: string): Promise<nu

/**
* Cleanup organization when team/enterprise subscription is deleted.
* - Checks if other active subscriptions point to this org (skip deletion if so)
* - Restores member Pro subscriptions
* - Deletes the organization (only if no other active subs)
* - Deletes the organization
* - Syncs usage limits for former members (resets to free or Pro tier)
*/
async function cleanupOrganizationSubscription(organizationId: string): Promise<{
restoredProCount: number
membersSynced: number
organizationDeleted: boolean
}> {
// Check if other active subscriptions still point to this org
// Note: The subscription being deleted is already marked as 'canceled' by better-auth
// before this handler runs, so we only find truly active ones
if (await hasActiveSubscription(organizationId)) {
logger.info('Skipping organization deletion - other active subscriptions exist', {
organizationId,
})

// Still sync limits for members since this subscription was deleted
const memberUserIds = await db
.select({ userId: member.userId })
.from(member)
.where(eq(member.organizationId, organizationId))

for (const m of memberUserIds) {
await syncUsageLimitsFromSubscription(m.userId)
}

return { restoredProCount: 0, membersSynced: memberUserIds.length, organizationDeleted: false }
}

// Get member userIds before deletion (needed for limit syncing after org deletion)
const memberUserIds = await db
.select({ userId: member.userId })
@@ -99,7 +75,7 @@ async function cleanupOrganizationSubscription(organizationId: string): Promise<
await syncUsageLimitsFromSubscription(m.userId)
}

return { restoredProCount, membersSynced: memberUserIds.length, organizationDeleted: true }
return { restoredProCount, membersSynced: memberUserIds.length }
}

/**
@@ -196,14 +172,15 @@ export async function handleSubscriptionDeleted(subscription: {
referenceId: subscription.referenceId,
})

const { restoredProCount, membersSynced, organizationDeleted } =
await cleanupOrganizationSubscription(subscription.referenceId)
const { restoredProCount, membersSynced } = await cleanupOrganizationSubscription(
subscription.referenceId
)

logger.info('Successfully processed enterprise subscription cancellation', {
subscriptionId: subscription.id,
stripeSubscriptionId,
restoredProCount,
organizationDeleted,
organizationDeleted: true,
membersSynced,
})
return
@@ -320,7 +297,7 @@ export async function handleSubscriptionDeleted(subscription: {
const cleanup = await cleanupOrganizationSubscription(subscription.referenceId)
restoredProCount = cleanup.restoredProCount
membersSynced = cleanup.membersSynced
organizationDeleted = cleanup.organizationDeleted
organizationDeleted = true
} else if (subscription.plan === 'pro') {
await syncUsageLimitsFromSubscription(subscription.referenceId)
membersSynced = 1

@@ -5,9 +5,11 @@ import type { ToolUIConfig } from './ui-config'

const baseToolLogger = createLogger('BaseClientTool')

const DEFAULT_TOOL_TIMEOUT_MS = 5 * 60 * 1000
/** Default timeout for tool execution (5 minutes) */
const DEFAULT_TOOL_TIMEOUT_MS = 2 * 60 * 1000

export const WORKFLOW_EXECUTION_TIMEOUT_MS = 5 * 60 * 1000
/** Timeout for tools that run workflows (10 minutes) */
export const WORKFLOW_EXECUTION_TIMEOUT_MS = 10 * 60 * 1000

// Client tool call states used by the new runtime
export enum ClientToolCallState {

@@ -5,6 +5,7 @@ import {
type BaseClientToolMetadata,
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { getApiUrl } from '@/lib/core/utils/urls'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'

interface CheckDeploymentStatusArgs {
@@ -103,11 +104,12 @@ export class CheckDeploymentStatusClientTool extends BaseClientTool {

// API deployment details
const isApiDeployed = apiDeploy?.isDeployed || false
const apiUrl = getApiUrl()
const appUrl = typeof window !== 'undefined' ? window.location.origin : ''
const apiDetails: ApiDeploymentDetails = {
isDeployed: isApiDeployed,
deployedAt: apiDeploy?.deployedAt || null,
endpoint: isApiDeployed ? `${appUrl}/api/workflows/${workflowId}/execute` : null,
endpoint: isApiDeployed ? `${apiUrl}/api/workflows/${workflowId}/execute` : null,
apiKey: apiDeploy?.apiKey || null,
needsRedeployment: apiDeploy?.needsRedeployment === true,
}

@@ -6,7 +6,7 @@ import {
ClientToolCallState,
} from '@/lib/copilot/tools/client/base-tool'
import { registerToolUIConfig } from '@/lib/copilot/tools/client/ui-config'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { getApiUrl } from '@/lib/core/utils/urls'
import { getInputFormatExample } from '@/lib/workflows/operations/deployment-utils'
import { useCopilotStore } from '@/stores/panel/copilot/store'
import { useWorkflowRegistry } from '@/stores/workflows/registry/store'
@@ -230,8 +230,8 @@ export class DeployApiClientTool extends BaseClientTool {
}

if (action === 'deploy') {
const appUrl = getBaseUrl()
const apiEndpoint = `${appUrl}/api/workflows/${workflowId}/execute`
const apiUrl = getApiUrl()
const apiEndpoint = `${apiUrl}/api/workflows/${workflowId}/execute`
const apiKeyPlaceholder = '$SIM_API_KEY'

const inputExample = getInputFormatExample(false)

@@ -170,11 +170,6 @@ export const env = createEnv({
RATE_LIMIT_ENTERPRISE_SYNC: z.string().optional().default('600'), // Enterprise tier sync API executions per minute
RATE_LIMIT_ENTERPRISE_ASYNC: z.string().optional().default('5000'), // Enterprise tier async API executions per minute

EXECUTION_TIMEOUT_FREE: z.string().optional().default('300'),
EXECUTION_TIMEOUT_PRO: z.string().optional().default('3600'),
EXECUTION_TIMEOUT_TEAM: z.string().optional().default('3600'),
EXECUTION_TIMEOUT_ENTERPRISE: z.string().optional().default('3600'),

// Knowledge Base Processing Configuration - Shared across all processing methods
KB_CONFIG_MAX_DURATION: z.number().optional().default(600), // Max processing duration in seconds (10 minutes)
KB_CONFIG_MAX_ATTEMPTS: z.number().optional().default(3), // Max retry attempts
@@ -312,6 +307,7 @@ export const env = createEnv({
client: {
// Core Application URLs - Required for frontend functionality
NEXT_PUBLIC_APP_URL: z.string().url(), // Base URL of the application (e.g., https://www.sim.ai)
NEXT_PUBLIC_API_URL: z.string().url().optional(), // API URL for workflow executions (e.g., https://api.sim.ai)

// Client-side Services
NEXT_PUBLIC_SOCKET_URL: z.string().url().optional(), // WebSocket server URL for real-time features
@@ -362,6 +358,7 @@ export const env = createEnv({

experimental__runtimeEnv: {
NEXT_PUBLIC_APP_URL: process.env.NEXT_PUBLIC_APP_URL,
NEXT_PUBLIC_API_URL: process.env.NEXT_PUBLIC_API_URL,
NEXT_PUBLIC_BILLING_ENABLED: process.env.NEXT_PUBLIC_BILLING_ENABLED,
NEXT_PUBLIC_SOCKET_URL: process.env.NEXT_PUBLIC_SOCKET_URL,
NEXT_PUBLIC_BRAND_NAME: process.env.NEXT_PUBLIC_BRAND_NAME,

@@ -1 +0,0 @@
export * from './types'
@@ -1,122 +0,0 @@
import { env } from '@/lib/core/config/env'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'

export interface ExecutionTimeoutConfig {
sync: number
async: number
}

const DEFAULT_SYNC_TIMEOUTS = {
free: 300,
pro: 3600,
team: 3600,
enterprise: 3600,
} as const

const ASYNC_TIMEOUT_SECONDS = 5400

function getSyncTimeoutForPlan(plan: SubscriptionPlan): number {
const envVarMap: Record<SubscriptionPlan, string | undefined> = {
free: env.EXECUTION_TIMEOUT_FREE,
pro: env.EXECUTION_TIMEOUT_PRO,
team: env.EXECUTION_TIMEOUT_TEAM,
enterprise: env.EXECUTION_TIMEOUT_ENTERPRISE,
}
return (Number.parseInt(envVarMap[plan] || '') || DEFAULT_SYNC_TIMEOUTS[plan]) * 1000
}

export const EXECUTION_TIMEOUTS: Record<SubscriptionPlan, ExecutionTimeoutConfig> = {
free: {
sync: getSyncTimeoutForPlan('free'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
pro: {
sync: getSyncTimeoutForPlan('pro'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
team: {
sync: getSyncTimeoutForPlan('team'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
enterprise: {
sync: getSyncTimeoutForPlan('enterprise'),
async: ASYNC_TIMEOUT_SECONDS * 1000,
},
}

export function getExecutionTimeout(
plan: SubscriptionPlan | undefined,
type: 'sync' | 'async' = 'sync'
): number {
return EXECUTION_TIMEOUTS[plan || 'free'][type]
}

export function getExecutionTimeoutSeconds(
plan: SubscriptionPlan | undefined,
type: 'sync' | 'async' = 'sync'
): number {
return Math.floor(getExecutionTimeout(plan, type) / 1000)
}

export function getMaxExecutionTimeout(): number {
return EXECUTION_TIMEOUTS.enterprise.async
}

export const DEFAULT_EXECUTION_TIMEOUT_MS = EXECUTION_TIMEOUTS.free.sync

export class ExecutionTimeoutError extends Error {
constructor(
public readonly timeoutMs: number,
public readonly plan?: SubscriptionPlan
) {
const timeoutSeconds = Math.floor(timeoutMs / 1000)
const timeoutMinutes = Math.floor(timeoutSeconds / 60)
const displayTime =
timeoutMinutes > 0
? `${timeoutMinutes} minute${timeoutMinutes > 1 ? 's' : ''}`
: `${timeoutSeconds} seconds`
super(`Execution timed out after ${displayTime}`)
this.name = 'ExecutionTimeoutError'
}
}

export function isTimeoutError(error: unknown): boolean {
if (error instanceof ExecutionTimeoutError) return true
if (!(error instanceof Error)) return false

const name = error.name.toLowerCase()
const message = error.message.toLowerCase()

return (
name === 'timeouterror' ||
name === 'aborterror' ||
message.includes('timeout') ||
message.includes('timed out') ||
message.includes('aborted')
)
}

export function createTimeoutError(
timeoutMs: number,
plan?: SubscriptionPlan
): ExecutionTimeoutError {
return new ExecutionTimeoutError(timeoutMs, plan)
}

export function getTimeoutErrorMessage(error: unknown, timeoutMs?: number): string {
if (error instanceof ExecutionTimeoutError) {
return error.message
}

if (timeoutMs) {
const timeoutSeconds = Math.floor(timeoutMs / 1000)
const timeoutMinutes = Math.floor(timeoutSeconds / 60)
const displayTime =
timeoutMinutes > 0
? `${timeoutMinutes} minute${timeoutMinutes > 1 ? 's' : ''}`
: `${timeoutSeconds} seconds`
return `Execution timed out after ${displayTime}`
}

return 'Execution timed out'
}
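For orientation, a small sketch of how the plan-based timeout helpers listed in the hunk above are meant to be consumed (illustrative only, not part of the diff); the values assume the defaults shown in DEFAULT_SYNC_TIMEOUTS and ASYNC_TIMEOUT_SECONDS unless the EXECUTION_TIMEOUT_* environment variables override them.

```ts
import {
  getExecutionTimeout,
  getExecutionTimeoutSeconds,
  getMaxExecutionTimeout,
} from '@/lib/core/execution-limits'

// Per-plan sync timeout in milliseconds (3600 s for 'pro' by default).
const proSyncMs = getExecutionTimeout('pro', 'sync')

// An undefined plan falls back to the free tier: 300 seconds by default.
const fallbackSeconds = getExecutionTimeoutSeconds(undefined)

// Async executions share one ceiling (5400 s); getMaxExecutionTimeout()
// returns the enterprise async value, the largest configured timeout.
const maxMs = getMaxExecutionTimeout()

console.log({ proSyncMs, fallbackSeconds, maxMs })
```
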
@@ -54,3 +54,22 @@ export function getEmailDomain(): string {
return isProd ? 'sim.ai' : 'localhost:3000'
}
}

/**
* Returns the API URL for workflow executions.
* Uses NEXT_PUBLIC_API_URL if configured, otherwise falls back to NEXT_PUBLIC_APP_URL.
* @returns The API URL string (e.g., 'https://api.sim.ai' or 'https://example.com')
*/
export function getApiUrl(): string {
const apiUrl = getEnv('NEXT_PUBLIC_API_URL')

if (apiUrl) {
if (apiUrl.startsWith('http://') || apiUrl.startsWith('https://')) {
return apiUrl
}
const protocol = isProd ? 'https://' : 'http://'
return `${protocol}${apiUrl}`
}

return getBaseUrl()
}

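A brief sketch of the fallback behaviour the new helper above is expected to have (illustrative only, not part of the diff); getBaseUrl and the WORKFLOW_ID placeholder follow the conventions used elsewhere in this comparison, and the hostnames are examples.

```ts
import { getApiUrl, getBaseUrl } from '@/lib/core/utils/urls'

// NEXT_PUBLIC_API_URL=https://api.sim.ai -> returned as-is.
// NEXT_PUBLIC_API_URL=api.sim.ai         -> protocol is prepended:
//                                           https:// in production, http:// otherwise.
// Variable unset                         -> falls back to getBaseUrl(),
//                                           i.e. the NEXT_PUBLIC_APP_URL-based app URL.
const executeEndpoint = `${getApiUrl()}/api/workflows/WORKFLOW_ID/execute`

console.log(executeEndpoint, getBaseUrl())
```
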
@@ -1,3 +1,7 @@
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
/**
* Execution timeout constants
*
* DEFAULT_EXECUTION_TIMEOUT_MS: The default timeout for executing user code (10 minutes)
*/

export { DEFAULT_EXECUTION_TIMEOUT_MS }
export const DEFAULT_EXECUTION_TIMEOUT_MS = 600000 // 10 minutes (600 seconds)

@@ -4,9 +4,7 @@ import { createLogger } from '@sim/logger'
import { eq } from 'drizzle-orm'
import { checkServerSideUsageLimits } from '@/lib/billing/calculations/usage-monitor'
import { getHighestPrioritySubscription } from '@/lib/billing/core/subscription'
import { getExecutionTimeout } from '@/lib/core/execution-limits'
import { RateLimiter } from '@/lib/core/rate-limiter/rate-limiter'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import { LoggingSession } from '@/lib/logs/execution/logging-session'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'
import type { CoreTriggerType } from '@/stores/logs/filters/types'
@@ -135,10 +133,10 @@ export interface PreprocessExecutionResult {
success: boolean
error?: {
message: string
statusCode: number
logCreated: boolean
statusCode: number // HTTP status code (401, 402, 403, 404, 429, 500)
logCreated: boolean // Whether error was logged to execution_logs
}
actorUserId?: string
actorUserId?: string // The user ID that will be billed
workflowRecord?: WorkflowRecord
userSubscription?: SubscriptionInfo | null
rateLimitInfo?: {
@@ -146,10 +144,6 @@ export interface PreprocessExecutionResult {
remaining: number
resetAt: Date
}
executionTimeout?: {
sync: number
async: number
}
}

type WorkflowRecord = typeof workflow.$inferSelect
@@ -490,17 +484,12 @@ export async function preprocessExecution(
triggerType,
})

const plan = userSubscription?.plan as SubscriptionPlan | undefined
return {
success: true,
actorUserId,
workflowRecord,
userSubscription,
rateLimitInfo,
executionTimeout: {
sync: getExecutionTimeout(plan, 'sync'),
async: getExecutionTimeout(plan, 'async'),
},
}
}


@@ -33,7 +33,6 @@ import type {
WorkflowExecutionSnapshot,
WorkflowState,
} from '@/lib/logs/types'
import { getWorkspaceBilledAccountUserId } from '@/lib/workspaces/utils'

export interface ToolCall {
name: string
@@ -504,7 +503,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
}

try {
// Get the workflow record to get workspace and fallback userId
// Get the workflow record to get the userId
const [workflowRecord] = await db
.select()
.from(workflow)
@@ -516,12 +515,7 @@ export class ExecutionLogger implements IExecutionLoggerService {
return
}

let billingUserId: string | null = null
if (workflowRecord.workspaceId) {
billingUserId = await getWorkspaceBilledAccountUserId(workflowRecord.workspaceId)
}

const userId = billingUserId || workflowRecord.userId
const userId = workflowRecord.userId
const costToStore = costSummary.totalCost

const existing = await db.select().from(userStats).where(eq(userStats.userId, userId))

@@ -776,16 +776,11 @@ export class LoggingSession {
await db
.update(workflowExecutionLogs)
.set({
level: 'error',
status: 'failed',
executionData: sql`jsonb_set(
jsonb_set(
COALESCE(execution_data, '{}'::jsonb),
ARRAY['error'],
to_jsonb(${message}::text)
),
ARRAY['finalOutput'],
jsonb_build_object('error', ${message}::text)
COALESCE(execution_data, '{}'::jsonb),
ARRAY['error'],
to_jsonb(${message}::text)
)`,
})
.where(eq(workflowExecutionLogs.executionId, executionId))

@@ -12,7 +12,6 @@ import { Client } from '@modelcontextprotocol/sdk/client/index.js'
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js'
import type { ListToolsResult, Tool } from '@modelcontextprotocol/sdk/types.js'
import { createLogger } from '@sim/logger'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import {
McpConnectionError,
type McpConnectionStatus,
@@ -203,7 +202,7 @@ export class McpClient {
const sdkResult = await this.client.callTool(
{ name: toolCall.name, arguments: toolCall.arguments },
undefined,
{ timeout: getMaxExecutionTimeout() }
{ timeout: 600000 } // 10 minutes - override SDK's 60s default
)

return sdkResult as McpToolResult

@@ -34,7 +34,7 @@ export function sanitizeHeaders(
* Client-safe MCP constants
*/
export const MCP_CLIENT_CONSTANTS = {
CLIENT_TIMEOUT: 5 * 60 * 1000,
CLIENT_TIMEOUT: 600000,
MAX_RETRIES: 3,
RECONNECT_DELAY: 1000,
} as const

@@ -81,8 +81,8 @@ describe('generateMcpServerId', () => {
})

describe('MCP_CONSTANTS', () => {
it.concurrent('has correct execution timeout (5 minutes)', () => {
expect(MCP_CONSTANTS.EXECUTION_TIMEOUT).toBe(300000)
it.concurrent('has correct execution timeout (10 minutes)', () => {
expect(MCP_CONSTANTS.EXECUTION_TIMEOUT).toBe(600000)
})

it.concurrent('has correct cache timeout (5 minutes)', () => {
@@ -107,8 +107,8 @@ describe('MCP_CONSTANTS', () => {
})

describe('MCP_CLIENT_CONSTANTS', () => {
it.concurrent('has correct client timeout (5 minutes)', () => {
expect(MCP_CLIENT_CONSTANTS.CLIENT_TIMEOUT).toBe(300000)
it.concurrent('has correct client timeout (10 minutes)', () => {
expect(MCP_CLIENT_CONSTANTS.CLIENT_TIMEOUT).toBe(600000)
})

it.concurrent('has correct auto refresh interval (5 minutes)', () => {

@@ -1,11 +1,12 @@
import { NextResponse } from 'next/server'
import { DEFAULT_EXECUTION_TIMEOUT_MS, getExecutionTimeout } from '@/lib/core/execution-limits'
import type { SubscriptionPlan } from '@/lib/core/rate-limiter/types'
import type { McpApiResponse } from '@/lib/mcp/types'
import { isMcpTool, MCP } from '@/executor/constants'

/**
* MCP-specific constants
*/
export const MCP_CONSTANTS = {
EXECUTION_TIMEOUT: DEFAULT_EXECUTION_TIMEOUT_MS,
EXECUTION_TIMEOUT: 600000,
CACHE_TIMEOUT: 5 * 60 * 1000,
DEFAULT_RETRIES: 3,
DEFAULT_CONNECTION_TIMEOUT: 30000,
@@ -13,10 +14,6 @@ export const MCP_CONSTANTS = {
MAX_CONSECUTIVE_FAILURES: 3,
} as const

export function getMcpExecutionTimeout(plan?: SubscriptionPlan): number {
return getExecutionTimeout(plan, 'sync')
}

/**
* Core MCP tool parameter keys that are metadata, not user-entered test values.
* These should be preserved when cleaning up params during schema updates.
@@ -48,8 +45,11 @@ export function sanitizeHeaders(
)
}

/**
* Client-safe MCP constants
*/
export const MCP_CLIENT_CONSTANTS = {
CLIENT_TIMEOUT: DEFAULT_EXECUTION_TIMEOUT_MS,
CLIENT_TIMEOUT: 600000,
AUTO_REFRESH_INTERVAL: 5 * 60 * 1000,
} as const


@@ -62,6 +62,9 @@ export interface ExecutionErrorEvent extends BaseExecutionEvent {
}
}

/**
* Execution cancelled event
*/
export interface ExecutionCancelledEvent extends BaseExecutionEvent {
type: 'execution:cancelled'
workflowId: string
@@ -168,6 +171,9 @@ export type ExecutionEvent =
| StreamChunkEvent
| StreamDoneEvent

/**
* Extracted data types for use in callbacks
*/
export type ExecutionStartedData = ExecutionStartedEvent['data']
export type ExecutionCompletedData = ExecutionCompletedEvent['data']
export type ExecutionErrorData = ExecutionErrorEvent['data']

@@ -1,9 +1,8 @@
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { RunActorParams, RunActorResult } from '@/tools/apify/types'
import type { ToolConfig } from '@/tools/types'

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time

export const apifyRunActorAsyncTool: ToolConfig<RunActorParams, RunActorResult> = {
id: 'apify_run_actor_async',

@@ -1,12 +1,11 @@
import { createLogger } from '@sim/logger'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { BrowserUseRunTaskParams, BrowserUseRunTaskResponse } from '@/tools/browser_use/types'
import type { ToolConfig, ToolResponse } from '@/tools/types'

const logger = createLogger('BrowserUseTool')

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const MAX_POLL_TIME_MS = 600000 // 10 minutes
const MAX_CONSECUTIVE_ERRORS = 3

async function createSessionWithProfile(

@@ -1,12 +1,11 @@
import { createLogger } from '@sim/logger'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { ExaResearchParams, ExaResearchResponse } from '@/tools/exa/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('ExaResearchTool')

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time

export const researchTool: ToolConfig<ExaResearchParams, ExaResearchResponse> = {
id: 'exa_research',

@@ -1,12 +1,11 @@
import { createLogger } from '@sim/logger'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { AgentParams, AgentResponse } from '@/tools/firecrawl/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('FirecrawlAgentTool')

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time

export const agentTool: ToolConfig<AgentParams, AgentResponse> = {
id: 'firecrawl_agent',

@@ -1,13 +1,12 @@
import { createLogger } from '@sim/logger'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { FirecrawlCrawlParams, FirecrawlCrawlResponse } from '@/tools/firecrawl/types'
import { CRAWLED_PAGE_OUTPUT_PROPERTIES } from '@/tools/firecrawl/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('FirecrawlCrawlTool')

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time

export const crawlTool: ToolConfig<FirecrawlCrawlParams, FirecrawlCrawlResponse> = {
id: 'firecrawl_crawl',

@@ -1,12 +1,11 @@
import { createLogger } from '@sim/logger'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import type { ExtractParams, ExtractResponse } from '@/tools/firecrawl/types'
import type { ToolConfig } from '@/tools/types'

const logger = createLogger('FirecrawlExtractTool')

const POLL_INTERVAL_MS = 5000
const MAX_POLL_TIME_MS = DEFAULT_EXECUTION_TIMEOUT_MS
const POLL_INTERVAL_MS = 5000 // 5 seconds between polls
const MAX_POLL_TIME_MS = 300000 // 5 minutes maximum polling time

export const extractTool: ToolConfig<ExtractParams, ExtractResponse> = {
id: 'firecrawl_extract',

@@ -1,6 +1,5 @@
import { createLogger } from '@sim/logger'
import { generateInternalToken } from '@/lib/auth/internal'
import { DEFAULT_EXECUTION_TIMEOUT_MS } from '@/lib/core/execution-limits'
import { secureFetchWithPinnedIP, validateUrlWithDNS } from '@/lib/core/security/input-validation'
import { generateRequestId } from '@/lib/core/utils/request'
import { getBaseUrl } from '@/lib/core/utils/urls'
@@ -626,8 +625,9 @@ async function executeToolRequest(
let response: Response

if (isInternalRoute) {
// Set up AbortController for timeout support on internal routes
const controller = new AbortController()
const timeout = requestParams.timeout || DEFAULT_EXECUTION_TIMEOUT_MS
const timeout = requestParams.timeout || 300000
const timeoutId = setTimeout(() => controller.abort(), timeout)

try {

@@ -1,5 +1,4 @@
import { createLogger } from '@sim/logger'
import { getMaxExecutionTimeout } from '@/lib/core/execution-limits'
import { getBaseUrl } from '@/lib/core/utils/urls'
import { AGENT, isCustomTool } from '@/executor/constants'
import { getCustomTool } from '@/hooks/queries/custom-tools'
@@ -124,7 +123,9 @@ export function formatRequestParams(tool: ToolConfig, params: Record<string, any
}
}

const MAX_TIMEOUT_MS = getMaxExecutionTimeout()
// Get timeout from params (if specified) and validate
// Must be a finite positive number, max 600000ms (10 minutes) as documented
const MAX_TIMEOUT_MS = 600000
const rawTimeout = params.timeout
const timeout = rawTimeout != null ? Number(rawTimeout) : undefined
const validTimeout =

@@ -6,7 +6,7 @@ export default defineConfig({
project: env.TRIGGER_PROJECT_ID!,
runtime: 'node',
logLevel: 'log',
maxDuration: 5400,
maxDuration: 600,
retries: {
enabledInDev: false,
default: {

@@ -17,7 +17,7 @@ from simstudio import SimStudioClient
# Initialize the client
client = SimStudioClient(
api_key=os.getenv("SIM_API_KEY", "your-api-key-here"),
base_url="https://sim.ai" # optional, defaults to https://sim.ai
base_url="https://api.sim.ai" # optional, defaults to https://api.sim.ai
)

# Execute a workflow
@@ -35,11 +35,11 @@ except Exception as error:
#### Constructor

```python
SimStudioClient(api_key: str, base_url: str = "https://sim.ai")
SimStudioClient(api_key: str, base_url: str = "https://api.sim.ai")
```

- `api_key` (str): Your Sim API key
- `base_url` (str, optional): Base URL for the Sim API (defaults to `https://sim.ai`)
- `base_url` (str, optional): Base URL for the Sim API (defaults to `https://api.sim.ai`)

#### Methods

@@ -364,7 +364,7 @@ from simstudio import SimStudioClient
# Using environment variables
client = SimStudioClient(
api_key=os.getenv("SIM_API_KEY"),
base_url=os.getenv("SIM_BASE_URL", "https://sim.ai")
base_url=os.getenv("SIM_BASE_URL", "https://api.sim.ai")
)
```


@@ -87,10 +87,10 @@ class SimStudioClient:

Args:
api_key: Your Sim API key
base_url: Base URL for the Sim API (defaults to https://sim.ai)
base_url: Base URL for the Sim API (defaults to https://api.sim.ai)
"""

def __init__(self, api_key: str, base_url: str = "https://sim.ai"):

def __init__(self, api_key: str, base_url: str = "https://api.sim.ai"):
self.api_key = api_key
self.base_url = base_url.rstrip('/')
self._session = requests.Session()

@@ -18,7 +18,7 @@ def test_simstudio_client_default_base_url():
"""Test SimStudioClient with default base URL."""
client = SimStudioClient(api_key="test-api-key")
assert client.api_key == "test-api-key"
assert client.base_url == "https://sim.ai"
assert client.base_url == "https://api.sim.ai"


def test_set_api_key():
@@ -51,7 +51,7 @@ def test_validate_workflow_returns_false_on_error(mock_get):
result = client.validate_workflow("test-workflow-id")

assert result is False
mock_get.assert_called_once_with("https://sim.ai/api/workflows/test-workflow-id/status")
mock_get.assert_called_once_with("https://api.sim.ai/api/workflows/test-workflow-id/status")


def test_simstudio_error():

@@ -20,7 +20,7 @@ import { SimStudioClient } from 'simstudio-ts-sdk';
// Initialize the client
const client = new SimStudioClient({
apiKey: 'your-api-key-here',
baseUrl: 'https://sim.ai' // optional, defaults to https://sim.ai
baseUrl: 'https://api.sim.ai' // optional, defaults to https://api.sim.ai
});

// Execute a workflow
@@ -43,7 +43,7 @@ new SimStudioClient(config: SimStudioConfig)
```

- `config.apiKey` (string): Your Sim API key
- `config.baseUrl` (string, optional): Base URL for the Sim API (defaults to `https://sim.ai`)
- `config.baseUrl` (string, optional): Base URL for the Sim API (defaults to `https://api.sim.ai`)

#### Methods


@@ -4,7 +4,7 @@ import { SimStudioClient, SimStudioError } from '../src/index'
async function basicExample() {
const client = new SimStudioClient({
apiKey: process.env.SIM_API_KEY!,
baseUrl: 'https://sim.ai',
baseUrl: 'https://api.sim.ai',
})

try {

@@ -113,7 +113,7 @@ export class SimStudioClient {

constructor(config: SimStudioConfig) {
this.apiKey = config.apiKey
this.baseUrl = normalizeBaseUrl(config.baseUrl || 'https://sim.ai')
this.baseUrl = normalizeBaseUrl(config.baseUrl || 'https://api.sim.ai')
}

/**